Commit 71940dc2 authored by Henrique Dias

Merge remote-tracking branch 'upstream/master'

Conflicts:
	Gruntfile.coffee
parents 4e7cdba8 39aa46fb
@@ -36,7 +36,7 @@ module.exports = (grunt) ->
limit: SERVICES.length
logConcurrentOutput: true
coffee:
migrate:
expand: true,
flatten: false,
cwd: './',
@@ -69,9 +69,8 @@ module.exports = (grunt) ->
"Misc": [
"help"
]
"Install tasks": ("install:#{service.name}" for service in SERVICES).concat(["install:all", "install", "install:dirs", "install:config"])
"Install tasks": ("install:#{service.name}" for service in SERVICES).concat(["install:all", "install", "install:dirs"])
"Update tasks": ("update:#{service.name}" for service in SERVICES).concat(["update:all", "update"])
"Config tasks": ["install:config"]
"Checks": ["check", "check:redis", "check:latexmk", "check:s3", "check:make"]
for service in SERVICES
@@ -87,14 +86,13 @@ module.exports = (grunt) ->
done = @async()
Helpers.createNewRelease(service, grunt.option("release"), done)
grunt.registerTask 'install:config', "Copy the example config into the real config", () ->
Helpers.installConfig @async()
grunt.registerTask 'install:dirs', "Create the data directories", () ->
Helpers.createDataDirs @async()
grunt.registerTask 'install:all', "Download and set up all ShareLaTeX services",
["check:make"].concat(
("install:#{service.name}" for service in SERVICES)
).concat(["install:config", "install:dirs"])
).concat([ "install:dirs"])
grunt.registerTask 'install', 'install:all'
grunt.registerTask 'update:all', "Checkout and update all ShareLaTeX services",
["check:make"].concat(
@@ -174,7 +172,7 @@ module.exports = (grunt) ->
proc = spawn "git", ["pull"], cwd: dir, stdio: "inherit"
proc.on "close", () ->
callback()
createNewRelease: (service, version, callback = (error) ->) ->
dir = service.name
proc = spawn "sed", [
@@ -224,19 +222,6 @@ module.exports = (grunt) ->
exec "mkdir -p #{path}", callback
async.series jobs, callback
installConfig: (callback = (error) ->) ->
src = "config/settings.development.coffee.example"
dest = "config/settings.development.coffee"
if !fs.existsSync(dest)
grunt.log.writeln "Creating config at #{dest}"
config = fs.readFileSync(src).toString()
config = config.replace /CRYPTO_RANDOM/g, () ->
crypto.randomBytes(64).toString("hex")
fs.writeFileSync dest, config
callback()
else
grunt.log.writeln "Config file already exists. Skipping."
callback()
runGruntInstall: (service, callback = (error) ->) ->
dir = service.name
@@ -280,9 +265,9 @@ module.exports = (grunt) ->
latexmk comes with TeXLive 2013, and must be a version from 2013 or later.
If you already have TeXLive installed, then make sure it is
included in your PATH (example for 64-bit linux):
export PATH=$PATH:/usr/local/texlive/2014/bin/x86_64-linux/
This is not a fatal error, but compiling will not work without latexmk.
"""
return callback(error)
@@ -307,7 +292,7 @@ module.exports = (grunt) ->
"""
error = new Error("latexmk is too old")
callback(error)
checkAspell: (callback = (error) ->) ->
grunt.log.write "Checking aspell is installed... "
exec "aspell dump dicts", (error, stdout, stderr) ->
@@ -315,15 +300,15 @@ module.exports = (grunt) ->
grunt.log.error "FAIL."
grunt.log.errorlns """
Either aspell is not installed or is not in your PATH.
On Ubuntu you can install aspell with:
sudo apt-get install aspell
Or on a mac:
brew install aspell
This is not a fatal error, but the spell-checker will not work without aspell.
"""
return callback(error)
@@ -355,11 +340,11 @@ module.exports = (grunt) ->
Please configure your Amazon S3 credentials in config/settings.development.coffee
Amazon S3 (Simple Storage Service) is a cloud storage service provided by
Amazon. ShareLaTeX uses S3 for storing binary files like images. You can
sign up for an account and find out more at:
http://aws.amazon.com/s3/
"""
return callback()
client.getFile "does-not-exist", (error, response) ->
@@ -386,7 +371,7 @@ module.exports = (grunt) ->
else
grunt.log.error "FAIL."
grunt.log.errorlns """
Could not find directory "#{Settings.filestore.stores.user_files}".
Please check your configuration.
"""
callback()
@@ -401,11 +386,11 @@ module.exports = (grunt) ->
grunt.log.error "FAIL."
grunt.log.errorlns """
Either make is not installed or is not in your path.
On Ubuntu you can install make with:
sudo apt-get install build-essential
"""
return callback(error)
else if error?
@@ -418,4 +403,3 @@ module.exports = (grunt) ->
template = fs.readFileSync("package/upstart/sharelatex-template.conf").toString()
for service in SERVICES
fs.writeFileSync "package/upstart/sharelatex-#{service.name}.conf", template.replace(/__SERVICE__/g, service.name)
@@ -15,9 +15,7 @@ Installation
We have detailed installation instructions in our wiki:
* [Installing ShareLaTeX in Production using docker](https://github.com/sharelatex/sharelatex/wiki/Production-Installation-Instructions)
* [Setting up a ShareLaTeX Development Environment](https://github.com/sharelatex/sharelatex/wiki/Setting-up-a-Development-Environment)
**If you have any problems, have a look at our page of [Frequent Problems and Questions](https://github.com/sharelatex/sharelatex/wiki/FAQ).**
Upgrading
---------
@@ -25,18 +23,6 @@ Upgrading
If you are upgrading from a previous version of ShareLaTeX, please see the [Release Notes section on the Wiki](https://github.com/sharelatex/sharelatex/wiki/Home) for all of the versions between your current version and the version you are upgrading to.
Dependencies
------------
ShareLaTeX should run on OS X and Linux. You need:
* [Node.js](http://nodejs.org/) 0.10.x. We recommend that you use [nvm](https://github.com/creationix/nvm) to install it.
* The [grunt](http://gruntjs.com/) command line tools (Run `npm install -g grunt-cli` to install them)
* A local instance of [Redis](http://redis.io/topics/quickstart) (version 2.6.12 or later) and [MongoDB](http://docs.mongodb.org/manual/installation/) running on their standard ports.
* [TeXLive](https://www.tug.org/texlive/) 2013 or later with the `latexmk` program installed.
ShareLaTeX needs a minimum of 2 GB of memory, though actual usage is likely to be higher depending on load.
Other repositories
------------------
@@ -67,6 +53,10 @@ documents.
An API for performing CRUD (Create, Read, Update and Delete) operations on text files
stored in ShareLaTeX.
### [realtime](https://github.com/sharelatex/real-time-sharelatex) [![Build Status](https://travis-ci.org/sharelatex/real-time-sharelatex.png?branch=master)](https://travis-ci.org/sharelatex/real-time-sharelatex)
The WebSocket process that clients connect to.
### [filestore](https://github.com/sharelatex/filestore-sharelatex) [![Build Status](https://travis-ci.org/sharelatex/filestore-sharelatex.png?branch=master)](https://travis-ci.org/sharelatex/filestore-sharelatex)
An API for performing CRUD (Create, Read, Update and Delete) operations on binary files
# -*- mode: ruby -*-
# vi: set ft=ruby :
# Vagrantfile API/syntax version. Don't touch unless you know what you're doing!
VAGRANTFILE_API_VERSION = "2"
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
config.vm.box = "ubuntu-12.04"
config.vm.box_url = "http://files.vagrantup.com/precise64.box"
config.vm.network :forwarded_port, guest: 3000, host: 3000
config.vm.network :forwarded_port, guest: 80, host: 8080
config.ssh.forward_agent = true
config.vm.provider "virtualbox" do |v|
v.memory = 1024
end
config.vm.provision :chef_solo do |chef|
chef.cookbooks_path = "chef/cookbooks"
chef.add_recipe 'apt'
chef.add_recipe 'redis-server'
chef.add_recipe 'mongodb'
chef.add_recipe 'nodejs'
chef.add_recipe 'texlive'
chef.add_recipe 'packages'
# You may also specify custom JSON attributes:
chef.json = {}
end
# Enable provisioning with chef server, specifying the chef server URL,
# and the path to the validation key (relative to this Vagrantfile).
#
# The Opscode Platform uses HTTPS. Substitute your organization for
# ORGNAME in the URL and validation key.
#
# If you have your own Chef Server, use the appropriate URL, which may be
# HTTP instead of HTTPS depending on your configuration. Also change the
# validation key to validation.pem.
#
# config.vm.provision :chef_client do |chef|
# chef.chef_server_url = "https://api.opscode.com/organizations/ORGNAME"
# chef.validation_key_path = "ORGNAME-validator.pem"
# end
#
# If you're using the Opscode platform, your validator client is
# ORGNAME-validator, replacing ORGNAME with your organization name.
#
# If you have your own Chef Server, the default validation client name is
# chef-validator, unless you changed the configuration.
#
# chef.validation_client_name = "ORGNAME-validator"
end
# Chef configuration: point cookbook_path at the cookbooks directory one level above this file
current_dir = File.dirname(__FILE__)
cookbook_path ["#{current_dir}/../cookbooks"]
apt Cookbook CHANGELOG
======================
This file is used to list changes made in each version of the apt cookbook.
v2.3.8 (2014-02-14)
-------------------
### Bug
- **[COOK-4287](https://tickets.opscode.com/browse/COOK-4287)** - Cleanup the Kitchen
v2.3.6
------
* [COOK-4154] - Add chefspec matchers.rb file to apt cookbook
* [COOK-4102] - Only index created repository
v2.3.4
------
No change. Version bump for toolchain sanity
v2.3.2
------
- [COOK-3905] apt-get-update-periodic: configuration for the update period
- Updating style for rubocops
- Updating test-kitchen harness
v2.3.0
------
### Bug
- **[COOK-3812](https://tickets.opscode.com/browse/COOK-3812)** - Add a way to bypass the apt existence check
### Improvement
- **[COOK-3567](https://tickets.opscode.com/browse/COOK-3567)** - Allow users to bypass apt-cache via attributes
v2.2.1
------
### Improvement
- **[COOK-664](https://tickets.opscode.com/browse/COOK-664)** - Check platform before running apt-specific commands
v2.2.0
------
### Bug
- **[COOK-3707](https://tickets.opscode.com/browse/COOK-3707)** - multiple nics confuse apt::cacher-client
v2.1.2
------
### Improvement
- **[COOK-3551](https://tickets.opscode.com/browse/COOK-3551)** - Allow user to set up a trusted APT repository
v2.1.1
------
### Bug
- **[COOK-1856](https://tickets.opscode.com/browse/COOK-1856)** - Match GPG keys without case sensitivity
v2.1.0
------
- [COOK-3426]: cacher-ng fails with restrict_environment set to true
- [COOK-2859]: cacher-client executes out of order
- [COOK-3052]: Long GPG keys are downloaded on every run
- [COOK-1856]: apt cookbook should match keys without case sensitivity
- [COOK-3255]: Attribute name incorrect in README
- [COOK-3225]: Call use_inline_resources only if defined
- [COOK-3386]: Cache dir for apt-cacher-ng
- [COOK-3291]: apt_repository: enable usage of a keyserver on port 80
- Greatly expanded test coverage with ChefSpec and Test-Kitchen
v2.0.0
------
### Bug
- [COOK-2258]: apt: LWRP results in error under why-run mode in apt 1.9.0 cookbook
v1.10.0
-------
### Improvement
- [COOK-2885]: Improvements for apt cache server search
### Bug
- [COOK-2441]: Apt recipe broken in new chef version
- [COOK-2660]: Create Debian 6.0 "squeeze" specific template for
apt-cacher-ng
v1.9.2
------
- [COOK-2631] - Create Ubuntu 10.04 specific template for apt-cacher-ng
v1.9.0
------
- [COOK-2185] - Proxy for apt-key
- [COOK-2338] - Support pinning by glob() or regexp
v1.8.4
------
- [COOK-2171] - Update README to clarify required Chef version: 10.18.0
or higher.
v1.8.2
------
- [COOK-2112] - need [] around "arch" in sources.list entries
- [COOK-2171] - fixes a regression in the notification
v1.8.0
------
- [COOK-2143] - Allow for a custom cacher-ng port
- [COOK-2171] - On `apt_repository.run_action(:add)` the source file
is not created.
- [COOK-2184] - apt::cacher-ng, use `cacher_port` attribute in
acng.conf
v1.7.0
------
- [COOK-2082] - add "arch" parameter to apt_repository LWRP
v1.6.0
------
- [COOK-1893] - `apt_preference` use "`package_name`" resource instead of "name"
- [COOK-1894] - change filename for sources.list.d files
- [COOK-1914] - Wrong dir permissions for /etc/apt/preferences.d/
- [COOK-1942] - README.md has wrong name for the keyserver attribute
- [COOK-2019] - create 01proxy before any other apt-get updates get executed
v1.5.2
------
- [COOK-1682] - use template instead of file resource in apt::cacher-client
- [COOK-1875] - cacher-client should be Environment-aware
v1.5.0
------
- [COOK-1500] - Avoid triggering apt-get update
- [COOK-1548] - Add execute commands for autoclean and autoremove
- [COOK-1591] - Setting up the apt proxy should leave https
connections direct
- [COOK-1596] - execute[apt-get-update-periodic] never runs
- [COOK-1762] - create /etc/apt/preferences.d directory
- [COOK-1776] - apt key check isn't idempotent
v1.4.8
------
* Adds test-kitchen support
- [COOK-1435] - repository lwrp is not idempotent with http key
v1.4.6
------
- [COOK-1530] - apt_repository isn't aware of update-success-stamp
file (also reverts COOK-1382 patch).
v1.4.4
------
- [COOK-1229] - Allow cacher IP to be set manually in non-Chef Solo
environments
- [COOK-1530] - Immediately update apt-cache when sources.list file is dropped off
v1.4.2
------
- [COOK-1155] - LWRP for apt pinning
v1.4.0
------
- [COOK-889] - overwrite existing repo source files
- [COOK-921] - optionally use cookbook\_file or remote\_file for key
- [COOK-1032] - fixes problem with apt repository key installation
apt Cookbook
============
This cookbook includes recipes to execute apt-get update to ensure the local APT package cache is up to date. There are recipes for managing the apt-cacher-ng caching proxy and proxy clients. It also includes an LWRP for managing APT repositories in /etc/apt/sources.list.d as well as an LWRP for pinning packages via /etc/apt/preferences.d.
Requirements
------------
**Version 2.0.0+ of this cookbook requires Chef 11.0.0 or later**. If your Chef version is earlier than 11.0.0, use version 1.10.0 of this cookbook.
Versions 1.8.2 to 1.10.0 of this cookbook require **Chef 10.16.4** or later.
If your Chef version is earlier than 10.16.4, use version 1.7.0 of this cookbook.
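If you manage cookbook versions with Berkshelf, a constraint along the following lines reflects the compatibility notes above; the source URL and version pins are illustrative, not part of this cookbook.

```ruby
# Illustrative Berksfile; adjust the source URL for your Berkshelf setup.
source 'https://supermarket.getchef.com'

cookbook 'apt', '~> 2.0'      # requires Chef 11.0.0 or later
# cookbook 'apt', '= 1.10.0'  # pin a 1.x release instead on Chef 10.16.4+
```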
### Platform
Please refer to the [TESTING file](TESTING.md) to see the currently tested (and passing) platforms. The release was tested on:
* Ubuntu 10.04
* Ubuntu 12.04
* Ubuntu 13.04
* Debian 7.1
* Debian 6.0 (verified with manual testing)
May work with or without modification on other Debian derivatives.
Recipes
-------
### default
This recipe installs the `update-notifier-common` package to provide the timestamp file used to only run `apt-get update` if the cache is more than one day old.
This recipe should appear first in the run list of Debian or Ubuntu nodes to ensure that the package cache is up to date before managing any `package` resources with Chef.
This recipe also sets up a local cache directory for preseeding packages.
**Including the default recipe on a node that does not support apt (such as Windows) results in a noop.**
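For illustration, a hypothetical role that puts this recipe ahead of anything installing packages might look like the following; the role and the second recipe are placeholders, not part of this cookbook.

```ruby
# Hypothetical base role: run apt first so the package cache is refreshed
# before any package resources are managed.
name 'base'
description 'Baseline configuration for Debian/Ubuntu nodes'
run_list(
  'recipe[apt]',             # apt::default updates the cache if it is stale
  'recipe[build-essential]'  # placeholder for recipes that install packages
)
```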
### cacher-client
Configures the node to use the `apt-cacher-ng` server as a client.
#### Bypassing the cache
Occasionally you may come across repositories that do not play nicely when the node is using an `apt-cacher-ng` server. You can configure `cacher-client` to bypass the server and connect directly to the repository with the `cache_bypass` attribute.
To do this, override the `cache_bypass` attribute with a hash of repositories, using each repository URL as the key and the protocol to use as the value:
```json
{
  ...,
  "apt": {
    ...,
    "cache_bypass": {
      "URL": "PROTOCOL"
    }
  }
}
```
For example, to prevent caching and directly connect to the repository at `download.oracle.com` via http:
```json
{
  "apt": {
    "cache_bypass": {
      "download.oracle.com": "http"
    }
  }
}
```
### cacher-ng
Installs the `apt-cacher-ng` package and service so the system can provide APT caching. You can check the usage report at http://{hostname}:3142/acng-report.html.
If you wish to help the `cacher-ng` recipe seed itself, you must now explicitly include the `cacher-client` recipe in your run list **after** `cacher-ng`, or you will block your ability to install any packages (i.e. `apt-cacher-ng`).
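As a sketch of that ordering, a hypothetical role for a node that both serves and consumes the cache could look like this (the role name is a placeholder):

```ruby
# Hypothetical role: cacher-ng must precede cacher-client, per the note above.
name 'apt_cache_server'
run_list(
  'recipe[apt::cacher-ng]',     # install and start the caching proxy first
  'recipe[apt::cacher-client]'  # then point this node at its own cache
)
```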
Attributes
----------
* `['apt']['cacher_ipaddress']` - use a cacher server (or standard proxy server) not available via search
* `['apt']['cacher_interface']` - interface to connect to the cacher-ng service, no default.
* `['apt']['cacher_port']` - port for the cacher-ng service (either client or server), default is '3142'
* `['apt']['cacher_dir']` - directory used by cacher-ng service, default is '/var/cache/apt-cacher-ng'
* `['apt']['cacher-client']['restrict_environment']` - restrict your node to using the `apt-cacher-ng` server in your Environment, default is 'false'
* `['apt']['compiletime']` - force the `cacher-client` recipe to run before other recipes. It forces apt to use the proxy before other recipes run. Useful if your nodes have limited access to public apt repositories. This is overridden if the `cacher-ng` recipe is in your run list. Default is 'false'
* `['apt']['cache_bypass']` - array of URLs to bypass the cache. Accepts the URL and protocol to fetch directly from the remote repository and not attempt to cache
* `['apt']['periodic_update_min_delay']` - minimum delay (in seconds) between two actual executions of `apt-get update` by the `execute[apt-get-update-periodic]` resource, default is '86400' (24 hours)
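For illustration, a role that overrides a few of the client-side attributes above might look like the following; the role name and values are examples, not defaults beyond those already listed.

```ruby
# Hypothetical role tuning some of the cacher-client attributes listed above
name 'apt_cache_client'
run_list('recipe[apt::cacher-client]')
default_attributes(
  'apt' => {
    'cacher_port'   => 3142,                               # must match the server's port
    'compiletime'   => false,                              # do not force proxy setup at compile time
    'cacher-client' => { 'restrict_environment' => false } # allow cross-environment search
  }
)
```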
Libraries
---------
There is an `interface_ipaddress` method that returns the IP address for a particular host and interface, used by the `cacher-client` recipe. To enable it on the server use the `['apt']['cacher_interface']` attribute.
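A minimal sketch of enabling that lookup on the cache server via a role follows; the role name and the interface `eth1` are assumptions.

```ruby
# Hypothetical role for the cacher-ng server: with cacher_interface set,
# cacher-client nodes can resolve this server's address via interface_ipaddress.
name 'apt_cacher_server'
run_list('recipe[apt::cacher-ng]')
default_attributes('apt' => { 'cacher_interface' => 'eth1' })
```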
Resources/Providers
-------------------
### `apt_repository`
This LWRP provides an easy way to manage additional APT repositories. Adding a new repository will notify running the `execute[apt-get-update]` resource immediately.
#### Actions
- :add: creates a repository file and builds the repository listing
- :remove: removes the repository file
#### Attribute Parameters
- repo_name: name attribute. The name of the channel to discover
- uri: the base of the Debian distribution
- distribution: this is usually your release's codename, i.e. something like `karmic`, `lucid` or `maverick`
- components: package groupings; when in doubt use `main`
- arch: constrain package to a particular arch like `i386`, `amd64` or even `armhf` or `powerpc`. Defaults to nil.
- trusted: treat all packages from this repository as authenticated regardless of signature
- deb_src: whether or not to add the repository as a source repo as well - value can be `true` or `false`, default `false`.
- keyserver: the GPG keyserver where the key for the repo should be retrieved
- key: if a `keyserver` is provided, this is assumed to be the fingerprint, otherwise it can be either the URI to the GPG key for the repo, or a cookbook_file.
- key_proxy: if set, pass the specified proxy via `http-proxy=` to GPG.
- cookbook: if key should be a cookbook_file, specify a cookbook where the key is located for files/default. Defaults to nil, so it will use the cookbook where the resource is used.
#### Examples
Add the Zenoss repo:
```ruby
apt_repository 'zenoss' do
uri 'http://dev.zenoss.org/deb'
components ['main', 'stable']
end
```
Add the Nginx PPA, grabbing the key from keyserver:
```ruby
apt_repository 'nginx-php' do
uri 'http://ppa.launchpad.net/nginx/php5/ubuntu'
distribution node['lsb']['codename']
components ['main']
keyserver 'keyserver.ubuntu.com'
key 'C300EE8C'
end
```
Add the Nginx PPA, grab the key from the keyserver, and add source repo:
```ruby
apt_repository 'nginx-php' do
uri 'http://ppa.launchpad.net/nginx/php5/ubuntu'
distribution node['lsb']['codename']
components ['main']
keyserver 'keyserver.ubuntu.com'
key 'C300EE8C'
deb_src true
end
```
Add the Cloudera Repo of CDH4 packages for Ubuntu 12.04 on AMD64:
```ruby
apt_repository 'cloudera' do
uri 'http://archive.cloudera.com/cdh4/ubuntu/precise/amd64/cdh'
arch 'amd64'
distribution 'precise-cdh4'
components ['contrib']
key 'http://archive.cloudera.com/debian/archive.key'
end
```
Remove Zenoss repo:
```ruby
apt_repository 'zenoss' do
action :remove
end
```
### `apt_preference`
This LWRP provides an easy way to pin packages in /etc/apt/preferences.d. Although apt-pinning is quite helpful from time to time, please note that Debian does not encourage its use without thorough consideration.
Further information regarding apt-pinning is available via http://wiki.debian.org/AptPreferences.
#### Actions
- :add: creates a preferences file under /etc/apt/preferences.d
- :remove: removes the file, therefore unpinning the package
#### Attribute Parameters
- package_name: name attribute. The name of the package
- glob: Pin by glob() expression or regexp surrounded by /.
- pin: The package version/repository to pin
- pin_priority: The pinning priority aka "the highest package version wins"
#### Examples
Pin libmysqlclient16 to version 5.1.49-3:
```ruby
apt_preference 'libmysqlclient16' do
pin 'version 5.1.49-3'
pin_priority '700'
end
```
Unpin libmysqlclient16:
```ruby
apt_preference 'libmysqlclient16' do
action :remove
end
```
Pin all packages from dotdeb.org:
```ruby
apt_preference 'dotdeb' do
glob '*'
pin 'origin packages.dotdeb.org'
pin_priority '700'
end
```
Usage
-----