import os
from subprocess import check_output

from fabric.api import *
from fabric.contrib.files import *
from fabric.contrib.project import rsync_project
# Fabric connection settings: respect ~/.ssh/config and log in as ubuntu.
env.use_ssh_config = True
env.user = 'ubuntu'

# Local checkout root (the directory containing this fabfile).
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))

# Remote filesystem layout.
HOME_DIR = '/home/ubuntu'
DEPLOY_PATH = '%s/cabot' % HOME_DIR
LOG_DIR = '/var/log/cabot/'
VENV_DIR = '%s/venv' % HOME_DIR
BACKUP_DIR = '/tmp/'

# Postgres connection details used by create_database() and backup().
PG_DATABASE = 'index'
PG_USERNAME = 'cabot'
PG_PASSWORD = 'cabot'  # You should probably change this


def _ensure_dirs():
    """Create remote directories the app needs (currently just LOG_DIR)."""
    for directory in [LOG_DIR]:
        sudo('mkdir -p {d}'.format(d=directory))
        # World-writable so unprivileged app processes can write their logs.
        sudo('chmod -R 777 {d}'.format(d=directory))


def _setup_venv():
    """Create the remote virtualenv at VENV_DIR if it does not already exist."""
    with settings(warn_only=True):
        # `test -d` fails when the directory is missing; warn_only stops
        # Fabric from aborting on that expected failure.
        if sudo('test -d {0}'.format(VENV_DIR)).failed:
            sudo('virtualenv {0}'.format(VENV_DIR))


def install_requirements(deploy_path=DEPLOY_PATH):
    """pip-install the project (editable) into the remote virtualenv.

    Runs under foreman so pip sees the env vars for the selected
    deploy environment (env.deploy_version).
    """
    command = "foreman run -e conf/{env}.env {venv}/bin/pip install --editable {path} --exists-action=w"
    sudo(command.format(env=env.deploy_version, venv=VENV_DIR, path=deploy_path))


def run_migrations(deploy_path=DEPLOY_PATH):
|
2014-01-28 00:53:34 +00:00
|
|
|
with cd(deploy_path):
|
|
|
|
with prefix("source {venv}/bin/activate".format(venv=VENV_DIR)):
|
|
|
|
sudo(
|
2015-04-03 15:55:54 +00:00
|
|
|
"foreman run -e conf/{env}.env python manage.py syncdb".format(env=env.deploy_version))
|
2014-01-28 00:53:34 +00:00
|
|
|
sudo(
|
2015-04-03 15:55:54 +00:00
|
|
|
"foreman run -e conf/{env}.env python manage.py migrate cabotapp --noinput".format(env=env.deploy_version))
|
2014-01-28 00:53:34 +00:00
|
|
|
# Wrap in failure for legacy reasons
|
2014-07-14 16:29:46 +00:00
|
|
|
# https://github.com/celery/django-celery/issues/149
|
|
|
|
print "You can ignore an error message regarding 'relation \"celery_taskmeta\" already exists'"
|
2014-01-28 00:53:34 +00:00
|
|
|
with settings(warn_only=True):
|
|
|
|
sudo(
|
2015-04-03 15:55:54 +00:00
|
|
|
"foreman run -e conf/{env}.env python manage.py migrate djcelery --noinput".format(env=env.deploy_version))
|
2014-01-05 17:24:04 +00:00
|
|
|
|
|
|
|
|
|
|
|
def collect_static(deploy_path=DEPLOY_PATH):
    """Collect and compress static assets on the remote host."""
    manage = "foreman run -e conf/{env}.env python manage.py ".format(
        env=env.deploy_version)
    with cd(deploy_path):
        with prefix("source {venv}/bin/activate".format(venv=VENV_DIR)):
            sudo(manage + "collectstatic --noinput")
            sudo(manage + "compress")


def setup_upstart(deploy_path=DEPLOY_PATH):
    """Export the Procfile to upstart jobs under /etc/init via foreman.

    Deliberately references the master (symlinked) DEPLOY_PATH rather than
    the versioned deploy directory, so the jobs survive later deploys.
    """
    with cd(deploy_path):
        # Point at master (i.e. symlinked) path
        proc_path = os.path.join(DEPLOY_PATH, 'Procfile')
        env_path = os.path.join(DEPLOY_PATH, 'conf', '%s.env' % env.deploy_version)
        tmpl_path = os.path.join(DEPLOY_PATH, 'upstart')
        sudo('foreman export upstart /etc/init -f {conf} -e {env} -u ubuntu -a cabot -t {tmplt}'.format(
            conf=proc_path, env=env_path, tmplt=tmpl_path))


def production():
    """
    Select production instance(s)
    """
    # Subsequent tasks on the command line will run against this host.
    env.hosts = ['cabot.arachnys.com']


def restart():
    """Restart the cabot upstart service, starting it if it was not running."""
    with settings(warn_only=True):
        # `restart` fails when the job is stopped; fall back to `start`.
        result = sudo('restart cabot')
        if result.failed:
            sudo('start cabot')


def stop():
    """Stop the cabot upstart service (ignores failure if already stopped)."""
    with settings(warn_only=True):
        sudo('stop cabot')


def provision():
    """
    Provision a clean Ubuntu 12.04 instance with dependencies
    """
    # Ship the local public key so the setup script can authorise it.
    key_path = os.path.expanduser('~/.ssh/id_rsa.pub')
    with open(key_path) as fh:
        local_ssh_key = fh.read().strip('\n')
    put('bin/setup_dependencies.sh', '/tmp/setup_dependencies.sh')
    sudo('LOCAL_SSH_KEY="%s" bash /tmp/setup_dependencies.sh' % local_ssh_key)
    # Clean up
    run('rm /tmp/setup_dependencies.sh')


def deploy(deploy_version=None):
|
2014-01-28 00:53:34 +00:00
|
|
|
"""
|
|
|
|
Deploy a new version of code to production or test server.
|
|
|
|
|
|
|
|
Push code to remote server, install requirements, apply migrations,
|
|
|
|
collect and compress static assets, export foreman to upstart,
|
|
|
|
restart service
|
|
|
|
"""
|
|
|
|
# TODO: replace this with
|
|
|
|
# - zip up working directory
|
|
|
|
# - upload and unzip into DEPLOY_PATH
|
|
|
|
env.deploy_version = deploy_version or 'production'
|
|
|
|
dirname = check_output(
|
|
|
|
["echo \"$(date +'%Y-%m-%d')-$(git log --pretty=format:'%h' -n 1)\""], shell=True).strip('\n ')
|
|
|
|
deploy_path = os.path.join(HOME_DIR, dirname)
|
|
|
|
run('mkdir -p {}'.format(deploy_path))
|
|
|
|
print 'Uploading project to %s' % deploy_path
|
|
|
|
rsync_project(
|
|
|
|
remote_dir=deploy_path,
|
|
|
|
local_dir='./',
|
|
|
|
exclude=['.git', 'backups', 'venv',
|
|
|
|
'static/CACHE', '.vagrant', '*.pyc', 'dev.db'],
|
|
|
|
)
|
|
|
|
with cd(deploy_path):
|
2014-07-11 14:22:12 +00:00
|
|
|
_ensure_dirs()
|
2014-01-28 00:53:34 +00:00
|
|
|
_setup_venv()
|
|
|
|
create_database()
|
|
|
|
install_requirements(deploy_path)
|
|
|
|
run_migrations(deploy_path)
|
|
|
|
collect_static(deploy_path)
|
|
|
|
# This may cause a bit of downtime
|
|
|
|
run('ln -sfn {new} {current}'.format(
|
|
|
|
new=deploy_path,
|
|
|
|
current=DEPLOY_PATH
|
|
|
|
))
|
|
|
|
setup_upstart(deploy_path)
|
|
|
|
restart()
|
|
|
|
print "Done!"
|
2014-01-05 17:24:04 +00:00
|
|
|
|
|
|
|
|
|
|
|
def backup():
    """
    Back up database locally

    TODO: send backups to s3
    """
    backup_file = 'outfile.sql.gz'
    with cd(BACKUP_DIR):
        # Use the configured Postgres credentials rather than hard-coded
        # duplicates, so this stays in sync with create_database().
        run('PGPASSWORD={pw} pg_dump -U {user} {db} | gzip > {f}'.format(
            pw=PG_PASSWORD, user=PG_USERNAME, db=PG_DATABASE, f=backup_file))
        # %(basename)s is expanded by Fabric's get() to the remote file name.
        get(backup_file, 'backups/%(basename)s')


def create_database():
    """Creates role and database"""
    # warn_only: both statements fail harmlessly when role/db already exist.
    with settings(warn_only=True):
        create_user_sql = (
            'CREATE USER %s WITH NOCREATEDB NOCREATEUSER '
            'ENCRYPTED PASSWORD E\'%s\'' % (PG_USERNAME, PG_PASSWORD))
        sudo('psql -c "%s"' % create_user_sql, user='postgres')
        sudo('psql -c "CREATE DATABASE %s WITH OWNER %s"' %
             (PG_DATABASE, PG_USERNAME), user='postgres')


@parallel
def logs():
    """
    Tail logfiles
    """
    # Follows both app logs and nginx logs, in parallel across all hosts.
    sudo('tail -f %s* /var/log/nginx/*.log' % LOG_DIR)