Merge pull request #423 from arachnys/alpine-docker

Switch to using alpine linux for docker container
commit 9388274b84
Frank Hamand, 2017-02-16 12:42:23 +00:00, committed by GitHub
13 changed files with 163 additions and 37 deletions

.travis.yml

@@ -18,7 +18,7 @@ before_script:
 script:
 - tox
-- docker-compose run --rm web bash bin/test_with_coverage
+- docker-compose -f docker-compose-test.yml run --rm test bash bin/test_with_coverage -v2
 - docker-compose run --rm -e CABOT_SUPERUSER_USERNAME='admin' -e CABOT_SUPERUSER_PASSWORD='pass' web true
 after_success:

CHANGES.md

@@ -1,6 +1,8 @@
 master
 ------
+* Build docker image from alpine
+* Refactor docker-compose files
 * Fix db_clean task failing on large results tables
 * Wait for docker containers to start in docker-entrypoint.sh
 * Update CABOT_PLUGINS_ENABLED to compatible plugin versions

Dockerfile

@@ -1,4 +1,4 @@
-FROM python:2.7
+FROM node:4-alpine
 ENV PYTHONUNBUFFERED 1
@@ -6,31 +6,32 @@ RUN mkdir /code
 WORKDIR /code
-RUN apt-get update && apt-get install -y \
+RUN apk add --no-cache \
     python-dev \
-    libsasl2-dev \
-    libldap2-dev \
-    libpq-dev \
-    npm
+    py-pip \
+    postgresql-dev \
+    gcc \
+    musl-dev \
+    libffi-dev \
+    openldap-dev \
+    bash
 RUN npm install -g \
     --registry http://registry.npmjs.org/ \
     coffee-script \
     less@1.3
-RUN ln -s `which nodejs` /usr/bin/node
 RUN pip install --upgrade pip
 COPY requirements.txt ./
 RUN pip install --no-cache-dir -r requirements.txt
 COPY requirements-dev.txt ./
 RUN pip install --no-cache-dir -r requirements-dev.txt
 COPY requirements-plugins.txt ./
 RUN pip install --no-cache-dir -r requirements-plugins.txt
-RUN pip install ipdb
 ADD . /code/
 ENTRYPOINT ["./docker-entrypoint.sh"]
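A note on the package swap: Alpine ships musl libc rather than glibc, so pip packages with C extensions (psycopg2 against postgresql-dev, python-ldap against openldap-dev, cffi-based libraries against libffi-dev) are compiled from source inside the image, which is why gcc and musl-dev join the list; bash is added because docker-entrypoint.sh declares #!/bin/bash and Alpine only ships ash by default. A quick sanity check of the resulting image (the cabot:web tag matches docker-compose-base.yml below; these commands are illustrative, not part of the PR):

    # Build the Alpine-based image
    docker build -t cabot:web .
    # Bypass the entrypoint (it waits for the broker/database) and print the base OS version
    docker run --rm --entrypoint cat cabot:web /etc/alpine-release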

cabot/cabotapp/tests/tests_basic.py

@@ -988,7 +988,7 @@ class TestCleanUpTask(LocalTestCase):
         self.assertEqual(StatusCheckResult.objects.all().count(), initial_results + 2)
         tasks.clean_db(batch_size=1)
-        self.assertEqual(StatusCheckResult.objects.all().count(), initial_results + 1)
+        self.assertEqual(StatusCheckResult.objects.all().count(), initial_results)

 class TestMinimizeTargets(LocalTestCase):

cabot/celeryconfig.py

@@ -4,6 +4,7 @@ from datetime import timedelta
 BROKER_URL = os.environ['CELERY_BROKER_URL']
 # Set environment variable if you want to run tests without a redis instance
 CELERY_ALWAYS_EAGER = os.environ.get('CELERY_ALWAYS_EAGER', False)
+CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', None)
 CELERY_IMPORTS = ('cabot.cabotapp.tasks', )
 CELERYBEAT_SCHEDULER = "djcelery.schedulers.DatabaseScheduler"
 CELERY_TASK_SERIALIZER = "json"
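The new CELERY_RESULT_BACKEND hook lets the test configuration point Celery at SQLite instead of redis; combined with CELERY_ALWAYS_EAGER (any non-empty string is truthy here) tasks execute inline in the test process. A minimal sketch of running the suite outside Docker, mirroring conf/test.env and assuming the Python dependencies are installed locally:

    # Values copied from conf/test.env; sqlalchemy (added to requirements-dev.txt
    # in this PR) provides the sqla+sqlite transport and db+sqlite result backend
    export DATABASE_URL=sqlite://:memory:
    export CELERY_BROKER_URL=sqla+sqlite://:memory:
    export CELERY_RESULT_BACKEND=db+sqlite://:memory:
    export CELERY_ALWAYS_EAGER=true
    python manage.py test -v2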

conf/default.env (new file, 74 lines)

@@ -0,0 +1,74 @@
# Plugins to be loaded at launch
CABOT_PLUGINS_ENABLED=cabot_alert_hipchat==1.8.3,cabot_alert_twilio==1.2.0,cabot_alert_email==1.4.3
DEBUG=t
DATABASE_URL=postgres://postgres@db:5432/postgres
DJANGO_SETTINGS_MODULE=cabot.settings
HIPCHAT_URL=https://api.hipchat.com/v1/rooms/message
LOG_FILE=/dev/null
PORT=5001
# You shouldn't need to change anything above this line
# Base path to include before generated URLs. If not defined, uses `/`
# URL_PREFIX=/
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
TIME_ZONE=Etc/UTC
# URL of calendar to synchronise rota with
CALENDAR_ICAL_URL=http://www.google.com/calendar/ical/example.ics
# Django settings
CELERY_BROKER_URL=redis://redis:6379/1
# From parameter for the graphite request. If not defined, by default take -10 minutes
# GRAPHITE_FROM=-10minute
# User-Agent string used for HTTP checks
HTTP_USER_AGENT=Cabot
# Used for pointing links back in alerts etc.
WWW_HTTP_HOST=localhost
WWW_SCHEME=http
# OPTIONAL SETTINGS
#
# # Django admin email
# ADMIN_EMAIL=you@example.com
# CABOT_FROM_EMAIL=cabot@example.com
#
# DJANGO_SECRET_KEY=
#
# Hostname of your Graphite server instance
GRAPHITE_API=http://graphite.example.com/
GRAPHITE_USER=username
GRAPHITE_PASS=password
# Hipchat integration
HIPCHAT_ALERT_ROOM=48052
HIPCHAT_API_KEY=your_hipchat_api_key
# Jenkins integration
JENKINS_API=https://jenkins.example.com/
JENKINS_USER=username
JENKINS_PASS=password
# SMTP settings
SES_HOST=email-smtp.us-east-1.amazonaws.com
SES_USER=username
SES_PASS=password
SES_PORT=465
# Twilio integration for SMS and telephone alerts
TWILIO_ACCOUNT_SID=your_account_sid
TWILIO_AUTH_TOKEN=your_auth_token
TWILIO_OUTGOING_NUMBER=+14155551234
# Use for LDAP authentication
AUTH_LDAP=true
AUTH_LDAP_SERVER_URI=ldap://ldap.example.com
AUTH_LDAP_BIND_DN="cn=Manager,dc=example,dc=com"
AUTH_LDAP_BIND_PASSWORD=""
AUTH_LDAP_USER_SEARCH="ou=People,dc=example,dc=com"
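The commented block under OPTIONAL SETTINGS is meant to be filled in per deployment; DJANGO_SECRET_KEY in particular ships empty. One way to generate a value for it (an illustrative one-liner, not part of the PR):

    # Print a 50-character random string suitable for DJANGO_SECRET_KEY
    python -c "import random, string; print(''.join(random.SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(50)))"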

conf/test.env (new file, 7 lines)

@@ -0,0 +1,7 @@
DATABASE_URL=sqlite://:memory:
CELERY_BROKER_URL=sqla+sqlite://:memory:
CELERY_RESULT_BACKEND=db+sqlite://:memory:
CELERY_ALWAYS_EAGER=true
SKIP_INIT=true

docker-compose-base.yml (new file, 10 lines)

@@ -0,0 +1,10 @@
version: '2'
services:
  base:
    build: .
    image: cabot:web
    command: "false"
    volumes:
      - .:/code
    env_file:
      - conf/default.env
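The base service is never meant to run: command: "false" makes an accidental `up` exit immediately, while the build context, image tag, code volume and default environment are defined once here and inherited by every service that extends it. Illustrative usage (not part of the PR):

    # Build the shared cabot:web image once; test, web, worker and beat all extend this service
    docker-compose -f docker-compose-base.yml build base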

docker-compose-test.yml (new file, 9 lines)

@@ -0,0 +1,9 @@
version: '2'
services:
  test:
    extends:
      file: docker-compose-base.yml
      service: base
    command: python manage.py test -v2
    env_file:
      - conf/test.env
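Running the test service with no arguments executes its default command (python manage.py test -v2); .travis.yml above overrides it with the coverage wrapper. Both invocations, for reference:

    # Default command from this file
    docker-compose -f docker-compose-test.yml run --rm test
    # Override used by CI (copied from .travis.yml)
    docker-compose -f docker-compose-test.yml run --rm test bash bin/test_with_coverage -v2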

docker-compose.yml

@ -1,37 +1,45 @@
version: '2'
services:
web:
extends:
file: docker-compose-base.yml
service: base
env_file:
- conf/development.env
build: .
image: cabot:web
environment:
- CABOT_SUPERUSER_USERNAME=admin
- CABOT_SUPERUSER_PASSWORD=pass
command: python manage.py runserver 0.0.0.0:5001
ports:
- "5001:5001"
volumes:
- .:/code
- "5000:5000"
links:
- redis
- db
worker:
extends:
file: docker-compose-base.yml
service: base
env_file:
- conf/development.env
image: cabot:web
command: python manage.py celery worker -A cabot --loglevel=DEBUG --concurrency=16 -Ofair
volumes:
- .:/code
environment:
- SKIP_INIT=1
- WAIT_FOR_MIGRATIONS=1
links:
- redis
- db
beat:
extends:
file: docker-compose-base.yml
service: base
env_file:
- conf/development.env
image: cabot:web
command: python manage.py celery beat -A cabot --loglevel=DEBUG
volumes:
- .:/code
environment:
- SKIP_INIT=1
- WAIT_FOR_MIGRATIONS=1
links:
- redis
- db
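Only the web service performs first-run initialisation: worker and beat set SKIP_INIT=1 so the entrypoint skips it, and WAIT_FOR_MIGRATIONS=1 so they block until web has migrated the database. The CABOT_SUPERUSER_* variables are presumably consumed during that init step to create an admin account; the smoke test in .travis.yml exercises exactly this path:

    # Run init once and exit (command copied from .travis.yml)
    docker-compose run --rm -e CABOT_SUPERUSER_USERNAME='admin' -e CABOT_SUPERUSER_PASSWORD='pass' web true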

docker-entrypoint.sh

@@ -1,6 +1,5 @@
 #!/bin/bash
-set -e
 set -o allexport

 function wait_for_broker {(
   set +e
@@ -20,6 +19,17 @@ function wait_for_database {(
   done
 )}

+function wait_for_migrations {(
+  set +e
+  for try in {1..60} ; do
+    # Kind of ugly but not sure if there's another way to determine if migrations haven't run
+    # migrate --list returns a checkbox list of migrations, empty checkboxes mean they haven't been run
+    python manage.py migrate --list | grep "\[ \]" &> /dev/null || break
+    echo "Waiting for database migrations to be run..."
+    sleep 1
+  done
+)}
+
 wait_for_broker
 wait_for_database
@@ -28,4 +38,8 @@ if [ -z "$SKIP_INIT" ]; then
   /code/bin/build-app
 fi
+
+if [ -n "$WAIT_FOR_MIGRATIONS" ]; then
+  wait_for_migrations
+fi
 exec "$@"
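The wait_for_migrations probe leans on the old-style `manage.py migrate --list` output, where unapplied migrations show up as "[ ]" rows (and applied ones as "[X]"): while grep keeps finding an empty checkbox the loop sleeps, and it breaks as soon as grep fails, meaning nothing is left to apply. A manual equivalent, assuming a shell inside the web container:

    # grep's exit status tells you whether unapplied migrations remain
    python manage.py migrate --list | grep "\[ \]" && echo "migrations pending" || echo "up to date"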

requirements-dev.txt

@@ -1,4 +1,5 @@
 -r requirements.txt
 coverage==4.2
 django_coverage_plugin==1.3.1
 mock==1.0.1
+sqlalchemy==1.1.5
+ipdb

requirements.txt

@@ -13,10 +13,9 @@ django-filter==0.13
 django-jsonify==0.3.0
 django-mptt==0.6.0
 django-polymorphic==0.7.2
-django-redis==1.4.5
 django-smtp-ssl==1.0
 djangorestframework==2.4.8
-gunicorn==18.0
+gunicorn==19.6.0
 gevent==1.0.1
 httplib2==0.7.7
 icalendar==3.2