add role files

Signed-off-by: Jakub Sokołowski <jakub@status.im>
Jakub Sokołowski 2019-06-05 13:53:19 -04:00
parent 051bcd2d61
commit 59f173a018
7 changed files with 185 additions and 0 deletions

README.md Normal file

@@ -0,0 +1,22 @@
# Description
This role is intended for uploading backups to DigitalOcean Spaces using the [`s3cmd`](https://github.com/s3tools/s3cmd) tool.
# Usage
This role should be included by another role and run with the relevant variables set:
```yaml
- name: Configure Ghost backups
  include_role:
    name: s3cmd-backup
  vars:
    backup_name: my-app-backups
    backup_number: 1
    backup_hour: 4
    backup_day: '*/4'
    backup_directory: '/var/tmp/backups'
    backup_base_domain: 'ams3.digitaloceanspaces.com'
    backup_bucket_name: 's3://my-app-backups'
    backup_access_key: 'ACCESS_KEY'
    backup_secret_key: 'SECRET_KEY'
```
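Given these variables, the role renders the upload script to `/var/lib/backups/my-app-backups_backup.sh` (per the `backup_script` default) and schedules it via cron. A quick manual check might look like this (a sketch, assuming the example values above):
```bash
# Run the rendered backup script once by hand
sudo /var/lib/backups/my-app-backups_backup.sh
# Confirm the archive landed in the Spaces bucket
s3cmd ls s3://my-app-backups
```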

defaults/main.yml Normal file

@@ -0,0 +1,26 @@
---
# For uploading backups
s3cmd_version: '2.0.1'
s3cmd_name: 's3cmd-{{ s3cmd_version }}'
s3cmd_archive: '{{ s3cmd_name }}.tar.gz'
s3cmd_checksum: 'md5:ef525ec3bd2d430b10130de54f85e8bc'
s3cmd_url: 'https://github.com/s3tools/s3cmd/releases/download/v{{ s3cmd_version }}/{{ s3cmd_archive }}'
# Backup timing
backup_hour: 2
backup_minute: 0
backup_day: '*'
backup_month: '*'
# Number of most recent archives to upload
backup_number: 1
# DigitalOcean Spaces configuration
backup_name: 'default'
backup_directory: ~
backup_script: '/var/lib/backups/{{ backup_name }}_backup.sh'
backup_base_domain: ams3.digitaloceanspaces.com
# example: s3://discourse-backups
backup_bucket_name: ~
backup_encryption_pass: ~
backup_access_key: ~
backup_secret_key: ~
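The `s3cmd_checksum` default is verified by the download task in `tasks/s3cmd.yml`; it can also be cross-checked against the release tarball by hand (a sketch, assuming the archive was downloaded to `/tmp`):
```bash
# md5sum -c expects "<hash>  <file>" (two spaces between them)
echo 'ef525ec3bd2d430b10130de54f85e8bc  /tmp/s3cmd-2.0.1.tar.gz' | md5sum -c -
```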

tasks/backups.yml Normal file

@@ -0,0 +1,27 @@
---
- name: Create directories for backup script and archives
  file:
    path: '{{ item }}'
    state: directory
    group: adm
    mode: 0775
  with_items:
    - /var/tmp/backups
    - /var/lib/backups

- name: Create backup script
  template:
    src: backup.sh
    dest: '{{ backup_script }}'
    mode: 0755

- name: Configure a cron job for backups
  cron:
    name: 'Upload {{ backup_name }} backups'
    # timing defaults to 02:00 UTC (see defaults/main.yml)
    hour: '{{ backup_hour }}'
    minute: '{{ backup_minute }}'
    day: '{{ backup_day }}'
    month: '{{ backup_month }}'
    job: '{{ backup_script }}'
    user: root
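The resulting job can be inspected in root's crontab; with the role defaults (`backup_minute: 0`, `backup_hour: 2`, `backup_day: '*'`, `backup_month: '*'`) the entry would look roughly like this:
```bash
# List root's crontab on the target host
sudo crontab -l -u root
# Expected entry; the "#Ansible:" marker is how the cron module tracks the job:
#   #Ansible: Upload default backups
#   0 2 * * * /var/lib/backups/default_backup.sh
```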

tasks/main.yml Normal file

@@ -0,0 +1,3 @@
---
- include_tasks: s3cmd.yml
- include_tasks: backups.yml

tasks/s3cmd.yml Normal file

@@ -0,0 +1,30 @@
---
- name: Check if s3cmd is installed
  stat:
    path: /usr/local/bin/s3cmd
  register: s3cmd

- name: Download s3cmd archive
  get_url:
    url: '{{ s3cmd_url }}'
    dest: /tmp
    checksum: '{{ s3cmd_checksum }}'
  when: not s3cmd.stat.exists

- name: Extract s3cmd archive
  unarchive:
    src: '/tmp/{{ s3cmd_archive }}'
    dest: /tmp
    remote_src: yes
  when: not s3cmd.stat.exists

- name: Install s3cmd
  command: python setup.py install
  args:
    chdir: '/tmp/{{ s3cmd_name }}'
  when: not s3cmd.stat.exists

- name: Create s3cmd config
  template:
    src: s3cfg.j2
    dest: /root/.s3cfg
    mode: 0400
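Taken together, these tasks amount to the following manual steps, shown as a sketch using the role defaults:
```bash
# Download the pinned release, extract it, and install via setuptools
curl -fsSL -o /tmp/s3cmd-2.0.1.tar.gz \
  'https://github.com/s3tools/s3cmd/releases/download/v2.0.1/s3cmd-2.0.1.tar.gz'
tar -xzf /tmp/s3cmd-2.0.1.tar.gz -C /tmp
(cd /tmp/s3cmd-2.0.1 && python setup.py install)   # installs /usr/local/bin/s3cmd
```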

templates/backup.sh Normal file

@@ -0,0 +1,12 @@
#!/usr/bin/env bash
BACKUPS_NUM='{{ backup_number }}'
BACKUPS_DIR='{{ backup_directory }}'
BUCKET_NAME='{{ backup_bucket_name }}'
# Select the N most recent archives (ls -Art lists oldest-first; tail keeps the newest)
ARCHIVES=$(ls -Art "${BACKUPS_DIR}" | tail -n ${BACKUPS_NUM})
for ARCHIVE in ${ARCHIVES}; do
    echo "Uploading: ${ARCHIVE} >> ${BUCKET_NAME}"
    /usr/local/bin/s3cmd put "${BACKUPS_DIR}/${ARCHIVE}" "${BUCKET_NAME}"
done
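The `ls -Art | tail` pipeline lists entries oldest-first (`-t` sorts newest-first, `-r` reverses it), so `tail -n N` keeps the N most recently modified archives. A quick illustration with hypothetical file names:
```bash
# Assuming app-01.tar.gz is the oldest archive and app-03.tar.gz the newest:
ls -Art /var/tmp/backups | tail -n 1
# -> app-03.tar.gz (the most recently modified entry)
```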

templates/s3cfg.j2 Normal file

@@ -0,0 +1,65 @@
[default]
host_base = {{ backup_base_domain }}
host_bucket = %(bucket)s.{{ backup_base_domain }}
access_key = {{ backup_access_key }}
secret_key = {{ backup_secret_key }}
{% if backup_encryption_pass %}
encrypt = True
gpg_passphrase = {{ backup_encryption_pass }}
{% else %}
encrypt = False
{% endif %}
bucket_location = US
check_ssl_certificate = True
check_ssl_hostname = True
default_mime_type = binary/octet-stream
delay_updates = False
delete_after = False
delete_after_fetch = False
delete_removed = False
dry_run = False
enable_multipart = True
follow_symlinks = False
force = False
get_continue = False
gpg_command = /usr/bin/gpg
gpg_decrypt = %(gpg_command)s -d --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
gpg_encrypt = %(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
guess_mime_type = True
human_readable_sizes = False
invalidate_default_index_on_cf = False
invalidate_default_index_root_on_cf = True
invalidate_on_cf = False
limit = -1
limitrate = 0
list_md5 = False
long_listing = False
max_delete = -1
multipart_chunk_size_mb = 15
multipart_max_chunks = 10000
preserve_attrs = True
progress_meter = True
proxy_port = 0
put_continue = False
recursive = False
recv_chunk = 65536
reduced_redundancy = False
requester_pays = False
restore_days = 1
restore_priority = Standard
send_chunk = 65536
server_side_encryption = False
signature_v2 = False
signurl_use_https = False
skip_existing = False
socket_timeout = 300
stats = False
stop_on_error = False
throttle_max = 100
urlencoding_mode = normal
use_http_expect = False
use_https = True
use_mime_magic = True
verbosity = WARNING
website_endpoint = http://%(bucket)s.s3-website-%(location)s.amazonaws.com/
website_index = index.html
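When `backup_encryption_pass` is set, the rendered config enables client-side GPG encryption, so archives are encrypted with `gpg -c` before upload. Retrieval goes back through s3cmd, which decrypts using the passphrase from `/root/.s3cfg` (a sketch; the object name is hypothetical):
```bash
# s3cmd decrypts on download when the object was uploaded encrypted
s3cmd get s3://my-app-backups/app-backup.tar.gz /tmp/app-backup.tar.gz
```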