Mirror of https://github.com/status-im/infra-role-s3cmd-upload.git
Synced 2025-01-21 06:08:50 +00:00

rename role to infra-role-s3cmd-upload

Also changed prefix of variables to `s3cmd_upload_` from `backup_`.

Signed-off-by: Jakub Sokołowski <jakub@status.im>

Parent: 554ee3f5d5
Commit: 3cc98275bd

README.md — 42 lines changed
@@ -6,8 +6,8 @@ This role is intended for uploading backups to DigitalOcean Spaces using the [`s
 In your `requirements.yml` file:
 ```yaml
-- name: infra-role-s3cmd-backup
+- name: infra-role-s3cmd-upload
-  src: git@github.com:status-im/infra-role-s3cmd-backup.git
+  src: git@github.com:status-im/infra-role-s3cmd-upload.git
   scm: git
 ```

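The renamed role would then be fetched the usual way; a minimal sketch, assuming the `requirements.yml` above sits next to your playbooks and the default roles path is used:

```
# Install (or refresh) the role listed in requirements.yml.
ansible-galaxy install -r requirements.yml --force
```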
@@ -17,22 +17,22 @@ This role should be included by another role and run with certain variables:
 ```yaml
 - name: Configure Ghost backups
   include_role:
-    name: s3cmd-backup
+    name: s3cmd-upload
   vars:
-    backup_name: my-app-backups
+    s3cmd_upload_name: 'my-app-backups'
-    backup_number: 1
+    s3cmd_upload_number: 1
-    backup_hour: 4
+    s3cmd_upload_hour: 4
-    backup_day: '*/4'
+    s3cmd_upload_day: '*/4'
-    backup_directory: '/var/tmp/backups'
+    s3cmd_upload_directory: '/var/tmp/backups'
-    backup_base_domain: 'ams3.digitaloceanspaces.com'
+    s3cmd_upload_base_domain: 'ams3.digitaloceanspaces.com'
-    backup_bucket_name: 'my-app-backups'
+    s3cmd_upload_bucket_name: 'my-app-backups'
-    backup_access_key: 'ACCESS_KEY'
+    s3cmd_upload_access_key: 'ACCESS_KEY'
-    backup_secret_key: 'SECRET_KEY'
+    s3cmd_upload_secret_key: 'SECRET_KEY'
 ```

 If you want to use S3 you can set:
 ```yaml
-backup_base_domain: s3.amazonaws.com
+s3cmd_upload_base_domain: 's3.amazonaws.com'
 ```

 # Administration
@@ -40,21 +40,21 @@ backup_base_domain: s3.amazonaws.com
 The upload job runs as a systemd service triggered by a timer.
 Assuming our backup target is called `database` you can do:
 ```
-$ sudo systemctl status backup-database.service
+$ sudo systemctl status upload-database.service
-● backup-database.service - "Service for uploading database backups to s3 buckets."
+● upload-database.service - "Service for uploading database backups to s3 buckets."
-   Loaded: loaded (/lib/systemd/system/backup-database.service; static; vendor preset: enabled)
+   Loaded: loaded (/lib/systemd/system/upload-database.service; static; vendor preset: enabled)
    Active: inactive (dead) since Fri 2020-01-24 15:08:57 UTC; 7min ago
-     Docs: https://github.com/status-im/infra-role-s3cmd-backup
+     Docs: https://github.com/status-im/infra-role-s3cmd-upload
-  Process: 15536 ExecStart=/var/lib/backups/backup_hackmd.sh (code=exited, status=0/SUCCESS)
+  Process: 15536 ExecStart=/usr/local/bin/upload_database.sh (code=exited, status=0/SUCCESS)
  Main PID: 15536 (code=exited, status=0/SUCCESS)

 Jan 24 15:08:44 node-01.do-ams3.todo.misc systemd[1]: Starting "Service for uploading database backups to s3 buckets."...
-Jan 24 15:08:44 node-01.do-ams3.todo.misc backup_database.sh[15536]: Uploading: database_db_dump_20200124040001.sql >> s3://hackmd-backups
+Jan 24 15:08:44 node-01.do-ams3.todo.misc upload_database.sh[15536]: Uploading: database_db_dump_20200124040001.sql >> s3://database-backups
 ...
 ```
 You can check the timer status too:
 ```
-$ sudo systemctl list-timers backup-database.timer
+$ sudo systemctl list-timers upload-database.timer
 NEXT                        LEFT    LAST PASSED UNIT                   ACTIVATES
-Sat 2020-01-25 00:00:00 UTC 8h left n/a  n/a    backup-database.timer  backup-database.service
+Sat 2020-01-25 00:00:00 UTC 8h left n/a  n/a    upload-database.timer  upload-database.service
 ```
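An illustrative aside on operating the renamed units (a sketch, not output from this commit), assuming the `database` target from the example above:

```
# Kick off one upload run by hand instead of waiting for the timer.
sudo systemctl start upload-database.service

# Inspect what the upload script logged during the last run.
sudo journalctl -u upload-database.service --since "1 hour ago"
```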
@@ -1,24 +1,24 @@
 ---
-backup_name: 'default'
+s3cmd_upload_name: 'default'
-backup_directory: ~
+s3cmd_upload_directory: ~
-backup_script_dir: '/var/lib/backups'
+s3cmd_upload_script_dir: '/usr/local/bin'
-backup_script: '{{ backup_script_dir }}/backup_{{ backup_name }}.sh'
+s3cmd_upload_script: '{{ s3cmd_upload_script_dir }}/upload_{{ s3cmd_upload_name }}.sh'
-backup_service_path: '/lib/systemd/system'
+s3cmd_upload_service_path: '/lib/systemd/system'
-backup_service_name: 'backup-{{ backup_name }}'
+s3cmd_upload_service_name: 'upload-{{ s3cmd_upload_name }}'
-backup_service_user: root
+s3cmd_upload_service_user: root
 # It takes a bit to upload files
-backup_service_start_timeout: 120
+s3cmd_upload_service_start_timeout: 120
 # Optionally wait for specified service to run
-backup_service_extra_after: ~
+s3cmd_upload_service_extra_after: ~
 # Backup frequency in systemd OnCalendar format
-backup_timer_frequency: 'daily'
+s3cmd_upload_timer_frequency: 'daily'
 # Number of most recent files to backup
-backup_number: 1
+s3cmd_upload_number: 1

 # Digital Ocean Spaces configuration
-backup_base_domain: ams3.digitaloceanspaces.com
+s3cmd_upload_base_domain: ams3.digitaloceanspaces.com
 # example: s3://discourse-backups
-backup_bucket_name: ~
+s3cmd_upload_bucket_name: ~
-backup_encryption_pass: ~
+s3cmd_upload_encryption_pass: ~
-backup_access_key: ~
+s3cmd_upload_access_key: ~
-backup_secret_key: ~
+s3cmd_upload_secret_key: ~
@@ -6,11 +6,11 @@
     group: adm
     mode: 0775
   with_items:
-    - '{{ backup_script_dir }}'
+    - '{{ s3cmd_upload_script_dir }}'
-    - '{{ backup_directory }}'
+    - '{{ s3cmd_upload_directory }}'

 - name: Create backup script
   template:
-    src: backup.sh
+    src: 'upload.sh.j2'
-    dest: '{{ backup_script }}'
+    dest: '{{ s3cmd_upload_script }}'
     mode: 0755
@@ -1,27 +1,27 @@
 ---
-- name: 'Create systemd service file: {{ backup_service_name }}'
+- name: 'Create systemd service file: {{ s3cmd_upload_service_name }}'
   template:
     src: backup.service.j2
-    dest: '{{ backup_service_path }}/{{ backup_service_name }}.service'
+    dest: '{{ s3cmd_upload_service_path }}/{{ s3cmd_upload_service_name }}.service'
     mode: 0644

-- name: 'Create systemd timer file: {{ backup_service_name }}'
+- name: 'Create systemd timer file: {{ s3cmd_upload_service_name }}'
   template:
     src: backup.timer.j2
-    dest: '{{ backup_service_path }}/{{ backup_service_name }}.timer'
+    dest: '{{ s3cmd_upload_service_path }}/{{ s3cmd_upload_service_name }}.timer'
     mode: 0644

 - name: Reload systemctl
   command: systemctl daemon-reload

-- name: '(Re)start fetching service: {{ backup_service_name }}'
+- name: '(Re)start upload service: {{ s3cmd_upload_service_name }}'
   service:
-    name: '{{ backup_service_name }}.service'
+    name: '{{ s3cmd_upload_service_name }}.service'
     state: started
     enabled: true

-- name: 'Enable the service timer: {{ backup_service_name }}'
+- name: 'Enable the service timer: {{ s3cmd_upload_service_name }}'
   systemd:
-    name: '{{ backup_service_name }}.timer'
+    name: '{{ s3cmd_upload_service_name }}.timer'
     state: started
     enabled: yes
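A quick sanity check after these tasks run could look like the sketch below; the unit name assumes the default `s3cmd_upload_name: 'default'`:

```
# Show the installed unit and confirm the timer is enabled.
systemctl cat upload-default.timer
systemctl is-enabled upload-default.timer
```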
@@ -1,14 +1,14 @@
 [Unit]
-Description="Service for uploading {{ backup_name }} backups to s3 buckets."
+Description="Service for uploading {{ s3cmd_upload_name }} backups to s3 buckets."
-Documentation=https://github.com/status-im/infra-role-s3cmd-backup
+Documentation=https://github.com/status-im/infra-role-s3cmd-upload
 Requires=network-online.target
 After=network-online.target
-{% if backup_service_extra_after is defined %}
+{% if s3cmd_upload_service_extra_after is defined %}
-After={{ backup_service_extra_after }}
+After={{ s3cmd_upload_service_extra_after }}
 {% endif %}

 [Service]
-User={{ backup_service_user }}
+User={{ s3cmd_upload_service_user }}
-ExecStart={{ backup_script }}
+ExecStart={{ s3cmd_upload_script }}
 Type=oneshot
-TimeoutStartSec={{ backup_service_start_timeout }}
+TimeoutStartSec={{ s3cmd_upload_service_start_timeout }}
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-
-BACKUPS_NUM='{{ backup_number }}'
-BACKUPS_DIR='{{ backup_directory }}'
-BUCKET_NAME='{{ backup_bucket_name }}'
-# Find most recent archive
-ARCHIVES=$(ls -Art ${BACKUPS_DIR} | tail -n ${BACKUPS_NUM})
-
-for ARCHIVE in ${ARCHIVES}; do
-  echo "Uploading: ${ARCHIVE} >> ${BUCKET_NAME}"
-  /usr/bin/s3cmd put "${BACKUPS_DIR}/${ARCHIVE}" "${BUCKET_NAME}"
-done
@@ -2,7 +2,7 @@
 After=multi-user.target

 [Timer]
-OnCalendar={{ backup_timer_frequency }}
+OnCalendar={{ s3cmd_upload_timer_frequency }}
 Persistent=yes

 [Install]
@@ -1,11 +1,11 @@
 [default]
-host_base = {{ backup_base_domain }}
+host_base = {{ s3cmd_upload_base_domain }}
-host_bucket = %(bucket)s.{{ backup_base_domain }}
+host_bucket = %(bucket)s.{{ s3cmd_upload_base_domain }}
-access_key = {{ backup_access_key }}
+access_key = {{ s3cmd_upload_access_key }}
-secret_key = {{ backup_secret_key }}
+secret_key = {{ s3cmd_upload_secret_key }}
-{% if backup_encryption_pass %}
+{% if s3cmd_upload_encryption_pass %}
 encrypt = True
-gpg_passphrase = {{ backup_encryption_pass }}
+gpg_passphrase = {{ s3cmd_upload_encryption_pass }}
 {% else %}
 encrypt = False
 {% endif %}
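As a hedged aside, the rendered s3cmd configuration could be verified roughly like this, assuming the role writes it to the service user's `~/.s3cfg` (the task that renders it is not part of this diff) and the example bucket from the README:

```
# List the target bucket with the generated config to confirm the keys and endpoint work.
s3cmd --config ~/.s3cfg ls s3://my-app-backups
```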
templates/upload.sh.j2 — 12 lines added (new file)

@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+
+BACKUPS_NUM='{{ s3cmd_upload_number }}'
+BACKUPS_DIR='{{ s3cmd_upload_directory }}'
+BUCKET_NAME='{{ s3cmd_upload_bucket_name }}'
+# Find most recent archive
+ARCHIVES=$(ls -Art ${BACKUPS_DIR} | tail -n ${BACKUPS_NUM})
+
+for ARCHIVE in ${ARCHIVES}; do
+  echo "Uploading: ${ARCHIVE} >> ${BUCKET_NAME}"
+  /usr/bin/s3cmd put "${BACKUPS_DIR}/${ARCHIVE}" "s3://${BUCKET_NAME}"
+done
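For illustration, this is roughly what `upload.sh.j2` renders to with the README example values (`s3cmd_upload_number: 1`, `s3cmd_upload_directory: '/var/tmp/backups'`, `s3cmd_upload_bucket_name: 'my-app-backups'`); actual output depends on the variables you pass:

```
#!/usr/bin/env bash

# Example values substituted from the README vars; directory and bucket are assumptions.
BACKUPS_NUM='1'
BACKUPS_DIR='/var/tmp/backups'
BUCKET_NAME='my-app-backups'
# Find most recent archive
ARCHIVES=$(ls -Art ${BACKUPS_DIR} | tail -n ${BACKUPS_NUM})

for ARCHIVE in ${ARCHIVES}; do
  echo "Uploading: ${ARCHIVE} >> ${BUCKET_NAME}"
  /usr/bin/s3cmd put "${BACKUPS_DIR}/${ARCHIVE}" "s3://${BUCKET_NAME}"
done
```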