Skip to content
This repository has been archived by the owner on Sep 12, 2023. It is now read-only.

Commit

Permalink
Added gsutil as backup method.
Browse files Browse the repository at this point in the history
  • Loading branch information
socketwench committed Jul 7, 2020
1 parent 3c64826 commit 69337c0
Show file tree
Hide file tree
Showing 8 changed files with 239 additions and 90 deletions.
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM alpine:3.9
FROM alpine:3.10
MAINTAINER tess@ten7.com

# Update the package list and install Ansible.
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -453,7 +453,7 @@ Each item in the list is an s3-to-s3 backup to perform, where:
* **srcRegion** is the S3 region in which `srcBucket` resides. Optional.
* **srcEndpoint** is the S3 API endpoint to use for the source bucket. Optional, defaults to AWS S3.
* **delete** specifies if files not present in the source bucket should be deleted in the target bucket. Optional, defaults to true.
* **method** specifies the command to use to perform the sync. Must be `awcli` or `s3cmd`. Optional, default is `s3cmd`.
* **method** specifies the command to use to perform the sync. Must be `awscli`, `gsutil` or `s3cmd`. Optional, default is `s3cmd`.

By design, the S3-to-S3 backup is always performed *last* in Tractorbeam. This allows you to mirror previous backups easily.

Expand Down
15 changes: 15 additions & 0 deletions ansible/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,15 +27,27 @@
- "py3-mysqlclient"
- "py3-pip"
- "rsync"
- "tree"
notify:
- clear apk cache
- name: Install build deps
shell: >
apk add --virtual build-deps
gcc
libffi-dev
python3-dev
linux-headers
musl-dev
openssl-dev
notify: remove build deps
- name: Install the AWS cli
pip:
name:
- "botocore"
- "boto3"
- "awscli"
- "s3cmd"
- "gsutil"
extra_args: "--ignore-installed"
- name: Change the ownership of s3cmd due to permission wonkiness
file:
Expand Down Expand Up @@ -93,6 +105,9 @@
loop:
- "/home/backup/.ssh"
handlers:
- name: remove build deps
shell: >
apk del build-deps
- name: clear apk cache
file:
path: "{{ item }}"
Expand Down
2 changes: 1 addition & 1 deletion ansible/group_vars/backup.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
---
flighdeck_motd_name: "Tractorbeam 2.3.0"
flighdeck_motd_name: "Tractorbeam 2.4.0"

flightdeck_groups:
- name: "backup"
Expand Down
10 changes: 9 additions & 1 deletion ansible/roles/tractorbeam/tasks/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -99,9 +99,17 @@
# S3 to S3 backups always run last!
#
- name: Work with s3 backups
include_tasks: "s3.yml"
include_tasks: "{{ _task_file }}"
vars:
_retain_count: "{{ _backupSet.retainCount }}"
_task_file: "\
{% if _backup.method | default('s3cmd') == 'awscli' %}\
s3/awscli.yml\
{% elif _backup.method | default('s3cmd') == 'gsutil' %}\
s3/gsutil.yml\
{% else %}\
s3/s3cmd.yml\
{% endif %}"
loop: "{{ tractorbeam.s3 | default([]) }}"
loop_control:
loop_var: _backup
Expand Down
102 changes: 102 additions & 0 deletions ansible/roles/tractorbeam/tasks/s3/awscli.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
---
# Mirror one S3 bucket into another using the AWS CLI, staging the objects
# through a local directory (a tempfile dir, or `cacheDir` when configured).
# Expects `_backup` (one entry of `tractorbeam.s3`) and `_backup_timestamp`
# to be supplied by the including task file (tasks/main.yml).
- name: Create a temp directory to stage the backup
  tempfile:
    state: directory
    prefix: "s3-{{ _backup.bucket }}-{{ _backup_timestamp }}"
  register: _s3_temp_dir
  when:
    - _backup.cacheDir is not defined
- name: Create a cache directory for the backup
  file:
    state: directory
    path: "{{ _backup.cacheDir }}"
    owner: "backup"
    group: "backup"
    # Fixed typo: was "g=rxw". Grants rwx to user, group, and other.
    mode: "u=rwx,g=rwx,o=rwx"
  when:
    - _backup.cacheDir is defined
- name: Sync files from source S3 using awscli
  shell: >
    aws s3 sync
    {% for _exclude in _backup.excludes | default(_tractorbeam_default_files_excludes) %}
    --exclude "{{ _exclude }}"{{ '' }}
    {% endfor %}
    {% if _backup.delete | default(true) %}
    --delete
    {% endif %}
    {% if (flightdeck_debug | default(false)) != true %}
    --quiet
    {# env lookups return strings; cast before comparing (was `== 4`, always false) #}
    {% elif lookup('env', 'ANSIBLE_VERBOSITY') | int == 4 %}
    --only-show-errors
    {% endif %}
    {% if _backup.srcEndpoint is defined %}
    --endpoint-url {{ _backup.srcEndpoint }}
    {% endif %}
    {% if _backup.srcRegion is defined %}
    --region {{ _backup.srcRegion }}
    {% endif %}
    s3://{{ _backup.srcBucket }}/{% if _backup.prefix is defined %}{{ _backup.prefix }}/{% endif %}
    {{ _backup.cacheDir | default(_s3_temp_dir.path) }}
  environment:
    # Credentials may come from a file (e.g. a mounted secret) or inline vars.
    # NOTE: `omit` only works when it is a task argument's *entire* value; inside
    # a string template it leaks the omit placeholder, so default to '' instead.
    AWS_ACCESS_KEY_ID: "\
      {% if _backup.srcAccessKeyFile is defined %}\
      {{ lookup('file', _backup.srcAccessKeyFile) }}\
      {% else %}\
      {{ _backup.srcAccessKey | default('') }}\
      {% endif %}"
    AWS_SECRET_ACCESS_KEY: "\
      {% if _backup.srcSecretKeyFile is defined %}\
      {{ lookup('file', _backup.srcSecretKeyFile) }}\
      {% else %}\
      {{ _backup.srcSecretKey | default('') }}\
      {% endif %}"
  register: _froms3_result
  until: _froms3_result.rc == 0
  retries: "{{ _backup.retryCount | default(3) }}"
  delay: "{{ _backup.retryDelay | default(30) }}"
- name: Sync directory to S3 using awscli
  shell: >
    aws s3 sync
    {% for _exclude in _backup.excludes | default(_tractorbeam_default_files_excludes) %}
    --exclude "{{ _exclude }}"{{ '' }}
    {% endfor %}
    {% if _backup.delete | default(true) %}
    --delete
    {% endif %}
    {% if (flightdeck_debug | default(false)) != true %}
    --quiet
    {% elif lookup('env', 'ANSIBLE_VERBOSITY') | int == 4 %}
    --only-show-errors
    {% endif %}
    {% if _backup.endpoint is defined %}
    --endpoint-url {{ _backup.endpoint }}
    {% endif %}
    {% if _backup.region is defined %}
    --region {{ _backup.region }}
    {% endif %}
    {{ _backup.cacheDir | default(_s3_temp_dir.path) }}/
    s3://{{ _backup.bucket }}/{{ _backup.prefix }}
  environment:
    AWS_ACCESS_KEY_ID: "\
      {% if _backup.accessKeyFile is defined %}\
      {{ lookup('file', _backup.accessKeyFile) }}\
      {% else %}\
      {{ _backup.accessKey | default('') }}\
      {% endif %}"
    AWS_SECRET_ACCESS_KEY: "\
      {% if _backup.secretKeyFile is defined %}\
      {{ lookup('file', _backup.secretKeyFile) }}\
      {% else %}\
      {{ _backup.secretKey | default('') }}\
      {% endif %}"
  register: _tos3_result
  until: _tos3_result.rc == 0
  retries: "{{ _backup.retryCount | default(3) }}"
  delay: "{{ _backup.retryDelay | default(30) }}"
- name: delete stage directory
  file:
    # Was the literal string "_s3_temp_dir.path", which never matched the
    # staged directory and left it behind; must be a Jinja expression.
    path: "{{ _s3_temp_dir.path }}"
    state: absent
  when:
    - _s3_temp_dir.path is defined
# NOTE(review): filename is spelled "healhcheck.yml" — presumably matches an
# existing task file of the same (misspelled) name; confirm before renaming.
- include_tasks: "healhcheck.yml"
110 changes: 110 additions & 0 deletions ansible/roles/tractorbeam/tasks/s3/gsutil.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
---
# Mirror one S3 bucket into another using gsutil, staging the objects through
# a local directory (a tempfile dir, or `cacheDir` when configured). gsutil
# reads S3 credentials from ~/.boto, so that file is rewritten before each
# direction of the sync. Expects `_backup` (one entry of `tractorbeam.s3`)
# and `_backup_timestamp` from the including task file (tasks/main.yml).
- name: Create a temp directory to stage the backup
  tempfile:
    state: directory
    prefix: "s3-{{ _backup.bucket }}-{{ _backup_timestamp }}"
  register: _s3_temp_dir
  when:
    - _backup.cacheDir is not defined
- name: Create a cache directory for the backup
  file:
    state: directory
    path: "{{ _backup.cacheDir }}"
    owner: "backup"
    group: "backup"
    # Fixed typo: was "g=rxw". Grants rwx to user, group, and other.
    mode: "u=rwx,g=rwx,o=rwx"
  when:
    - _backup.cacheDir is defined
- name: Create the .boto file for the source sync
  copy:
    content: |
      [Credentials]
      aws_access_key_id = {{ _access_key }}
      aws_secret_access_key = {{ _secret_key }}
      {% if _backup.srcEndpoint is defined %}
      {# strip any URL scheme; boto wants a bare hostname. Was '[A-z]*://',
         whose [A-z] class also matches the [ \ ] ^ _ ` characters. #}
      s3_host = {{ _backup.srcEndpoint | regex_replace('^[A-Za-z]+://', '') }}
      {% endif %}
    dest: "/home/backup/.boto"
    owner: "backup"
    group: "backup"
    mode: "u=rw,g=rw,o="
  vars:
    # NOTE: `omit` only works as a task argument's *entire* value; inside a
    # string template it leaks the omit placeholder, so default to '' instead.
    _access_key: "\
      {% if _backup.srcAccessKeyFile is defined %}\
      {{ lookup('file', _backup.srcAccessKeyFile) }}\
      {% else %}\
      {{ _backup.srcAccessKey | default('') }}\
      {% endif %}"
    _secret_key: "\
      {% if _backup.srcSecretKeyFile is defined %}\
      {{ lookup('file', _backup.srcSecretKeyFile) }}\
      {% else %}\
      {{ _backup.srcSecretKey | default('') }}\
      {% endif %}"
- name: Sync files from source S3 using gsutil
  shell: >
    gsutil -m rsync
    {# NOTE(review): gsutil's -x takes a Python regex, not a glob, and some
       gsutil versions accept -x at most once — confirm against the installed
       version before relying on multiple excludes. #}
    {% for _exclude in _backup.excludes | default([]) %}
    -x "{{ _exclude }}"{{ '' }}
    {% endfor %}
    {% if _backup.delete | default(true) %}
    -d
    {% endif %}
    -r
    -C
    s3://{{ _backup.srcBucket }}/{% if _backup.prefix is defined %}{{ _backup.prefix }}/{% endif %}
    {{ _backup.cacheDir | default(_s3_temp_dir.path) }}/
  register: _froms3_result
  until: _froms3_result.rc == 0
  retries: "{{ _backup.retryCount | default(3) }}"
  delay: "{{ _backup.retryDelay | default(30) }}"
- name: Create the .boto file for the destination sync
  copy:
    content: |
      [Credentials]
      aws_access_key_id = {{ _access_key }}
      aws_secret_access_key = {{ _secret_key }}
      {% if _backup.endpoint is defined %}
      s3_host = {{ _backup.endpoint | regex_replace('^[A-Za-z]+://', '') }}
      {% endif %}
    dest: "/home/backup/.boto"
    owner: "backup"
    group: "backup"
    mode: "u=rw,g=rw,o="
  vars:
    _access_key: "\
      {% if _backup.accessKeyFile is defined %}\
      {{ lookup('file', _backup.accessKeyFile) }}\
      {% else %}\
      {{ _backup.accessKey | default('') }}\
      {% endif %}"
    _secret_key: "\
      {% if _backup.secretKeyFile is defined %}\
      {{ lookup('file', _backup.secretKeyFile) }}\
      {% else %}\
      {{ _backup.secretKey | default('') }}\
      {% endif %}"
- name: Sync directory to S3 using gsutil
  shell: >
    gsutil -m rsync
    {% for _exclude in _backup.excludes | default([]) %}
    -x "{{ _exclude }}"{{ '' }}
    {% endfor %}
    {% if _backup.delete | default(true) %}
    -d
    {% endif %}
    -r
    -C
    {{ _backup.cacheDir | default(_s3_temp_dir.path) }}/
    s3://{{ _backup.bucket }}/{{ _backup.prefix }}/
  register: _tos3_result
  until: _tos3_result.rc == 0
  retries: "{{ _backup.retryCount | default(3) }}"
  delay: "{{ _backup.retryDelay | default(30) }}"
- name: delete stage directory
  file:
    # Was the literal string "_s3_temp_dir.path", which never matched the
    # staged directory and left it behind; must be a Jinja expression.
    path: "{{ _s3_temp_dir.path }}"
    state: absent
  when:
    - _s3_temp_dir.path is defined
# NOTE(review): filename is spelled "healhcheck.yml" — presumably matches an
# existing task file of the same (misspelled) name; confirm before renaming.
- include_tasks: "healhcheck.yml"
Original file line number Diff line number Diff line change
Expand Up @@ -15,88 +15,6 @@
mode: "u=rwx,g=rxw,o=rwx"
when:
- _backup.cacheDir is defined
- name: Sync files from source S3 using awscli
shell: >
aws s3 sync
{% for _exclude in _backup.excludes | default(_tractorbeam_default_files_excludes) %}
--exclude "{{ _exclude }}"{{ '' }}
{% endfor %}
{% if _backup.delete | default(true) %}
--delete
{% endif %}
{% if (flightdeck_debug | default(false)) != true %}
--quiet
{% elif lookup('env', 'ANSIBLE_VERBOSITY') == 4 %}
--only-show-errors
{% endif %}
{% if _backup.srcEndpoint is defined %}
--endpoint-url {{ _backup.srcEndpoint }}
{% endif %}
{% if _backup.srcRegion is defined %}
--region {{ _backup.srcRegion }}
{% endif %}
s3://{{ _backup.srcBucket }}/{% if _backup.prefix is defined %}{{ _backup.prefix }}/{% endif %}
{{ _backup.cacheDir | default(_s3_temp_dir.path) }}
environment:
AWS_ACCESS_KEY_ID: "\
{% if _backup.srcAccessKeyFile is defined %}\
{{ lookup('file', _backup.srcAccessKeyFile) }}\
{% else %}\
{{ _backup.srcAccessKey | default(omit) }}\
{% endif %}"
AWS_SECRET_ACCESS_KEY: "\
{% if _backup.srcSecretKeyFile is defined %}\
{{ lookup('file', _backup.srcSecretKeyFile) }}\
{% else %}\
{{ _backup.srcSecretKey | default(omit) }}\
{% endif %}"
register: _froms3_result
until: _froms3_result.rc == 0
retries: "{{ _backup.retryCount | default(3) }}"
delay: "{{ _backup.retryDelay | default(30) }}"
when:
- _backup.method | default('s3cmd') == 'awscli'
- name: Sync directory to S3 using awscli
shell: >
aws s3 sync
{% for _exclude in _backup.excludes | default(_tractorbeam_default_files_excludes) %}
--exclude "{{ _exclude }}"{{ '' }}
{% endfor %}
{% if _backup.delete | default(true) %}
--delete
{% endif %}
{% if (flightdeck_debug | default(false)) != true %}
--quiet
{% elif lookup('env', 'ANSIBLE_VERBOSITY') == 4 %}
--only-show-errors
{% endif %}
{% if _backup.endpoint is defined %}
--endpoint-url {{ _backup.endpoint }}
{% endif %}
{% if _backup.region is defined %}
--region {{ _backup.region }}
{% endif %}
{{ _backup.cacheDir | default(_s3_temp_dir.path) }}/
s3://{{ _backup.bucket }}/{{ _backup.prefix }}
environment:
AWS_ACCESS_KEY_ID: "\
{% if _backup.accessKeyFile is defined %}\
{{ lookup('file', _backup.accessKeyFile) }}\
{% else %}\
{{ _backup.accessKey | default(omit) }}\
{% endif %}"
AWS_SECRET_ACCESS_KEY: "\
{% if _backup.secretKeyFile is defined %}\
{{ lookup('file', _backup.secretKeyFile) }}\
{% else %}\
{{ _backup.secretKey | default(omit) }}\
{% endif %}"
register: _tos3_result
until: _tos3_result.rc == 0
retries: "{{ _backup.retryCount | default(3) }}"
delay: "{{ _backup.retryDelay | default(30) }}"
when:
- _backup.method | default('s3cmd') == 'awscli'
- name: Sync files from source S3 using s3cmd
shell: >
s3cmd sync
Expand Down Expand Up @@ -140,8 +58,6 @@
until: _froms3_result.rc == 0
retries: "{{ _backup.retryCount | default(3) }}"
delay: "{{ _backup.retryDelay | default(30) }}"
when:
- _backup.method | default('s3cmd') == 's3cmd'
- name: Sync directory to S3 using s3cmd
shell: >
s3cmd sync
Expand Down Expand Up @@ -186,8 +102,6 @@
until: _tos3_result.rc == 0
retries: "{{ _backup.retryCount | default(3) }}"
delay: "{{ _backup.retryDelay | default(30) }}"
when:
- _backup.method | default('s3cmd') == 's3cmd'
- name: delete stage directory
file:
path: "_s3_temp_dir.path"
Expand Down

0 comments on commit 69337c0

Please sign in to comment.