#!/bin/bash
# Generated by Chef.
#
# Perform mysqldump on databases, optionally encrypt them,
# and upload the resulting backup files into an S3 bucket.
#
# This script is not meant to be run manually,
# but instead through a regular cron job.
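#
# Purely as an illustration (the install path, schedule, and log file are
# hypothetical, not dictated by this template), a crontab entry running
# the rendered script might look like:
#
#   30 2 * * * root /usr/local/bin/backup_db_to_s3.sh >> /var/log/backup_db_to_s3.log 2>&1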

set -e

suffix=.backup_db_to_s3
# Poor man's lock: refuse to start while a marker file from a previous run
# still exists. (A glob inside '[ -f ... ]' breaks as soon as more than one
# file matches, so test for matches with compgen instead.) The marker is
# only removed on successful completion, so a stale file left behind by a
# failed run must be cleaned up manually.
if compgen -G "/tmp/*${suffix}" > /dev/null ; then
    ( >&2 echo "[ERROR] Another operation might still be in progress" )
    exit 200
fi
tmp_file=$( mktemp --suffix="$suffix" )

<% bak_dir = "#{Chef::Config[:file_cache_path]}/backup_db_to_s3" -%>
bak_dir=<%= bak_dir %>
db_host=<%= @db_ip %>
db_port=<%= @db_port %>
bucket=<%= @s3_bucket %>
region=<%= @s3_region %>

aws_bin=<%= @aws_bin %>
mysqldump_bin=<%= @mysqldump_bin %>
pub_key_file=<%= @pub_key_file %>

if [[ ! -d "$bak_dir" ]] ; then
    echo "$(date) : Create backup directory."
    mkdir -p "$bak_dir"
fi

# Perform mysqldump on a database.
# Args:
#   $1 = db name
#   $2 = db user
#   $3 = db password
#   $4 = dump file filename, e.g. 'mydb.sql'
export_db() {
    echo "$(date) : Export database ${1}."
    # Note: --single-transaction and --lock-tables are mutually exclusive
    # (and --opt enables --lock-tables by default), so table locking is
    # disabled explicitly to get a consistent InnoDB snapshot instead.
    # Be aware that passing the password via -p exposes it in the process
    # list; use a --defaults-extra-file if that is a concern.
    "$mysqldump_bin" -h "$db_host" -P "$db_port" -C --opt \
                     --no-create-db --single-transaction --skip-lock-tables \
                     --routines --events --triggers \
                     -u "$2" -p"$3" "$1" > "${bak_dir}/${4}"
}
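
# Illustrative call only; real arguments are rendered from @db_map below
# ('mydb', 'backup_user' and the password here are made-up placeholders):
#   export_db mydb backup_user 's3cret!' mydb.sql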

# Encrypt a file using OpenSSL and a given public key.
# The original file will be replaced by a new file, suffixed with '.enc'.
# Args:
#   $1 = compressed dump file filename, e.g. 'mydb.sql.gz'
encrypt_file() {
    echo "$(date) : Encrypt file ${1}."
    openssl smime -encrypt -binary -text -aes256 -in "${bak_dir}/${1}" \
                  -out "${bak_dir}/${1}.enc" -outform DER "${pub_key_file}"
    rm "${bak_dir}/${1}"
}
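
# Restoring requires the matching private key. A rough sketch of the
# reverse operation (file names and key path are assumptions):
#   openssl smime -decrypt -binary -text -inform DER \
#           -in mydb.sql.gz.enc -inkey /path/to/private.key -out mydb.sql.gz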

# Compress the backup file with gzip.
# Args:
#   $1 = dump file filename, e.g. 'mydb.sql'
compress_backup_file() {
    echo "$(date) : Gzip file ${1}."
    gzip "${bak_dir}/${1}"
}

# Rotate the current backups in S3.
# Args:
#   $1 = resulting dump filename, e.g. 'mydb.sql.gz', 'mydb.sql.gz.enc'
#   $2 = max number of backup files to store at a time
increment_backup_names() {
    bak_keyname=$1
    max_backups=$2

    # Column 4 of 's3 ls' output is the object key ('|| echo ""' keeps
    # 'set -e' from aborting when the listing fails).
    baks=$( "$aws_bin" --output text --region "$region" \
            s3 ls "s3://${bucket}/" | awk '{ print $4 }' || echo "" )

    echo "$(date) : Backup rotation for ${bak_keyname}."
    start=$((max_backups - 1))

    for (( x=start ; x > 0 ; x-- )) ; do
        if echo "$baks" | grep "^${bak_keyname}\\.${x}\$" ; then
            newx=$((x + 1))
            if [[ $newx -lt $max_backups ]] ; then
                "$aws_bin" --region "$region" \
                           s3 cp "s3://${bucket}/${bak_keyname}.${x}" \
                                 "s3://${bucket}/${bak_keyname}.${newx}"
            fi
        fi
    done

    if echo "$baks" | grep "^${bak_keyname}\$" ; then
        "$aws_bin" --region "$region" \
                   s3 cp "s3://${bucket}/${bak_keyname}" \
                         "s3://${bucket}/${bak_keyname}.1"
    fi
}
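
# With max_backups=3, for example, the bucket holds at most three copies
# ('mydb.sql.gz' is a placeholder name):
#   mydb.sql.gz     <- newest, uploaded after this rotation
#   mydb.sql.gz.1   <- previous run
#   mydb.sql.gz.2   <- oldest, overwritten on the next rotation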

# Upload the compressed db backup file.
# Args:
#   $1 = resulting dump filename, e.g. 'mydb.sql.gz', 'mydb.sql.gz.enc'
upload_to_s3() {
    echo "$(date) : Upload ${1} to S3 bucket ${bucket}."
    "$aws_bin" --region "$region" \
               s3 mv "${bak_dir}/${1}" "s3://${bucket}/${1}"
}
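
# Note that 's3 mv' deletes the local file once the upload succeeds, so
# "$bak_dir" is emptied again as the backups are shipped.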

# First, perform mysqldump on each database (and encrypt if desired):

<% @db_map.each do |db| -%>
<%   if db.is_a?(Array) -%>
<%     db_name = db[0] -%>
<%     db = db[1] -%>
<%   else -%>
<%     db_name = db[:db_name] -%>
<%   end -%>
export_db <%= db_name %> <%= db[:db_user] %> '<%= db[:db_pass] %>' <%= db[:bak_filename] %>
compress_backup_file <%= db[:bak_filename] %>
<%   if db[:bak_encrypted] -%>
encrypt_file <%= db[:bak_filename] %>.gz
<%   end -%>
<% end -%>
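# For a hypothetical @db_map entry such as
#   { db_name: 'mydb', db_user: 'backup_user', db_pass: 's3cret!',
#     bak_filename: 'mydb.sql', bak_encrypted: true }
# the loop above renders roughly:
#   export_db mydb backup_user 's3cret!' mydb.sql
#   compress_backup_file mydb.sql
#   encrypt_file mydb.sql.gz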

# Then upload the backup files one by one:

<% @db_map.each do |db| -%>
<%   if db.is_a?(Array) then db = db[1] end -%>
<%   if db[:bak_encrypted] -%>
<%     bfname = "#{db[:bak_filename]}.gz.enc" -%>
<%   else -%>
<%     bfname = "#{db[:bak_filename]}.gz" -%>
<%   end -%>
increment_backup_names <%= bfname %> <%= db[:bak_maxcopies] %>
upload_to_s3 <%= bfname %>

<% end -%>
rm "$tmp_file"
echo "$(date) : Done."