#!/bin/bash
# Generated by Chef from backup_db_to_s3.erb.
#
# Perform mysqldump on databases and upload the
# resulting backup files into an S3 bucket.

set -e

# Crude lock: if a temp file from a previous run is still present, a backup
# is either still running or has failed; bail out.
suffix=.backup_db_to_s3
compgen -G "/tmp/*${suffix}" > /dev/null && exit 200
tmp_file=$( mktemp --suffix="$suffix" )

<% bak_dir = "#{Chef::Config[:file_cache_path]}/backup_db_to_s3" -%>
bak_dir="<%= bak_dir %>"
db_host="<%= @db_ip %>"
db_port="<%= @db_port %>"
bucket="<%= @s3_bucket %>"
region="<%= @s3_region %>"

aws_bin="<%= @aws_bin %>"
mysqldump_bin="<%= @mysqldump_bin %>"

log_dir=/var/log/backup_db_to_s3
if [[ ! -d "$log_dir" ]] ; then
    mkdir -p "$log_dir"
fi

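# Redirect all further output to the log file; the saved descriptors 3 and 4
# restore the original stdout/stderr when the script exits or is interrupted.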
exec 3>&1 4>&2
trap 'exec 2>&4 1>&3' 0 1 2 3
exec 1>>"${log_dir}/backup_db_to_s3.log" 2>&1

if [[ ! -d "$bak_dir" ]] ; then
    echo "$(date) : Create backup directory."
    mkdir -p "$bak_dir"
fi

# Perform mysqldump on a database.
# Args:
#   $1 = db name
#   $2 = db user
#   $3 = db password
#   $4 = dump file filename, e.g. 'mydb.sql'
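# Connects to ${db_host}:${db_port} and writes the dump to ${bak_dir}/${4}.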
export_db() {
    echo "$(date) : Export database ${1}."
    "$mysqldump_bin" -h "$db_host" -P "$db_port" -C --opt \
                     --no-create-db --single-transaction --lock-tables \
                     --routines --events --triggers \
                     -u "$2" -p"$3" "$1" > "${bak_dir}/${4}"
}

# Compress the backup file with gzip.
# Args:
#   $1 = dump file filename, e.g. 'mydb.sql'
compress_backup_file() {
    echo "$(date) : Gzip file ${1}."
    gzip "${bak_dir}/${1}"
}

# Rotate the current backups in S3.
# Args:
#   $1 = dump file filename, e.g. 'mydb.sql'
#   $2 = max number of backup files to store at a time
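# Existing copies are shifted up one slot (mydb.sql.gz -> mydb.sql.gz.1,
# mydb.sql.gz.1 -> mydb.sql.gz.2, and so on); once $2 copies exist, the
# oldest copy is overwritten.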
increment_backup_names() {
    bak_keyname="${1}.gz"
    max_backups=$2

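    # List every object key in the bucket and keep the ones that start with
    # this backup's key name (empty if no previous backup exists).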
    baks=$( "$aws_bin" --output text --region "$region" \
                       s3api list-objects --bucket "$bucket" \
            | grep '^CONTENTS' | cut -f3 | grep "^${bak_keyname}" || echo "" )

    echo "$(date) : Backup rotation for ${bak_keyname}."
    start=$((max_backups - 1))

    for (( x=start ; x > 0 ; x-- )) ; do
        if echo "$baks" | grep -xF "${bak_keyname}.${x}" ; then
            newx=$((x + 1))
            if [[ $newx -lt $max_backups ]] ; then
                "$aws_bin" --region "$region" \
                           s3 cp "s3://${bucket}/${bak_keyname}.${x}" \
                                 "s3://${bucket}/${bak_keyname}.${newx}"
            fi
        fi
    done

    if echo "$baks" | grep -xF "$bak_keyname" ; then
        "$aws_bin" --region "$region" \
                   s3 cp "s3://${bucket}/${bak_keyname}" \
                         "s3://${bucket}/${bak_keyname}.1"
    fi
}

# Upload the compressed db backup file.
# Args:
#   $1 = dump file filename, e.g. 'mydb.sql'
upload_to_s3() {
    echo "$(date) : Upload ${1}.gz to S3 bucket ${bucket}."
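    # "aws s3 mv" removes the local .gz after a successful upload.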
    "$aws_bin" --region "$region" \
               s3 mv "${bak_dir}/${1}.gz" "s3://${bucket}/${1}.gz"
}

# First, perform mysqldump on each database.
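<%# @db_map may be a Hash (db name => attrs) or an Array of attr Hashes with :db_name. -%>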
<% @db_map.each do |db| -%>
<%  if db.is_a?(Array) -%>
<%    db_name = db[0] -%>
<%    db = db[1] -%>
<%  else -%>
<%    db_name = db[:db_name] -%>
<%  end -%>
export_db <%= db_name %> <%= db[:db_user] %> '<%= db[:db_pass] %>' <%= db[:bak_filename] %>
<% end -%>

# Then compress and upload the backup files one by one.
<% @db_map.each do |db| -%>
<%  if db.is_a?(Array) then db = db[1] end -%>
compress_backup_file <%= db[:bak_filename] %>
increment_backup_names <%= db[:bak_filename] %> <%= db[:bak_maxcopies] %>
upload_to_s3 <%= db[:bak_filename] %>

<% end -%>
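# Remove the marker file only on success; if anything above failed, the stale
# temp file makes later runs exit 200 until it is removed.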
rm "$tmp_file"
echo "$(date) : Done."