backup_db_to_s3.erb
#!/bin/bash
# Generated by Chef.
#
# Perform mysqldump on databases, optionally encrypt them,
# and upload the resulting backup files into an S3 bucket.
#
# This script is not meant to be run manually,
# but instead through a regular cron job.
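#
# A typical crontab entry might look like the following (schedule and
# install path are illustrative, not part of the generated script):
#
#   30 2 * * * root /usr/local/bin/backup_db_to_s3.sh >> /var/log/backup_db_to_s3.log 2>&1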
set -e
suffix=.backup_db_to_s3
# Crude lock: if a previous run's temp file is still present, assume another
# backup is in progress. compgen -G handles zero or many glob matches safely,
# unlike a bare glob inside [ -f ], which errors when several files match.
if compgen -G "/tmp/*${suffix}" > /dev/null ; then
  echo "[ERROR] Another operation might still be in progress" >&2
  exit 200
fi
tmp_file=$( mktemp --suffix "$suffix" )
<% bak_dir = "#{Chef::Config[:file_cache_path]}/backup_db_to_s3" -%>
bak_dir='<%= bak_dir %>'
db_host='<%= @db_ip %>'
db_port='<%= @db_port %>'
bucket='<%= @s3_bucket %>'
region='<%= @s3_region %>'
aws_bin='<%= @aws_bin %>'
mysqldump_bin='<%= @mysqldump_bin %>'
pub_key_file='<%= @pub_key_file %>'
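# The variables above are supplied by the Chef template resource that renders
# this file. A minimal sketch of such a resource (recipe-side; the install
# path and node attribute names are assumptions):
#
#   template '/usr/local/bin/backup_db_to_s3.sh' do
#     source 'backup_db_to_s3.erb'
#     mode '0700'
#     variables(
#       db_ip: node['backup']['db_ip'],
#       db_port: node['backup']['db_port'],
#       s3_bucket: node['backup']['s3_bucket'],
#       s3_region: node['backup']['s3_region'],
#       aws_bin: '/usr/local/bin/aws',
#       mysqldump_bin: '/usr/bin/mysqldump',
#       pub_key_file: '/etc/backup/backup_pub.pem',
#       db_map: node['backup']['databases']
#     )
#   end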
if [[ ! -d "$bak_dir" ]] ; then
  echo "$(date) : Create backup directory."
  mkdir -p "$bak_dir"
fi
# Perform mysqldump on a database.
# Args:
# $1 = db name
# $2 = db user
# $3 = db password
# $4 = dump file filename, e.g. 'mydb.sql'
export_db() {
  echo "$(date) : Export database ${1}."
  # --single-transaction and --lock-tables are mutually exclusive, so only
  # the former is passed; it yields a consistent dump for InnoDB tables.
  # Note: passing the password via -p exposes it in the process list, which
  # is acceptable only on trusted hosts.
  "$mysqldump_bin" -h "$db_host" -P "$db_port" -C --opt \
    --no-create-db --single-transaction \
    --routines --events --triggers \
    -u "$2" -p"$3" "$1" > "${bak_dir}/${4}"
}
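# To restore a backup produced by this script (names are illustrative;
# decompress first, since the dump is gzipped before upload):
#
#   gunzip -c mydb.sql.gz | mysql -h "$db_host" -P "$db_port" -u someuser -p somedb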
# Encrypt a file using OpenSSL and a given public key.
# The original file will be replaced by a new file, suffixed with '.enc'.
# Args:
# $1 = compressed dump file filename, e.g. 'mydb.sql.gz'
encrypt_file() {
  echo "$(date) : Encrypt file ${1}."
  # -binary avoids MIME canonicalization, which would corrupt gzip data;
  # the -text flag is meant for plain-text payloads and is therefore omitted.
  openssl smime -encrypt -binary -aes256 -in "${bak_dir}/${1}" \
    -out "${bak_dir}/${1}.enc" -outform DER "${pub_key_file}"
  rm "${bak_dir}/${1}"
}
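# The matching decrypt step, run wherever the private key lives (key and
# file names are illustrative):
#
#   openssl smime -decrypt -binary -inform DER \
#     -in mydb.sql.gz.enc -out mydb.sql.gz -inkey backup_private.pem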
# Compress the backup file with gzip.
# Args:
# $1 = dump file filename, e.g. 'mydb.sql'
compress_backup_file() {
  echo "$(date) : Gzip file ${1}."
  gzip "${bak_dir}/${1}"
}
# Rotate the current backups in S3.
# Args:
# $1 = resulting dump filename, e.g. 'mydb.sql.gz', 'mydb.sql.gz.enc'
# $2 = max number of backup files to store at a time
increment_backup_names() {
  bak_keyname=$1
  max_backups=$2
  baks=$( "$aws_bin" --output text --region "$region" \
    s3 ls "s3://${bucket}/" | awk '{ printf("%s\n", $4); }' || echo "" )
  echo "$(date) : Backup rotation for ${bak_keyname}."
  # Shift existing copies up by one: .1 -> .2, .2 -> .3, and so on.
  # The oldest copy (.max_backups-1) is simply overwritten.
  start=$((max_backups - 1))
  for (( x=start ; x > 0 ; x-- )) ; do
    # grep -Fqx: fixed-string, whole-line match, so the dots in the key
    # name are not treated as regex metacharacters.
    if echo "$baks" | grep -Fqx "${bak_keyname}.${x}" ; then
      newx=$((x + 1))
      if [[ $newx -lt $max_backups ]] ; then
        "$aws_bin" --region "$region" \
          s3 cp "s3://${bucket}/${bak_keyname}.${x}" \
          "s3://${bucket}/${bak_keyname}.${newx}"
      fi
    fi
  done
  if echo "$baks" | grep -Fqx "${bak_keyname}" ; then
    "$aws_bin" --region "$region" \
      s3 cp "s3://${bucket}/${bak_keyname}" \
      "s3://${bucket}/${bak_keyname}.1"
  fi
}
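# Illustration: with max_backups=3 and keys mydb.sql.gz, mydb.sql.gz.1 and
# mydb.sql.gz.2 already in the bucket, the loop copies mydb.sql.gz.1 to
# mydb.sql.gz.2 (overwriting the oldest copy), then the final check copies
# mydb.sql.gz to mydb.sql.gz.1, freeing the base key for the fresh upload.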
# Upload the compressed db backup file.
# Args:
# $1 = resulting dump filename, e.g. 'mydb.sql.gz', 'mydb.sql.gz.enc'
upload_to_s3() {
  echo "$(date) : Upload ${1} to S3 bucket ${bucket}."
  # s3 mv uploads the file and removes the local copy on success.
  "$aws_bin" --region "$region" \
    s3 mv "${bak_dir}/${1}" "s3://${bucket}/${1}"
}
# First, perform mysqldump on each database (and encrypt if desired):
<%# @db_map may be a Hash (db name => settings) or an Array of settings hashes carrying their own :db_name key -%>
<% @db_map.each do |db| -%>
<% if db.is_a?(Array) -%>
<% db_name = db[0] -%>
<% db = db[1] -%>
<% else -%>
<% db_name = db[:db_name] -%>
<% end -%>
<% unless db[:backup] == false -%>
export_db '<%= db_name %>' '<%= db[:db_user] %>' '<%= db[:db_pass] %>' '<%= db[:bak_filename] %>'
compress_backup_file '<%= db[:bak_filename] %>'
<% if db[:bak_encrypted] -%>
encrypt_file '<%= db[:bak_filename] %>.gz'
<% end -%>
<% end -%>
<% end -%>
# Then upload the backup files one by one:
<% @db_map.each do |db| -%>
<% db = db[1] if db.is_a?(Array) -%>
<% bfname = db[:bak_encrypted] ? "#{db[:bak_filename]}.gz.enc" : "#{db[:bak_filename]}.gz" -%>
<% unless db[:backup] == false -%>
increment_backup_names '<%= bfname %>' <%= db[:bak_maxcopies] %>
upload_to_s3 '<%= bfname %>'
<% end -%>
<% end -%>
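<%# For a hypothetical @db_map entry such as -%>
<%#   'mydb' => { db_user: 'bak', db_pass: '...', bak_filename: 'mydb.sql', bak_encrypted: true, bak_maxcopies: 3 } -%>
<%# the two loops above render roughly to: -%>
<%#   export_db 'mydb' 'bak' '...' 'mydb.sql' -%>
<%#   compress_backup_file 'mydb.sql' -%>
<%#   encrypt_file 'mydb.sql.gz' -%>
<%#   increment_backup_names 'mydb.sql.gz.enc' 3 -%>
<%#   upload_to_s3 'mydb.sql.gz.enc' -%>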
rm "$tmp_file"
echo "$(date) : Done."