backup_file_to_s3.erb
#!/bin/bash
#
# Generated by Chef
#
# Back up directories/files into an S3 bucket
set -e
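# Poor man's lock: mktemp (below) creates a uniquely named file ending in
# $suffix in /tmp; if any such file already exists, assume another run is
# still in progress and bail out.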
suffix=.backup_file_to_s3
if compgen -G "/tmp/*${suffix}" > /dev/null ; then
  ( >&2 echo "[ERROR] Another operation might still be in progress" )
  exit 200
fi
tmp_file=$( mktemp --suffix "$suffix" )
# Remove the lock file even if a later step fails under 'set -e', so an
# aborted run does not leave a stale lock behind.
trap 'rm -f "$tmp_file"' EXIT
bucket="<%= @bucket %>"
region="<%= @region %>"
bak_dir="<%= @tmp_dir %>"
aws_bin="<%= @aws_bin %>"
tar_bin="<%= @tar_bin %>"
pub_key_file="<%= @pub_key_file %>"
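# The <%= %> placeholders above are filled in when Chef renders this template.
# A minimal sketch of the corresponding template resource (all names and
# values below are illustrative, not taken from this cookbook):
#
#   template '/usr/local/bin/backup_file_to_s3.sh' do
#     source 'backup_file_to_s3.erb'
#     mode '0755'
#     variables(
#       bucket: 'my-backup-bucket',
#       region: 'us-east-1',
#       tmp_dir: '/var/backups/s3',
#       aws_bin: '/usr/bin/aws',
#       tar_bin: '/bin/tar',
#       pub_key_file: '/etc/backup/backup_pub.pem',
#       paths: [
#         { path: '/var/src/mydir', bak_maxcopies: 14 },
#         { path: '/etc/myapp', bak_filename: 'myapp_conf', bak_encrypted: true }
#       ]
#     )
#   end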
# Create the tarball.
# Args:
# $1 = full path to be backed up, e.g. '/var/src/mydir'
# $2 = desired filename, sans '.tar.gz' extension, e.g. 'myfilename'
tar_dir() {
  bname=$( basename "$1" )
  dname=$( dirname "$1" )
  echo "$(date) : Creating tarball from ${1}"
  "$tar_bin" -C "${dname}" -czf "${bak_dir}/${2}.tar.gz" "$bname"
}
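# Example: 'tar_dir /var/src/mydir myfilename' creates
# ${bak_dir}/myfilename.tar.gz containing the 'mydir' directory.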
# Encrypt a tarball using OpenSSL and a given public key.
# The original file will be replaced by a new one with the same filename,
# suffixed with '.enc'.
#
# Given file should be located in $bak_dir.
# Args:
# $1 = filename, sans '.tar.gz' extension, e.g. 'myfilename'
encrypt_tarball() {
  echo "$(date) : Encrypt file ${1}.tar.gz"
  openssl smime -encrypt -binary -text -aes256 -in "${bak_dir}/${1}.tar.gz" \
    -out "${bak_dir}/${1}.tar.gz.enc" -outform DER "${pub_key_file}"
  rm "${bak_dir}/${1}.tar.gz"
}
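# Restoring requires the private key that matches ${pub_key_file}. A decryption
# sketch (key path and filename below are examples only):
#   openssl smime -decrypt -text -binary -inform DER \
#     -in myfilename.tar.gz.enc -out myfilename.tar.gz \
#     -inkey /path/to/backup_private_key.pem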
# Rotate the number suffixes of the current backups in S3
# Args:
# $1 = filename, sans '.tar.gz' extension, e.g. 'myfilename'
# $2 = max number of backup copies to store at a time
increment_backup_names() {
  bak_keyname=$1
  max_backups=$2
  # Backups are stored inside subdirectories (prefixes)
  # in S3, so only look at 'PRE' objects.
  baks=$( "$aws_bin" --output text --region "$region" \
    s3 ls "s3://${bucket}/" | grep '^\s*PRE' | \
    sed -e 's/^ *PRE //' -e 's/\/$//' | \
    grep "^${bak_keyname}" || echo "" )
  echo "$(date) : Backup rotation for ${bak_keyname}"
  start=$((max_backups - 1))
  for (( x=start ; x > 0 ; x-- )) ; do
    if echo "$baks" | grep -q "^${bak_keyname}\\.${x}\$" ; then
      newx=$((x + 1))
      if [[ $newx -lt $max_backups ]] ; then
        "$aws_bin" --region "$region" \
          s3 mv --recursive "s3://${bucket}/${bak_keyname}.${x}" \
          "s3://${bucket}/${bak_keyname}.${newx}"
      else
        "$aws_bin" --region "$region" \
          s3 rm --recursive "s3://${bucket}/${bak_keyname}.${x}"
      fi
    fi
  done
  if echo "$baks" | grep -q "^${bak_keyname}\$" ; then
    "$aws_bin" --region "$region" \
      s3 mv --recursive "s3://${bucket}/${bak_keyname}" \
      "s3://${bucket}/${bak_keyname}.1"
  fi
}
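# Example: with max_backups=3 and existing prefixes 'mydir', 'mydir.1' and
# 'mydir.2', one rotation removes 'mydir.2', moves 'mydir.1' to 'mydir.2' and
# 'mydir' to 'mydir.1', leaving the 'mydir' prefix free for the new upload.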
# Upload the tarball to the S3 bucket. It will be uploaded to this location:
# ${bucket}/myfilename/myfilename.tar.gz (with a '.enc' suffix if encrypted)
#
# A timestamp file will also be uploaded to this location:
# ${bucket}/myfilename/YYYY-MM-DDThh:mm:ss.txt
# Args:
# $1 = filename, sans '.tar.gz' extension, e.g. 'myfilename'
# $2 = whether the file is encrypted ('true' or 'false')
upload_to_s3() {
  bak_keyname=$1
  if [ "$2" = true ] ; then
    fname="${1}.tar.gz.enc"
  else
    fname="${1}.tar.gz"
  fi
  echo "$(date) : Upload ${fname} to S3 bucket ${bucket}"
  stamp=$( date +"%FT%T" )
  echo "Uploaded: ${stamp}" > "${bak_dir}/${stamp}.txt"
  "$aws_bin" --region "$region" \
    s3 mv "${bak_dir}/${fname}" \
    "s3://${bucket}/${bak_keyname}/${fname}"
  "$aws_bin" --region "$region" \
    s3 mv "${bak_dir}/${stamp}.txt" \
    "s3://${bucket}/${bak_keyname}/${stamp}.txt"
}
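# Example: 'upload_to_s3 myfilename true' moves ${bak_dir}/myfilename.tar.gz.enc
# to s3://${bucket}/myfilename/myfilename.tar.gz.enc and uploads a timestamp
# marker alongside it.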
if [[ ! -d "$bak_dir" ]] ; then
  mkdir -p "$bak_dir"
fi
<% @paths.each do |path| -%>
<% bname = path[:bak_filename] || ::File.basename(path[:path]) -%>
<% is_enc = path.has_key?(:bak_encrypted) ? path[:bak_encrypted] : false -%>
if [[ -d "<%= path[:path] %>" || -f "<%= path[:path] %>" ]] ; then
  increment_backup_names "<%= bname %>" <%= path[:bak_maxcopies] || 30 %>
  tar_dir "<%= path[:path] %>" "<%= bname %>"
<% if is_enc -%>
  encrypt_tarball "<%= bname %>"
<% end -%>
  upload_to_s3 "<%= bname %>" <%= is_enc %>
else
  >&2 echo "$(date) [WARNING] Path <%= path[:path] %> does not exist"
fi
<% end -%>
rm "$tmp_file"
echo "$(date) : Done"