Commit 32b629703f3007f641b273bb723502be3268802d

Authored by nollieheel
Committed by Earth Ugat
1 parent ec6c0fc7

Bump to v0.2.0

  1 +# 0.2.0
  2 +
  3 +Add definition aws_tar_extract to abstract restoring and unpacking backups.
  4 +
1 5 # 0.1.0
2 6
3 7 Initial release of backup-file2s3
... ...
... ... @@ -2,6 +2,8 @@
2 2
3 3 Installs a script that backs up one or more directories into an S3 bucket. Also sets up the cronjob to regularly run said script.
4 4
  5 +There is also a definition, `aws_tar_extract`, that recovers those backup files and unpacks them.
  6 +
5 7 ## Supported Platforms
6 8
7 9 Ubuntu 14.04
... ... @@ -61,6 +63,19 @@ Include `backup-file2s3` in your node's `run_list`:
61 63 }
62 64 ```
63 65
  66 +### aws_tar_extract
  67 +
  68 +The following example gets `mydir.tar.gz` from the given S3 region and bucket and unpacks it into `/opt/src`:
  69 +
  70 +```ruby
  71 +aws_tar_extract 'mydir' do
  72 + region 'us-east-1'
  73 + bucket 'my-bucket'
  74 + target_dir '/opt/src'
  75 + creates '/opt/src/mydir/index.html'
  76 +end
  77 +```
  78 +
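  +Because the tarball name defaults to the resource name, the `file` parameter lets the two differ. A minimal sketch, assuming the same bucket layout as above:
  +
  +```ruby
  +aws_tar_extract 'restore-mydir' do
  +  file 'mydir'   # fetches mydir.tar.gz even though the resource is named differently
  +  region 'us-east-1'
  +  bucket 'my-bucket'
  +  target_dir '/opt/src'
  +  creates '/opt/src/mydir/index.html'
  +end
  +```
  +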
64 79 ## License and Authors
65 80
66 81 Author:: Earth U. (<sysadmin@chromedia.com>)
... ...
  1 +#
  2 +# Author:: Earth U (<sysadmin@chromedia.com>)
  3 +# Cookbook Name:: backup-file2s3
  4 +# Definition:: aws_tar_extract
  5 +#
  6 +# Copyright 2016, Chromedia Far East, Inc.
  7 +#
  8 +# Licensed under the Apache License, Version 2.0 (the "License");
  9 +# you may not use this file except in compliance with the License.
  10 +# You may obtain a copy of the License at
  11 +#
  12 +# http://www.apache.org/licenses/LICENSE-2.0
  13 +#
  14 +# Unless required by applicable law or agreed to in writing, software
  15 +# distributed under the License is distributed on an "AS IS" BASIS,
  16 +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  17 +# See the License for the specific language governing permissions and
  18 +# limitations under the License.
  19 +#
  20 +
  21 +# Gets a tarball from AWS S3, then unpacks it into a directory.
  22 +# Parameters (:bucket, :target_dir, and :creates are mandatory):
  23 +#   :name | :file => Name of the backup tarball, without the extension
  24 +#   :region       => AWS region (default: 'us-east-1')
  25 +#   :bucket       => AWS bucket
  26 +#   :target_dir   => Where the tarball is to be unpacked. Created if it
  27 +#                    does not exist.
  28 +#   :creates      => A file path used for idempotency
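  +#
  +# Example (paths are illustrative; mirrors the README):
  +#   aws_tar_extract 'mydir' do
  +#     region     'us-east-1'
  +#     bucket     'my-bucket'
  +#     target_dir '/opt/src'
  +#     creates    '/opt/src/mydir/index.html'
  +#   end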
  29 +
  30 +define :aws_tar_extract,
  31 + :file => nil, # default is params[:name]
  32 + :region => 'us-east-1',
  33 + :bucket => nil,
  34 + :target_dir => nil,
  35 + :creates => nil do
  36 + file = params[:file] || params[:name]
  37 +
  38 + tmp_dir = ::File.join(Chef::Config[:file_cache_path], 'backups')
  39 + filetgz = "#{tmp_dir}/#{file}.tar.gz"
  40 +
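  + # Bring in the cookbooks that provide the awscli_s3_file and tar_extract resources.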
  41 + include_recipe 'awscli'
  42 + include_recipe 'tar'
  43 +
  44 + [ tmp_dir, params[:target_dir] ].each do |ndir|
  45 + directory ndir do
  46 + recursive true
  47 + end
  48 + end
  49 +
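  + # NOTE: this check runs while the resource collection is being compiled,
  + # since Chef definitions are expanded at compile time.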
  50 + unless ::File.exist?(params[:creates])
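  + # Fetch the tarball from S3 into the local cache directory.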
  51 + awscli_s3_file filetgz do
  52 + region params[:region]
  53 + bucket params[:bucket]
  54 + key "#{file}.tar.gz"
  55 + end
  56 +
  57 + tar_extract filetgz do
  58 + action :extract_local
  59 + target_dir params[:target_dir]
  60 + creates params[:creates]
  61 + end
  62 + end
  63 +end
... ...
... ... @@ -4,9 +4,10 @@ maintainer_email 'sysadmin@chromedia.com'
4 4 license 'Apache License'
5 5 description 'Creates a script to backup directories into an S3 bucket.'
6 6 long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
7   -version '0.1.0'
  7 +version '0.2.0'
8 8
9 9 depends 'awscli', '~> 1.0.1'
10 10 depends 'cron', '~> 1.7.4'
  11 +depends 'tar', '~> 0.7.0'
11 12
12 13 supports 'ubuntu', '>= 14.04'
... ...
... ... @@ -6,6 +6,10 @@
6 6
7 7 set -e
8 8
  +# Best-effort lock (not atomic): if a marker file from a previous run
  +# still exists, assume a backup is already in progress and bail out.
  +# A run that dies before the final cleanup leaves its marker behind.
  9 +suffix=.backup_file_to_s3
  10 +for f in /tmp/*"$suffix" ; do if [[ -f "$f" ]] ; then exit 200 ; fi ; done
  11 +tmp_file=$( mktemp --suffix "$suffix" )
  12 +
9 13 log_dir=<%= @log_dir %>
10 14 if [[ ! -d "$log_dir" ]] ; then
11 15 mkdir -p "$log_dir"
... ... @@ -98,8 +102,9 @@ for dirx in "${tar_dirs[@]}" ; do
98 102 tar_dir "$dirx"
99 103 upload_to_s3 "$dirx"
100 104 else
101   - echo "$(date) : WARNING : Directory ${dirx} does not exist"
  105 + echo "$(date) : WARNING : Directory ${dirx} does not exist. Skipping."
102 106 fi
103 107 done
104 108
  109 +rm "$tmp_file"  # release the lock marker
105 110 echo "$(date) : Done"
... ...