Commit ec6c0fc78ba325b3ae505719382b9b15b1b6d4ba

Authored by Earth Ugat
1 parent 36b9f84a

First version of cookbook backup-file2s3

  1 +*~
  2 +*#
  3 +.#*
  4 +\#*#
  5 +.*.sw[a-z]
  6 +*.un~
  7 +pkg/
  8 +
  9 +# Berkshelf
  10 +.vagrant
  11 +/cookbooks
  12 +Berksfile.lock
  13 +
  14 +# Bundler
  15 +Gemfile.lock
  16 +bin/*
  17 +.bundle/*
  18 +
  19 +.kitchen/
  20 +.kitchen.local.yml
... ...
  1 +---
  2 +driver:
  3 + name: ec2
  4 + aws_ssh_key_id: cfe_stg_20160222
  5 + security_group_ids: ["sg-7f6fda18"]
  6 + region: us-west-2
  7 + availability_zone: b
  8 + subnet_id: subnet-d530d8b1
  9 + instance_type: t2.micro
  10 + associate_public_ip: true
  11 + require_chef_omnibus: true
  12 + shared_credentials_profile: earth
  13 +
  14 +provisioner:
  15 + name: chef_solo
  16 +
  17 +platforms:
  18 + - name: ubuntu-14.04
  19 + driver:
  20 + image_id: ami-50946030
  21 + transport:
  22 + username: ubuntu
  23 + ssh_key: ~/.ssh/cfe_stg_20160222.pem
  24 +
  25 +suites:
  26 + - name: default
  27 + run_list:
  28 + - recipe[backup-file2s3::default]
  29 + attributes:
... ...
# Berkshelf manifest: resolve this cookbook's dependencies (as declared
# in metadata.rb) from the public Chef Supermarket.
source "https://supermarket.chef.io"

metadata
... ...
  1 +# 0.1.0
  2 +
  3 +Initial release of backup-file2s3
... ...
# Development/test tooling for this cookbook.
source 'https://rubygems.org'

gem "test-kitchen"
gem "kitchen-vagrant"
# FIX: .kitchen.yml declares `driver: name: ec2`, which is provided by the
# 'kitchen-ec2' gem. The bare 'ec2' gem is an unrelated package and does not
# supply that Test Kitchen driver.
gem "kitchen-ec2"
... ...
  1 +# backup-file2s3-cookbook
  2 +
  3 +Installs a script that backs up one or more directories into an S3 bucket. Also sets up the cronjob to regularly run said script.
  4 +
  5 +## Supported Platforms
  6 +
  7 +Ubuntu 14.04
  8 +
  9 +## Attributes
  10 +
  11 +<table>
  12 + <tr>
  13 + <th>Key</th>
  14 + <th>Type</th>
  15 + <th>Description</th>
  16 + <th>Default</th>
  17 + </tr>
  18 + <tr>
  19 + <td><tt>['backup-file2s3']['bucket']</tt></td>
  20 + <td>String</td>
  21 + <td>The S3 bucket where backup tarballs will be stored.</td>
  22 + <td><tt>'bucketname'</tt> (placeholder &mdash; must be overridden)</td>
  23 + </tr>
  24 + <tr>
  25 + <td><tt>['backup-file2s3']['region']</tt></td>
  26 + <td>String</td>
  27 + <td>AWS region of the bucket.</td>
  28 + <td><tt>'us-east-1'</tt></td>
  29 + </tr>
  30 + <tr>
  31 + <td><tt>['backup-file2s3']['max_backups']</tt></td>
  32 + <td>Integer</td>
  33 + <td>Number of old backup tarballs to retain in S3.</td>
  34 + <td><tt>30</tt></td>
  35 + </tr>
  36 + <tr>
  37 + <td><tt>['backup-file2s3']['dirs']</tt></td>
  38 + <td>Array</td>
  39 + <td>An array of directories to be backed up.</td>
  40 + <td><tt>[]</tt></td>
  41 + </tr>
  42 + <tr>
  43 + <td><tt>['backup-file2s3']['cron']['min']</tt></td>
  44 + <td>String</td>
  45 + <td>Related cron attributes are: `hour`, `day`, `mon`, `wday`, each specifying a corresponding crontab value. This cron job will determine how often the backup script is run.</td>
  46 + <td><tt>'0'</tt></td>
  47 + </tr>
  48 +</table>
  49 +
  50 +## Usage
  51 +
  52 +### backup-file2s3::default
  53 +
  54 +Include `backup-file2s3` in your node's `run_list`:
  55 +
  56 +```json
  57 +{
  58 + "run_list": [
  59 + "recipe[backup-file2s3::default]"
  60 + ]
  61 +}
  62 +```
  63 +
  64 +## License and Authors
  65 +
  66 +Author:: Earth U. (<sysadmin@chromedia.com>)
... ...
# encoding: utf-8

require 'bundler'
require 'bundler/setup'
require 'berkshelf/thor'

# Register Test Kitchen's Thor tasks when the kitchen gem is available.
# When it is not installed, print a notice — unless running under CI,
# where ENV['CI'] is set and the notice is suppressed.
begin
  require 'kitchen/thor_tasks'
  Kitchen::ThorTasks.new
rescue LoadError
  unless ENV['CI']
    puts '>>>>> Kitchen gem not loaded, omitting tasks'
  end
end
... ...
# -*- mode: ruby -*-
# vi: set ft=ruby :

# Vagrantfile API/syntax version. Don't touch unless you know what you're doing!
VAGRANTFILE_API_VERSION = '2'

Vagrant.require_version '>= 1.5.0'

Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
  config.vm.hostname = 'backup-file2s3-berkshelf'

  # Install Chef via the vagrant-omnibus plugin when present:
  #   $ vagrant plugin install vagrant-omnibus
  if Vagrant.has_plugin?("vagrant-omnibus")
    config.omnibus.chef_version = 'latest'
  end

  # Base box; a Vagrant Cloud shorthand needs no explicit box_url.
  config.vm.box = 'chef/ubuntu-14.04'

  # Host-only network so the host can reach the VM directly via DHCP-assigned IP.
  config.vm.network :private_network, type: 'dhcp'

  # Resolve cookbooks through the vagrant-berkshelf plugin. To enable this
  # globally, add the option to your ~/.vagrant.d/Vagrantfile instead.
  # config.berkshelf.berksfile_path = "./Berksfile"
  config.berkshelf.enabled = true

  config.vm.provision :chef_solo do |chef|
    # FIX: removed the leftover `chef.json` mysql server_*_password attributes
    # emitted by the cookbook-skeleton generator — nothing in this cookbook's
    # run_list reads the 'mysql' attribute namespace.
    chef.run_list = [
      'recipe[backup-file2s3::default]'
    ]
  end
end
... ...
#
# Author:: Earth U (<sysadmin@chromedia.com>)
# Cookbook Name:: backup-file2s3
# Attributes:: default
#
# Copyright 2016, Chromedia Far East, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# S3 destination and retention policy for the backup script.
default['backup-file2s3']['bucket']      = 'bucketname'
default['backup-file2s3']['region']      = 'us-east-1'
default['backup-file2s3']['max_backups'] = 30

# Absolute paths of the directories to be tarred and uploaded to S3.
default['backup-file2s3']['dirs'] = []

# Filesystem locations used by the generated script.
default['backup-file2s3']['script_dir'] = '/etc/backup_file_to_s3'
default['backup-file2s3']['log_dir']    = '/var/log/backup_file_to_s3'
default['backup-file2s3']['tmp_dir']    = '/tmp/backup_files'

# Location of the awscli executable, selectable per platform.
default['backup-file2s3']['aws_bin'] = value_for_platform(
  'ubuntu'  => { 'default' => '/usr/local/bin/aws' },
  'default' => '/usr/local/bin/aws' # haven't tested on other platforms yet
)

# Crontab schedule for the backup job (standard crontab field strings).
default['backup-file2s3']['cron']['min']    = '0'
default['backup-file2s3']['cron']['hour']   = '0'
default['backup-file2s3']['cron']['day']    = '*'
default['backup-file2s3']['cron']['mon']    = '*'
default['backup-file2s3']['cron']['wday']   = '*'
default['backup-file2s3']['cron']['mailto'] = "''" # MAILTO='' disables cron mail

# Logrotate settings for the script's log files.
default['backup-file2s3']['logrotate']['conf_dir'] = '/etc/logrotate.d'
default['backup-file2s3']['logrotate']['options']  = [
  'weekly',
  'rotate 12',
  'missingok',
  'compress',
  'notifempty'
]
... ...
  1 +# Put files/directories that should be ignored in this file when uploading
  2 +# or sharing to the community site.
  3 +# Lines that start with '# ' are comments.
  4 +
  5 +# OS generated files #
  6 +######################
  7 +.DS_Store
  8 +Icon?
  9 +nohup.out
  10 +ehthumbs.db
  11 +Thumbs.db
  12 +
  13 +# SASS #
  14 +########
  15 +.sass-cache
  16 +
  17 +# EDITORS #
  18 +###########
  19 +\#*
  20 +.#*
  21 +*~
  22 +*.sw[a-z]
  23 +*.bak
  24 +REVISION
  25 +TAGS*
  26 +tmtags
  27 +*_flymake.*
  28 +*_flymake
  29 +*.tmproj
  30 +.project
  31 +.settings
  32 +mkmf.log
  33 +
  34 +## COMPILED ##
  35 +##############
  36 +a.out
  37 +*.o
  38 +*.pyc
  39 +*.so
  40 +*.com
  41 +*.class
  42 +*.dll
  43 +*.exe
  44 +*/rdoc/
  45 +
  46 +# Testing #
  47 +###########
  48 +.watchr
  49 +.rspec
  50 +spec/*
  51 +spec/fixtures/*
  52 +test/*
  53 +features/*
  54 +Guardfile
  55 +Procfile
  56 +
  57 +# SCM #
  58 +#######
  59 +.git
  60 +*/.git
  61 +.gitignore
  62 +.gitmodules
  63 +.gitconfig
  64 +.gitattributes
  65 +.svn
  66 +*/.bzr/*
  67 +*/.hg/*
  68 +*/.svn/*
  69 +
  70 +# Berkshelf #
  71 +#############
  72 +cookbooks/*
  73 +tmp
  74 +
  75 +# Cookbooks #
  76 +#############
  77 +CONTRIBUTING
  78 +CHANGELOG*
  79 +
  80 +# Strainer #
  81 +############
  82 +Colanderfile
  83 +Strainerfile
  84 +.colander
  85 +.strainer
  86 +
  87 +# Vagrant #
  88 +###########
  89 +.vagrant
  90 +Vagrantfile
  91 +
  92 +# Travis #
  93 +##########
  94 +.travis.yml
... ...
name             'backup-file2s3'
maintainer       'Chromedia Far East, Inc.'
maintainer_email 'sysadmin@chromedia.com'
# FIX: use the canonical short license string. The source file headers declare
# "Apache License, Version 2.0"; the bare 'Apache License' is ambiguous.
license          'Apache 2.0'
description      'Creates a script to backup directories into an S3 bucket.'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version          '0.1.0'

# Platform support declared before dependencies, per metadata.rb convention.
supports 'ubuntu', '>= 14.04'

depends 'awscli', '~> 1.0.1'
depends 'cron', '~> 1.7.4'
... ...
#
# Author:: Earth U (<sysadmin@chromedia.com>)
# Cookbook Name:: backup-file2s3
# Recipe:: default
#
# Copyright 2016, Chromedia Far East, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Installs awscli, renders the backup script, schedules it with cron,
# and sets up logrotate for its log output.

include_recipe 'awscli'

attribs = node['backup-file2s3']
scr_dir = attribs['script_dir']
sname   = 'backup_file_to_s3'

# FIX: create destination directories with first-class directory resources.
# The original used `only_if "test -d X || mkdir -p X"`, which created the
# directory as a side effect of the guard command (a guard that was
# effectively always true, so it guarded nothing).
directory scr_dir do
  recursive true
end

template "#{scr_dir}/#{sname}" do
  # Script is invoked as `bash <path>` by cron, so no execute bit is needed.
  mode '0644'
  variables(
    :aws_bin     => attribs['aws_bin'],
    :log_dir     => attribs['log_dir'],
    :tmp_dir     => attribs['tmp_dir'],
    :bucket      => attribs['bucket'],
    :region      => attribs['region'] || 'us-east-1',
    :max_backups => attribs['max_backups'] || 30,
    :dirs        => attribs['dirs']
  )
end

# Schedule the backup script via the cron cookbook's cron_d resource.
cra = attribs['cron']
cron_d sname do
  command "bash #{scr_dir}/#{sname}"
  minute  cra['min']
  hour    cra['hour']
  day     cra['day']
  month   cra['mon']
  weekday cra['wday']
  mailto  cra['mailto']
  path '/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin'
end

package 'logrotate'

loa = attribs['logrotate']

directory loa['conf_dir'] do
  recursive true
end

# Rotate the backup script's log files per the attribute-driven options.
template "#{loa['conf_dir']}/#{sname}" do
  source "#{sname}_logrotate.erb"
  variables(
    :log_dir => attribs['log_dir'],
    :options => loa['options']
  )
end
... ...
#!/bin/bash
#
# Generated by Chef
#
# Back up directories into an S3 bucket

set -e

log_dir=<%= @log_dir %>
if [[ ! -d "$log_dir" ]] ; then
  mkdir -p "$log_dir"
fi

aws_cmd=<%= @aws_bin %>

# Save the original stdout/stderr on fds 3/4, restore them on EXIT/HUP/INT/QUIT,
# then append all script output (stdout and stderr) to the log file.
exec 3>&1 4>&2
trap 'exec 2>&4 1>&3' 0 1 2 3
exec 1>>"${log_dir}/backup_file_to_s3.log" 2>&1

bucket=<%= @bucket %>
region=<%= @region %>
max_backups=<%= @max_backups %>
bak_dir=<%= @tmp_dir %>

# Array of directories to be backed up.
#
# Example:
# declare -a tar_dirs=(
#   "/path/to/dira"
#   "/another/path/to/dirb"
# )
#
# Tarball names will be the basename of each path given.
declare -a tar_dirs=(
<% @dirs.each do |dirx| -%>
  "<%= dirx %>"
<% end -%>
)

if [[ ! -d "$bak_dir" ]] ; then
  echo "$(date) : Missing backup directory. Creating."
  mkdir -p "$bak_dir"
fi

# Rotate the current backups in S3
# $1 = directory to be tarred
# Shifts name.tar.gz.N -> name.tar.gz.N+1 (oldest first), dropping copies
# that would reach $max_backups, then shifts the bare name.tar.gz to .1.
# NOTE(review): $bak_keyname is used unescaped in the grep patterns below,
# so its '.' characters match any byte; harmless for typical names, but
# confirm if stricter key matching is ever needed.
increment_backup_names() {
  fname=$( basename "$1" )
  bak_keyname="${fname}.tar.gz"

  # List current object keys for this backup name; `|| echo ""` keeps
  # `set -e` from aborting when no backups exist yet.
  baks=$( "$aws_cmd" --output text --region "$region" \
    s3api list-objects --bucket "$bucket" \
    | grep '^CONTENTS' | cut -f3 | grep "^${bak_keyname}" || echo "" )

  echo "$(date) : Backup rotation for ${bak_keyname}"
  start=$((max_backups - 1))

  for (( x=start ; x > 0 ; x-- )) ; do
    if echo "$baks" | grep "^${bak_keyname}\\.${x}\$" ; then
      newx=$((x + 1))
      if [[ $newx -lt $max_backups ]] ; then
        "$aws_cmd" --region "$region" \
          s3 cp "s3://${bucket}/${bak_keyname}.${x}" \
          "s3://${bucket}/${bak_keyname}.${newx}"
      fi
    fi
  done

  if echo "$baks" | grep "^${bak_keyname}\$" ; then
    "$aws_cmd" --region "$region" \
      s3 cp "s3://${bucket}/${bak_keyname}" \
      "s3://${bucket}/${bak_keyname}.1"
  fi
}

# Tar up the directory
# $1 = directory to be tarred
# Tars from the parent directory so the archive contains only the basename.
tar_dir() {
  fname=$( basename "$1" )
  parent=$( dirname "$1" )
  echo "$(date) : Tar up ${1}"

  tar -C "$parent" -czf "${bak_dir}/${fname}.tar.gz" "$fname"
}

# Upload (move) the local tarball into the bucket, removing the local copy.
# $1 = directory to be tarred
upload_to_s3() {
  fname=$( basename "$1" )
  echo "$(date) : Upload ${fname}.tar.gz to S3 bucket ${bucket}"

  "$aws_cmd" --region "$region" \
    s3 mv "${bak_dir}/${fname}.tar.gz" "s3://${bucket}/${fname}.tar.gz"
}

# Main loop: rotate, tar, and upload each configured directory;
# missing directories are logged and skipped.
for dirx in "${tar_dirs[@]}" ; do
  if [[ -d "$dirx" ]] ; then
    increment_backup_names "$dirx"
    tar_dir "$dirx"
    upload_to_s3 "$dirx"
  else
    echo "$(date) : WARNING : Directory ${dirx} does not exist"
  fi
done

echo "$(date) : Done"
... ...
  1 +<%= @log_dir %>/*.log {
  2 +<% @options.each do |str| -%>
  3 + <%= str %>
  4 +<% end -%>
  5 +}
... ...