Commit c6adb1d7 authored by Dave Lane

initial commit of new mongobackup codebase
MIT License
Copyright (c) 2018 OERu
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
This is a bash script, run by cron, which creates backups using an editable command. It is used, for example, to back up Docker-based MongoDB databases to a designated directory. Each backup file created (usually hourly) is timestamped. 24 hourly, 7 daily, 4 weekly, 12 monthly, and 7 yearly versions of each backed-up file are retained automatically. A "Success" or "Error" report is automatically sent to a designated email address.
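Backup archives are named after the database, the backup interval, and a timestamp; for instance, an hourly backup of a hypothetical database called `mydb` would be written as something like `mydb-mongo-hourly-2018-06-01_14-05-01.tgz`.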
To install, clone this code into a directory (/etc/mongobackup by default):
`git clone git@git.oeru.org:oeru/mongobackup.git /etc/mongobackup`
Copy default-mongo.conf-sample to default-mongo.conf and edit it to set the various directories (you'll need to create any that don't yet exist!):
`cp default-mongo.conf-sample default-mongo.conf`
Link the cron file to enable it
`ln -sf /etc/mongobackup/dbbackup-mongo-cron /etc/cron.d`
To test it, run an hourly backup manually with verbose output:
`/etc/mongobackup/dbbackup-mongo --verbose --hourly`
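To confirm a run completed, check the script's log (written to /var/log/mongobackup.log):
`tail -n 20 /var/log/mongobackup.log`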
#!/bin/bash
#
# This script dumps the MongoDB databases in a Docker container
# managed by Docker Compose using the selected command within
# the database container and writes the results into
# appropriately named/dated archive files...
#
# It also manages rolling out older backups to avoid filling
# your storage...
#
# Default retention
BU_TO_KEEP_HOURLY=24
BU_TO_KEEP_DAILY=7
BU_TO_KEEP_WEEKLY=4
BU_TO_KEEP_MONTHLY=12
BU_TO_KEEP_YEARLY=7
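# with these defaults, pruning retains at most 24 + 7 + 4 + 12 + 7 = 54 archives
# across the five backup intervals (see delete_old below)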
#
BU_FROOT=mongo
BU_FROOT_HOURLY=hourly
BU_FROOT_DAILY=daily
BU_FROOT_WEEKLY=weekly
BU_FROOT_MONTHLY=monthly
BU_FROOT_YEARLY=yearly
#
BU_README=README.mongo
#
# this can be overridden at invocation
BU_CONF=default-mongo.conf
# output for debugging...
VERBOSE=0
#
# Stuff that should be universal for this install...
# where we can find this app...
MAIN_DIR=/etc/mongobackup
# determine today's date
DATE=`date '+%Y-%m-%d-%a'`
# determine the current time
TIME=`date '+%H-%M-%S'`
TMPDIR=/tmp
# temporary holding point for email
TMP_EMAIL=$TMPDIR/tmp_mongobackup_email.${DATE}_${TIME}
# log file
LOG=/var/log/mongobackup.log
#
# Commands
# tar command
TAR=`which tar`
# gzip command
GZIP=`which gzip`
# grep command
GREP=`which grep`
# copy
CP=`which cp`
# remove
RM=`which rm`
# email program
MAIL=`which mail`
# database dump utility
DC=`which docker-compose`
#
# pattern for "ls" command to build list of
# pruneable backup files...
# -1t = 1 column, ordered by time of last mod
PRUNEABLES_CMD="ls -1t"
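# note: "-t" lists the newest files first, so delete_old() below keeps the newest
# matches and prunes everything beyond the retention count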
# function to direct a message...
message() {
    #
    # a timestamp for logging purposes
    TIMESTAMP=`date '+%Y-%m-%d %H:%M.%S'`
    echo "$0: $TIMESTAMP $@" >> $LOG
    if test -f $TMP_EMAIL ; then
        echo "$0: $TIMESTAMP $@" >> $TMP_EMAIL
    fi
    verbose "$TIMESTAMP $@"
}
# create the temporary email file
create_tmp_email() {
    touch $TMP_EMAIL
    if test -f $TMP_EMAIL ; then
        message "created temporary email $TMP_EMAIL"
    else
        message "failed to create temporary email $TMP_EMAIL"
    fi
}
# send the contents of the temporary file to the
# designated report recipient
send_email_report() {
    if test -f $TMP_EMAIL ; then
        message "sending email report to $EMAIL"
        $MAIL -s "$EMAIL_SUBJ ($1)" $EMAIL < $TMP_EMAIL
        rm $TMP_EMAIL
        if test -f $TMP_EMAIL ; then
            message "failed to remove temporary email $TMP_EMAIL"
        else
            message "successfully removed temporary email $TMP_EMAIL"
        fi
        message "email report successfully sent"
    fi
}
# insert a blank line into the log and on the console
insert_blank() {
    echo "" >> $LOG
    verbose ""
}
# echo a message to the console when running in verbose mode
verbose() {
    if test $VERBOSE = 1 ; then
        echo "$@"
    fi
}
#
# delete old backups
delete_old() {
    #
    verbose "deleting old files based on $1"
    # pattern to search for to build the list...
    PATTERN="$BU_DIR/*-$1-*.*"
    # build the list, with the suffix...
    PRUNEABLES=`$PRUNEABLES_CMD $PATTERN`
    if test "$?" -eq "0" ; then
        message "pruning older files based on $PATTERN"
        BU_TO_KEEP=$2
        message "keeping last $BU_TO_KEEP backups"
        #
        # set counter
        NUM=0
        # go through the list of files and remove those we don't want
        for PRUNEABLE in $PRUNEABLES
        do
            NUM=$(($NUM + 1))
            if test $NUM -gt $BU_TO_KEEP ; then
                message "deleting $PRUNEABLE"
                rm $PRUNEABLE > /dev/null 2>&1
            else
                message "keeping $PRUNEABLE"
            fi
        done
    else
        message "No files with $PATTERN to delete"
    fi
}
#
#
do_backup() {
    # tar archive file
    FILE_BASE=$1
    CONTAINER=$2
    # go into the context of the Docker Compose instance
    CWD=`pwd`
    cd $DC_DIR
    # create a README with Mongo version info
    VER=`$DC exec -T $CONTAINER mongo --version | head -n 1`
    verbose "$VER - echoing to backup: $BU_DIR_HOST/$BU_README"
    echo "--" > $BU_DIR_HOST/$BU_README
    echo "-- $VER" >> $BU_DIR_HOST/$BU_README
    echo "--" >> $BU_DIR_HOST/$BU_README
    # dump all the mongo databases
    CMD="$DC exec -T $CONTAINER mongodump --quiet --out $BU_DIR_DOCK"
    verbose "doing mongo dump: $CMD into $BU_DIR_DOCK on the container ($BU_DIR_HOST on the host)"
    # run the backup dump
    $CMD
    # now process the results - get the names of the dumped databases
    DBS=`find $BU_DIR_HOST -mindepth 1 -maxdepth 1 -type d -exec basename {} \;`
    # convert each db directory into a timestamped archive file
    for DB in $DBS
    do
        # copy the README into the directory
        cp $BU_DIR_HOST/$BU_README $BU_DIR_HOST/$DB/$BU_README
        # tar up the whole directory into the backup directory
        $TAR cvfz $BU_DIR/$DB-$FILE_BASE.tgz $BU_DIR_HOST/$DB
        # remove the directory
        rm -rf $BU_DIR_HOST/$DB
    done
    # clean up the README
    rm $BU_DIR_HOST/$BU_README
    # return to where we were before
    cd $CWD
}
#
TASK=
#
# cycle through the command line options
while test $# -ne 0 ; do
    case $1 in
        --verbose|-v)
            VERBOSE=1
            ;;
        --config|-c)
            shift # shift from the flag to the value
            verbose "setting configuration file to $1"
            BU_CONF=$1
            ;;
        --hourly|-h)
            verbose "running hourly backup"
            TASK=HOURLY
            ;;
        --daily|-d)
            verbose "running daily backup"
            TASK=DAILY
            ;;
        --weekly|-w)
            verbose "running weekly backup"
            TASK=WEEKLY
            ;;
        --monthly|-m)
            verbose "running monthly backup"
            TASK=MONTHLY
            ;;
        --yearly|-y)
            verbose "running yearly backup"
            TASK=YEARLY
            ;;
    esac
    shift
done
#
# we must have *some* task specified
if [[ $TASK == '' ]] ; then
    message "No TASK specified! Pick --hourly, --daily, --weekly, --monthly, --yearly..."
    exit 1
fi
#
#
# create the blank email report (only for non-hourly runs, which send reports)
if ! [[ $TASK == 'HOURLY' ]] ; then
    create_tmp_email
fi
#
if test -f $MAIN_DIR/$BU_CONF ; then
    verbose "Reading defaults from $MAIN_DIR/$BU_CONF"
    source $MAIN_DIR/$BU_CONF
else
    message "ERROR: Couldn't find or read $MAIN_DIR/$BU_CONF"
    exit 1
fi
#
# run through the various tasks
verbose "using configuration file $BU_CONF"
# go to the Docker Compose directory where we need to be to run the
# docker-compose commands
verbose "going to $DC_DIR"
OLD_DIR=`pwd`
cd $DC_DIR
#
# a timestamp for logging purposes
STAMP=`date '+%Y-%m-%d_%H-%M-%S'`
#
# generate the filename
INC="BU_FROOT_$TASK"
FILEPART=$BU_FROOT-${!INC}
FILENAME=$FILEPART-$STAMP
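# e.g. with TASK=HOURLY, INC is "BU_FROOT_HOURLY" and the indirect expansion ${!INC}
# gives "hourly", so FILENAME ends up like mongo-hourly-2018-06-01_14-05-01
# (the timestamp shown is illustrative only)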
#
# delete stale backups
TO_KEEP="BU_TO_KEEP_$TASK"
delete_old $FILEPART ${!TO_KEEP}
#
message "backing up all the databases into $BU_DIR"
# dump the data into the file
#
do_backup $FILENAME $DC_CONTAINER
#
message "completed backup"
# return to where you started from...
cd $OLD_DIR
#
# send resulting email report
#
if ! [[ $TASK == 'HOURLY' ]] ; then
    send_email_report $TASK
fi
exit 0
SHELL=/bin/sh
PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
#
# run system backups
#
# hourly - at 5 minutes past every hour
05 * * * * root /etc/mongobackup/dbbackup-mongo --hourly
#
# daily - at 7:30 pm, mon-sat
30 19 * * 1-6 root /etc/mongobackup/dbbackup-mongo --daily
#
# weekly - at 7:30 pm, sun
30 19 * * sun root /etc/mongobackup/dbbackup-mongo --weekly
#
# monthly - at 8:30 pm, on the first day of the month
30 20 1 * * root /etc/mongobackup/dbbackup-mongo --monthly
#
# yearly - at 8:30 pm, on the first of January.
30 20 1 1 * root /etc/mongobackup/dbbackup-mongo --yearly
#
# dump backup directories
#
# for backup archives
BU_DIR=[path to backup archive store]
# working directories
# from docker-compose.yml
BU_DIR_HOST=[path on host to mapped mongo container backup dir]
BU_DIR_DOCK=[path on mongo container to backup dir]
#
# Docker Compose details
#
# dir containing the docker-compose.yml
DC_DIR=[docker-compose directory for mongo container]
# name of the database container
DC_CONTAINER=[name of mongo container in docker-compose.yml]
#
# Reporting
#
# email address to send reports to, and subject
EMAIL=[admin email]
EMAIL_SUBJ="[email subject to distinguish this email from others]"
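For illustration only, a completed default-mongo.conf might look something like the following; every path, container name, and address below is a hypothetical placeholder to adapt to your own setup.
```
# hypothetical example values -- adjust for your own deployment
BU_DIR=/var/backups/mongo
BU_DIR_HOST=/home/docker/mongo/backup
BU_DIR_DOCK=/backup
DC_DIR=/home/docker/mongo
DC_CONTAINER=mongo
EMAIL=admin@example.com
EMAIL_SUBJ="mongodb backup report"
```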