LAMP Virtualhost Backup Script

I’ve cobbled together a couple of scripts to create a handy LAMP (Ubuntu) environment backup script for web servers running multiple virtual hosts. The script dumps each of your MySQL databases to a gzipped file, then backs up each sub-directory in your web root directory (e.g. /var/www) to a separate gzipped tarball with a bash array loop.

The script automatically rotates daily, weekly, and monthly backups.

It’s messy as hell, but gets the job done.

If you have an Ubuntu slice from Slicehost or any other Ubuntu web server, this should cover your bases in terms of local, daily backups.

#!/bin/bash
#
# Virtual Host Backup Script
# Backs up your MySQL databases and all directories within WEBROOT
#
# Based on automysqlbackup.sh
# VER. 2.5.1 - http://sourceforge.net/projects/automysqlbackup/
# Copyright (c) 2002-2003 [email protected]
#
# Mashed up with http://edwardawebb.com/linux/backup-subdirectories-bash-array-loop
# Backup all directories within webroot
#

#=====================================================================
# Instructions
#=====================================================================
# Set USERNAME and PASSWORD of a user that has the appropriate permissions
# to backup ALL databases.
# Run this MySQL Statement to create the user...
# GRANT SELECT, LOCK TABLES ON *.* TO [email protected] IDENTIFIED BY 'xxx';
#
# Copy this file to anywhere on your server and make sure
# to set executable permission. You can also copy the script to
# /etc/cron.daily to have it execute automatically every night or simply
# place a symlink in /etc/cron.daily to the file if you wish to keep it
# somewhere else. (sudo ln -s /path/to/file.sh)
#
#
# Use an empty file ".DONT_BACKUP" to exclude any directory.
#
# That's it.
#
#=====================================================================


#=====================================================================
# Set the following variables to your system needs
#=====================================================================
# Username to access the MySQL server e.g. dbuser
USERNAME=backup

# Password to access the MySQL server e.g. password
# NOTE(review): hard-coded credentials; prefer ~/.my.cnf or
# --defaults-extra-file so the password never appears in this file
# or on the mysqldump command line.
PASSWORD='sdfk27eka78'

# Host name (or IP address) of MySQL server e.g localhost
DBHOST=localhost

# List of DBNAMES for Daily/Weekly Backup e.g. "DB1 DB2 DB3"
# The special value "all" backs up every database on the server.
DBNAMES="all"

# Space-separated list of databases to skip when DBNAMES="all".
# (Referenced by the exclusion loop below; previously undefined.)
DBEXCLUDE=""

# Day of the week (1=Monday .. 7=Sunday, matching `date +%u`) on which
# the weekly backup runs; on every other day a daily backup is taken.
# (Referenced by the backup loops below; previously undefined.)
DOWEEKLY=6

# Keep a copy of the newest backups in ${BACKUPDIR}/latest ("yes"/"no").
# (Referenced by compression() and the setup code; previously undefined.)
LATEST="no"

# Backup directory location e.g /backups
BACKUPDIR="/var/backups"

# Directory of Webroot
WEBDIR="/var/www"

# List of DBNAMES for Monthly Backups.
MDBNAMES="${DBNAMES}"

# Mail setup
# What would you like to be mailed to you?
# - log   : send only log file
# - files : send log file and sql files as attachments (see docs)
# - stdout : will simply output the log to the screen if run manually.
# - quiet : Only send logs if an error occurs to the MAILADDR.
MAILCONTENT="log"

# Set the maximum allowed email size in k. (4000 = approx 5MB email [see docs])
MAXATTSIZE="4000"

# Email Address to send mail to? ([email protected])
MAILADDR="[email protected]"

# Resolve external commands once up front.
WHICH="$(which which)"
AWK="$(${WHICH} gawk)"
LOGGER="$(${WHICH} logger)"
ECHO="$(${WHICH} echo)"
CAT="$(${WHICH} cat)"
BASENAME="$(${WHICH} basename)"
DATEC="$(${WHICH} date)"
DU="$(${WHICH} du)"
EXPR="$(${WHICH} expr)"
FIND="$(${WHICH} find)"
RM="$(${WHICH} rm)"
MYSQL="$(${WHICH} mysql)"
MYSQLDUMP="$(${WHICH} mysqldump)"
GZIP="$(${WHICH} gzip)"
BZIP2="$(${WHICH} bzip2)"
CP="$(${WHICH} cp)"
HOSTNAMEC="$(${WHICH} hostname)"
SED="$(${WHICH} sed)"
GREP="$(${WHICH} grep)"
PROGNAME="$(${BASENAME} "$0")"

# Set some useful constants
PATH=/usr/local/bin:/usr/bin:/bin:/usr/local/mysql/bin
DATE="$(${DATEC} +%Y-%m-%d_%Hh%Mm)"		# Datestamp e.g 2002-09-21_04h30m
DOW="$(${DATEC} +%A)"				# Day of the week e.g. Monday
DNOW="$(${DATEC} +%u)"				# Day number of the week 1 to 7 where 1 represents Monday
DOM="$(${DATEC} +%d)"				# Date of the Month e.g. 27
M="$(${DATEC} +%B)"				# Month e.g January
W="$(${DATEC} +%V)"				# Week Number e.g 37
VER=2.5.1					# Version Number
LOGFILE=${BACKUPDIR}/${DBHOST}-$(${DATEC} +%N).log		# Logfile Name
LOGERR=${BACKUPDIR}/ERRORS_${DBHOST}-$(${DATEC} +%N).log	# Error logfile Name
BACKUPFILES=""
OPT="--quote-names --opt"			# OPT string for use with mysqldump ( see man mysqldump )
DATESUFFIX=$(date +%m-%d-%Y)			# Suffix for the webroot tarball names
 
#=====================================================================
# Functions
#=====================================================================
 
# Dump a single database to a file with mysqldump.
#   $1 - database name
#   $2 - destination .sql file path
# Returns mysqldump's exit status.
# ${OPT} is intentionally unquoted so it word-splits into multiple flags.
# NOTE(review): --password on the command line is visible in `ps`;
# consider --defaults-extra-file instead.
function dbdump () {
mysqldump --user="${USERNAME}" --password="${PASSWORD}" --host="${DBHOST}" ${OPT} "${1}" > "${2}"
return $?
}
 
# Ensure a directory exists, creating it (and any parents) if needed.
#   $1 - directory path
# Uses -d rather than -e so that a plain file sitting at the path does
# not silently mask a missing directory (mkdir will then report it).
function check_directory () {
	if [ ! -d "${1}" ]		# Check if Directory exists.
		then
		mkdir -p "${1}"
	fi
}
 
# Backup Webroot Directories
# Create a gzipped tarball of one webroot sub-directory, then delete
# tarballs in the destination older than the rotate parameter.
# Input parameters: ${1} = destination directory name (daily/weekly/monthly)
# 					${2} = rotate age in days
#					${3} = sub-directory of WEBDIR to back up
function backup_directory() {
	DESTDIRECTORY="${BACKUPDIR}/${1}/${3}"
	check_directory "${DESTDIRECTORY}"  # check that directory exists

	${ECHO} "${1}" Backup of Webroot Directory \( "${3}" \)
	${ECHO}
	echo Backing up "${3}" to "${DESTDIRECTORY}/${3}_${DATESUFFIX}.tar.gz"
	# Rotate old backups only after a successful tar, so a failed run
	# never deletes the last good backup.
	if tar -zcf "${DESTDIRECTORY}/${3}_${DATESUFFIX}.tar.gz" "${WEBDIR}/${3}"; then
		${ECHO} Rotating "${2}" Day Backups...
		${FIND} "${DESTDIRECTORY}" -mtime +"${2}" -type f -exec ${RM} -v {} \;
	fi

	BACKUPFILES="${BACKUPFILES} ${DESTDIRECTORY}/${3}_${DATESUFFIX}.tar.gz"
	${ECHO} ----------------------------------------------------------------------
}
# Compression function plus latest copy
# Suffix appended to backup filenames once compressed; set as a side
# effect of compression() and read by the callers that build BACKUPFILES.
SUFFIX=""
# Compress a dump file with gzip and, if LATEST=yes, copy the result
# into ${BACKUPDIR}/latest.
#   $1 - path of the uncompressed file
# Always returns 0.
function compression () {
		${GZIP} -f "${1}"
		${ECHO}
		${ECHO} Backup Information for "${1}"
		${GZIP} -l "${1}.gz"
		SUFFIX=".gz"
	if [ "${LATEST}" = "yes" ]; then
		# Quote the source so paths with spaces survive.
		${CP} "${1}${SUFFIX}" "${BACKUPDIR}/latest/"
	fi
	return 0
}
 
 
# IO redirection for logging (currently disabled).
#touch ${LOGFILE}
#exec 6>&1           # Link file descriptor #6 with stdout.
                    # Saves stdout.
#exec > ${LOGFILE}     # stdout replaced with file ${LOGFILE}.
#touch ${LOGERR}
#exec 7>&2           # Link file descriptor #7 with stderr.
                    # Saves stderr.
#exec 2> ${LOGERR}     # stderr replaced with file ${LOGERR}.

# Create required directories
check_directory "${BACKUPDIR}"
check_directory "${BACKUPDIR}/daily"
check_directory "${BACKUPDIR}/weekly"
check_directory "${BACKUPDIR}/monthly"

# Empty the "latest" directory before this run so it only ever holds
# the newest backups. No eval needed: leaving the glob outside the
# quotes lets it expand normally.
if [ "${LATEST}" = "yes" ]
then
	check_directory "${BACKUPDIR}/latest"
	${RM} -fv "${BACKUPDIR}/latest/"*
fi
# If backing up all DBs on the server, fetch the full list from MySQL.
# Spaces inside database names are replaced with '%' so the list can be
# safely word-split by the for-loops below; each loop converts the '%'
# back to a space before dumping.
if [ "${DBNAMES}" = "all" ]; then
        DBNAMES="`${MYSQL} --user=${USERNAME} --password=${PASSWORD} --host=${DBHOST} --batch --skip-column-names -e "show databases"| ${SED} 's/ /%/g'`"

	# If DBs are excluded, strip each excluded name from the list.
	# NOTE(review): \b word boundaries are a GNU sed extension —
	# confirm before running on a non-GNU system.
	for exclude in ${DBEXCLUDE}
	do
		DBNAMES=`${ECHO} ${DBNAMES} | ${SED} "s/\b${exclude}\b//g"`
	done

        MDBNAMES=${DBNAMES}
fi
 
${ECHO} ======================================================================
# BUGFIX: the banner used ${HOST}, which is never set anywhere in this
# script; the configured MySQL host is ${DBHOST}.
${ECHO} Backup of Database Server - ${DBHOST}
${ECHO} ======================================================================
${ECHO} Backup Start Time $(${DATEC})
${ECHO} ======================================================================
# Monthly full backup of all databases, taken on the 1st of the month.
if [ "${DOM}" = "01" ]; then
	for MDB in ${MDBNAMES}
	do
		# Restore any spaces in the database name ('%' was used as a
		# placeholder so the list could be word-split).
		MDB="$(${ECHO} ${MDB} | ${SED} 's/%/ /g')"
		DIRECTORY=${BACKUPDIR}/monthly/${MDB}

		check_directory "${DIRECTORY}"	# Check Monthly DB Directory exists.

		${ECHO} Monthly Backup of ${MDB}...
		dbdump "${MDB}" "${BACKUPDIR}/monthly/${MDB}/${MDB}_${DATE}.${M}.${MDB}.sql"
		# Rotate only after a successful dump so a failure never
		# deletes the older good backups.
		[ $? -eq 0 ] && {
			${ECHO} "Rotating 5 month backups for ${MDB}"
			${FIND} "${BACKUPDIR}/monthly/${MDB}" -mtime +150 -type f -exec ${RM} -v {} \;
		}
		compression "${BACKUPDIR}/monthly/${MDB}/${MDB}_${DATE}.${M}.${MDB}.sql"
		BACKUPFILES="${BACKUPFILES} ${BACKUPDIR}/monthly/${MDB}/${MDB}_${DATE}.${M}.${MDB}.sql${SUFFIX}"
		${ECHO} ----------------------------------------------------------------------
	done
fi
 
# Weekly or daily dump of each configured database.
for DB in ${DBNAMES}
do
	# Restore any spaces in the database name ('%' placeholder).
	DB="$(${ECHO} ${DB} | ${SED} 's/%/ /g')"

	# Weekly Backup
	# BUGFIX: the original test was [ "${DNOW} = ${DOWEEKLY}" ] — one
	# non-empty string, which is ALWAYS true, so the daily branch
	# below could never run.
	if [ "${DNOW}" = "${DOWEEKLY}" ]; then
		${ECHO} Weekly Backup of Database \( ${DB} \)
		check_directory "${BACKUPDIR}/weekly/${DB}"	# Check Weekly DB Directory exists.
		${ECHO}
		dbdump "${DB}" "${BACKUPDIR}/weekly/${DB}/${DB}_week.${W}.${DATE}.sql"
		# Rotate only after a successful dump.
		[ $? -eq 0 ] && {
			${ECHO} Rotating 5 weeks Backups...
			${FIND} "${BACKUPDIR}/weekly/${DB}" -mtime +35 -type f -exec ${RM} -v {} \;
		}
		compression "${BACKUPDIR}/weekly/${DB}/${DB}_week.${W}.${DATE}.sql"
		BACKUPFILES="${BACKUPFILES} ${BACKUPDIR}/weekly/${DB}/${DB}_week.${W}.${DATE}.sql${SUFFIX}"
		${ECHO} ----------------------------------------------------------------------

	# Daily Backup
	else
		${ECHO} Daily Backup of Database \( ${DB} \)
		check_directory "${BACKUPDIR}/daily/${DB}" 	# Check Daily DB Directory exists.
		${ECHO}
		dbdump "${DB}" "${BACKUPDIR}/daily/${DB}/${DB}_${DATE}.${DOW}.sql"
		# Rotate only after a successful dump.
		[ $? -eq 0 ] && {
			${ECHO} Rotating last weeks Backup...
			${FIND} "${BACKUPDIR}/daily/${DB}" -mtime +6 -type f -exec ${RM} -v {} \;
		}
		compression "${BACKUPDIR}/daily/${DB}/${DB}_${DATE}.${DOW}.sql"
		BACKUPFILES="${BACKUPFILES} ${BACKUPDIR}/daily/${DB}/${DB}_${DATE}.${DOW}.sql${SUFFIX}"
		${ECHO} ----------------------------------------------------------------------
	fi
done
 
 
# Back up each sub-directory of the webroot. The grep keeps only
# all-lowercase/dot names (same filter as before, now quoted so the
# shell cannot glob the pattern). A directory containing an empty
# ".DONT_BACKUP" file is skipped.
for DIR in $(ls ${WEBDIR} | grep '^[a-z.]*$')
do
	echo "$DIR"
	# BUGFIX: the ignore file lives under the webroot; the original
	# tested $DIR relative to the current working directory, so the
	# skip never worked when run from cron.
	if [ -f "${WEBDIR}/${DIR}/.DONT_BACKUP" ]
	then
		printf "\tSKIPPING $DIR as it contains ignore file\n" | tee -a $LOGFILE
	else
		# Monthly tarball on the 1st of the month.
		if [ "${DOM}" = "01" ]; then
			backup_directory "monthly" "150" "${DIR}"
		fi

		# BUGFIX: [ "${DNOW} = ${DOWEEKLY}" ] was a single always-true
		# string, so the daily branch never ran.
		if [ "${DNOW}" = "${DOWEEKLY}" ]; then
			backup_directory "weekly" "35" "${DIR}"
		else
			backup_directory "daily" "6" "${DIR}"
		fi
	fi
done
 
${ECHO} Backup End Time $(${DATEC})
${ECHO} ======================================================================

${ECHO} Total disk space used for backup storage..
${ECHO} Size - Location
${ECHO} $(${DU} -hs "${BACKUPDIR}")
${ECHO}

#Clean up IO redirection (disabled together with the exec block above)
#exec 1>&6 6>&-      # Restore stdout and close file descriptor #6.
#exec 2>&7 7>&-      # Restore stderr and close file descriptor #7.

# Clean up Logfile
# eval ${RM} -f "${LOGFILE}"
# eval ${RM} -f "${LOGERR}"

# BUGFIX: STATUS was never assigned anywhere in this script, so
# "exit ${STATUS}" expanded to a bare "exit" and reported whatever
# the last command's status happened to be. Exit 0 explicitly on
# reaching the end of the run.
exit 0

About Mark Egge

Two truths and a lie: Mark Egge is an outdoor enthusiast, opera singer, and a transportation data scientist. He lives in Bozeman, Montana.
This entry was posted in Technology, Uncategorized. Bookmark the permalink.

Comments are closed.