diff --git a/README.md b/README.md index 63fece7..e5b38d4 100644 --- a/README.md +++ b/README.md @@ -63,10 +63,11 @@ The above is applicable to all the scripts! ## Contributors -The following people have contributed to this repo in various ways... +Thanks to the following people who have contributed to this repo... - [Nik](https://github.com/nik-lampe) - [Francesco](https://github.com/Cicciodev) +- [Wischweh Mobile Consultancy](https://github.com/wischweh) ### How to decrypt, if I used a passphrase diff --git a/changelog.md b/changelog.md new file mode 100644 index 0000000..d4cef46 --- /dev/null +++ b/changelog.md @@ -0,0 +1,90 @@ +version: 5.0.0 + - date: 2022-12-08 + - Add user bin directories and snap bin to PATH. + - a new common changelog.txt file. + - mention the actual script name in the output / log. + - migrate laravel and php backup scripts to their own repo. + - remove redundant check for aws cli. + - better documentation. + +Before 5.0.0, separate changelogs were used. + +db-backup.sh + +version: 3.2.3 + - minor fixes +version: 3.2.2 + - date: 2022-11-29 + - rewrite logic while attempting to create required directories + - add requirements section +version: 3.2.1 + - date: 2021-07-14 + - aws cli add option "--only-show-errors" +version: 3.2.0 + - date: 2021-03-27 + - improve naming scheme. +version: 3.1.1 + - date: 2020-11-24 + - improve documentation. + +full-backup.sh + +version: 4.0.3 + - multiple fixes +version: 4.0.2 + - date: 2022-11-29 + - rewrite logic while attempting to create required directories + - add requirements section +version: 4.0.1 + - date: 2021-08-30 + - fix a minor bug +version: 4.0.0 + - date: 2021-06-06 + - simplify excludes in tar command + - simplify naming scheme for encrypted backups + - show only errors while uploading to S3. Not even progress bar. +version: 3.2.0 + - date: 2021-03-27 + - improve naming scheme. 
+changelog +version: 3.1.1 + - date: 2020-11-24 + - improve documentation +version: 3.1.0 + - delete old backups in $ENCRYPTED_BACKUP_PATH only if this directory / path exists + + +files-backup-without-uploads.sh + +version: 3.1.2 + - date: 2022-11-29 + - rewrite logic while attempting to create required directories +v3.1.1 + - date: 2020-11-24 + - improve documentation +v2 + - date 2017-09-13 + - change of script name + - change the output file name + - remove older backups using a simple find command; props - @wpbullet +v1.1.2 + - date 2017-09-04 + - dynamically find the location of aws cli +v1.1.1 + - date 2017-09-03 + - change the default dir name from Backup to backups + - no more syncing by default +v1.1 + - date 2017-05-05 + - moved to nightly backups + - started excluding wp core files and uploads + - uploads files are now synced, rather than taken as part of regular nightly backup +v1.0.4 + - date 2017-03-06 + - support for hard-coded variable AWS S3 Bucket Name + - support for environment files (.envrc / .env) + - skipped version 1.0.3 +v1.0.2 + - date 2017-03-06 + - support for hard-coded variable $DOMAIN + diff --git a/db-backup.sh b/db-backup.sh index 7743f7f..ee2b486 100755 --- a/db-backup.sh +++ b/db-backup.sh @@ -3,33 +3,10 @@ # requirements # ~/log, ~/backups, ~/path/to/example.com/public -# version - 3.2.3 - -# changelog -# version: 3.2.3 -# - minor fixes -# version: 3.2.2 -# - date: 2022-11-29 -# - rewrite logic while attempting to create required directories -# - add requirements section -# version: 3.2.1 -# - date: 2021-07-14 -# - aws cli add option "--only-show-errors" -# version: 3.2.0 -# - date: 2021-03-27 -# - improve naming scheme. -# version: 3.1.1 -# - date: 2020-11-24 -# - improve documentation. +# version - 5.0.0 ### Variables - Please do not add trailing slash in the PATHs -# To enable offsite backups... -# apt install awscli (or yum install awscli) -# legacy method -# run 'pip install awscli' (as root) -# aws configure (as normal user) - # where to store the database backups? BACKUP_PATH=${HOME}/backups/db-backups ENCRYPTED_BACKUP_PATH=${HOME}/backups/encrypted-db-backups @@ -55,14 +32,11 @@ DOMAIN= # AWS Variable can be hard-coded here AWS_S3_BUCKET_NAME= -# ref: http://docs.aws.amazon.com/cli/latest/userguide/cli-environment.html -AWS_ACCESS_KEY_ID= -AWS_SECRET_ACCESS_KEY= -AWS_DEFAULT_REGION= -AWS_PROFILE= - #-------- Do NOT Edit Below This Line --------# +# to capture non-zero exit code in the pipeline +set -o pipefail + # attempt to create log directory if it doesn't exist [ -d "${HOME}/log" ] || mkdir -p ${HOME}/log if [ "$?" -ne "0" ]; then @@ -91,8 +65,6 @@ log_file=${HOME}/log/backups.log exec > >(tee -a ${log_file} ) exec 2> >(tee -a ${log_file} >&2) -echo "Script started on... $(date +%c)" - export PATH=~/bin:~/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/snap/bin declare -r script_name=$(basename "$0") @@ -110,6 +82,8 @@ if [ -z "$aws_cli" ]; then exit 1 fi +echo "'$script_name' started on... $(date +%c)" + let AUTODELETEAFTER-- # get environment variables, if exists @@ -126,35 +100,33 @@ if [ "$DOMAIN" == "" ]; then if [ "$WP_DOMAIN" != "" ]; then DOMAIN=$WP_DOMAIN else - echo 'Usage ${script_name} example.com (S3 bucket name)'; exit 1 + echo "Usage $script_name example.com (S3 bucket name)"; exit 1 fi else DOMAIN=$1 fi fi -WP_PATH=${SITES_PATH}/$DOMAIN/${PUBLIC_DIR} -[ ! 
-d "$WP_PATH" ] && echo "WordPress is not found at $WP_PATH" && exit 1 - -if [ "$AWS_BUCKET" == "" ]; then +if [ "$BUCKET_NAME" == "" ]; then if [ "$2" != "" ]; then - AWS_BUCKET=$2 + BUCKET_NAME=$2 elif [ "$AWS_S3_BUCKET_NAME" != "" ]; then - AWS_BUCKET=$AWS_S3_BUCKET_NAME + BUCKET_NAME=$AWS_S3_BUCKET_NAME fi fi +# WordPress root +WP_PATH=${SITES_PATH}/${DOMAIN}/${PUBLIC_DIR} +[ ! -d "$WP_PATH" ] && echo "WordPress is not found at $WP_PATH" && exit 1 + # convert forward slash found in sub-directories to hyphen # ex: example.com/test would become example.com-test -DOMAIN_FULL_PATH=$(echo $DOMAIN | awk '{gsub(/\//,"_")}; 1') +DOMAIN_FULL_PATH=$(echo $DOMAIN | awk '{gsub(/\//,"-")}; 1') DB_OUTPUT_FILE_NAME=${BACKUP_PATH}/${DOMAIN_FULL_PATH}-${timestamp}.sql.gz ENCRYPTED_DB_OUTPUT_FILE_NAME=${ENCRYPTED_BACKUP_PATH}/db-${DOMAIN_FULL_PATH}-${timestamp}.sql.gz DB_LATEST_FILE_NAME=${BACKUP_PATH}/${DOMAIN_FULL_PATH}-latest.sql.gz -# to capture non-zero exit code in the pipeline -set -o pipefail - # take actual DB backup if [ -f "$wp_cli" ]; then $wp_cli --path=${WP_PATH} transient delete --all @@ -177,15 +149,11 @@ else fi # external backup -if [ "$AWS_BUCKET" != "" ]; then - if [ ! -e "$aws_cli" ] ; then - echo; echo 'Did you run "pip install aws && aws configure"'; echo; - fi - +if [ "$BUCKET_NAME" != "" ]; then if [ -z "$PASSPHRASE" ] ; then - $aws_cli s3 cp $DB_OUTPUT_FILE_NAME s3://$AWS_BUCKET/${DOMAIN_FULL_PATH}/db-backups/ --only-show-errors + $aws_cli s3 cp $DB_OUTPUT_FILE_NAME s3://$BUCKET_NAME/${DOMAIN_FULL_PATH}/db-backups/ --only-show-errors else - $aws_cli s3 cp $ENCRYPTED_DB_OUTPUT_FILE_NAME s3://$AWS_BUCKET/${DOMAIN_FULL_PATH}/encrypted-db-backups/ --only-show-errors + $aws_cli s3 cp $ENCRYPTED_DB_OUTPUT_FILE_NAME s3://$BUCKET_NAME/${DOMAIN_FULL_PATH}/encrypted-db-backups/ --only-show-errors fi if [ "$?" != "0" ]; then echo; echo 'Something went wrong while taking offsite backup'; diff --git a/files-backup-without-uploads.sh b/files-backup-without-uploads.sh index 1424b88..2cacd2d 100755 --- a/files-backup-without-uploads.sh +++ b/files-backup-without-uploads.sh @@ -1,39 +1,6 @@ #!/bin/bash -# version: 3.1.2 - -# Changelog -# version: 3.1.2 -# - date: 2022-11-29 -# - rewrite logic while attempting to create required directories -# v3.1.1 -# - date: 2020-11-24 -# - improve documentation -# v2 -# - date 2017-09-13 -# - change of script name -# - change the output file name -# - remove older backups using a simple find command; props - @wpbullet -# v1.1.2 -# - date 2017-09-04 -# - dynamically find the location of aws cli -# v1.1.1 -# - date 2017-09-03 -# - change the default dir name from Backup to backups -# - no more syncing by default -# v1.1 -# - date 2017-05-05 -# - moved to nightly backups -# - started excluding wp core files and uploads -# - uploads files are now synced, rather than taken as part of regular nightly backup -# v1.0.4 -# - date 2017-03-06 -# - support for hard-coded variable AWS S3 Bucket Name -# - support for environment files (.envrc / .env) -# - skipped version 1.0.3 -# v1.0.2 -# - date 2017-03-06 -# - support for hard-coded variable $DOMAIN +# version: 5.0.0 # Variable AUTODELETEAFTER=30 @@ -52,6 +19,9 @@ BUCKET_NAME= #-------- Do NOT Edit Below This Line --------# +# to capture non-zero exit code in the pipeline +set -o pipefail + # attempt to create log directory if it doesn't exist [ -d "${HOME}/log" ] || mkdir -p ${HOME}/log if [ "$?" -ne 0 ]; then @@ -97,6 +67,8 @@ if [ -z "$aws_cli" ]; then exit 1 fi +echo "'$script_name' started on... 
$(date +%c)" + let AUTODELETEAFTER-- # get environment variables, if exists @@ -113,7 +85,7 @@ if [ "$DOMAIN" == "" ]; then if [ "$WP_DOMAIN" != "" ]; then DOMAIN=$WP_DOMAIN else - echo "Usage ${script_name} domainname.com (S3 bucket name)"; exit 1 + echo "Usage $script_name example.com (S3 bucket name)"; exit 1 fi else DOMAIN=$1 @@ -128,12 +100,10 @@ if [ "$BUCKET_NAME" == "" ]; then fi fi -# path to be backed up +# WordPress root WP_PATH=${SITES_PATH}/${DOMAIN}/${PUBLIC_DIR} [ ! -d "$WP_PATH" ] && echo "WordPress is not found at $WP_PATH" && exit 1 -echo "Script started on... $(date +%c)" - # path to be excluded from the backup # no trailing slash, please EXCLUDE_BASE_PATH=${DOMAIN} @@ -162,10 +132,6 @@ BACKUP_FILE_NAME=${BACKUP_PATH}/files-without-uploads-${DOMAIN}-$timestamp.tar.g tar hczf ${BACKUP_FILE_NAME} -C ${SITES_PATH} ${EXCLUDES} ${DOMAIN} &> /dev/null if [ "$BUCKET_NAME" != "" ]; then - if [ ! -e "$aws_cli" ] ; then - echo; echo 'Did you run "pip install aws && aws configure"'; echo; - fi - $aws_cli s3 cp ${BACKUP_FILE_NAME} s3://$BUCKET_NAME/${DOMAIN}/files-backup-without-uploads/ --only-show-errors if [ "$?" != "0" ]; then echo; echo 'Something went wrong while taking offsite backup'; echo diff --git a/full-backup.sh b/full-backup.sh index 5d14e9f..de27ce8 100755 --- a/full-backup.sh +++ b/full-backup.sh @@ -3,37 +3,7 @@ # requirements # ~/log, ~/backups, ~/path/to/example.com/public -# Don't allow unset variables -# set -o nounset -# Exit if any command gives an error -# set -o errexit - -# version: 4.0.3 - -# changelog -# version: 4.0.3 -# - multiple fixes -# version: 4.0.2 -# - date: 2022-11-29 -# - rewrite logic while attempting to create required directories -# - add requirements section -# version: 4.0.1 -# - date: 2021-08-30 -# - fix a minor bug -# version: 4.0.0 -# - date: 2021-06-06 -# - simplify excludes in tar command -# - simplify naming scheme for encrypted backups -# - show only errors while uploading to S3. Not even progress bar. -# version: 3.2.0 -# - date: 2021-03-27 -# - improve naming scheme. -# changelog -# version: 3.1.1 -# - date: 2020-11-24 -# - improve documentation -# version: 3.1.0 -# - delete old backups in $ENCRYPTED_BACKUP_PATH only if this directory / path exists +# version: 5.0.0 # this script is basically # files-backup-without-uploads.sh script + part of db-backup.sh script @@ -66,6 +36,9 @@ BUCKET_NAME= #-------- Do NOT Edit Below This Line --------# +# to capture non-zero exit code in the pipeline +set -o pipefail + # attempt to create log directory if it doesn't exist [ -d "${HOME}/log" ] || mkdir -p ${HOME}/log if [ "$?" -ne 0 ]; then @@ -111,6 +84,8 @@ if [ -z "$aws_cli" ]; then exit 1 fi +echo "'$script_name' started on... $(date +%c)" + let AUTODELETEAFTER-- # get environment variables, if exists @@ -142,9 +117,7 @@ if [ "$BUCKET_NAME" == "" ]; then fi fi -echo "Script started on... $(date +%c)" - -# path to backup +# WordPress root WP_PATH=${SITES_PATH}/${DOMAIN}/${PUBLIC_DIR} [ ! 
-d "$WP_PATH" ] && echo "WordPress is not found at $WP_PATH" && exit 1 @@ -171,9 +144,6 @@ done #------------- from db-script.sh --------------# DB_OUTPUT_FILE_NAME=${SITES_PATH}/${DOMAIN}/db.sql -# to capture non-zero exit code in the pipeline -set -o pipefail - # take actual DB backup $wp_cli --path=${WP_PATH} transient delete --all $wp_cli --path=${WP_PATH} db export --no-tablespaces=true --add-drop-table $DB_OUTPUT_FILE_NAME @@ -214,13 +184,6 @@ ln -s ${FULL_BACKUP_FILE_NAME} $LATEST_FULL_BACKUP_FILE_NAME # send backup to AWS S3 bucket if [ "$BUCKET_NAME" != "" ]; then - if [ ! -e "$aws_cli" ]; then - echo "[Warn] aws-cli is not found in \$PATH. Exiting." - echo "PATH: $PATH" - echo "AWS Bucket Name: '$BUCKET_NAME'." - exit 1 - fi - $aws_cli s3 cp ${FULL_BACKUP_FILE_NAME} s3://$BUCKET_NAME/${DOMAIN}/full-backups/ --only-show-errors if [ "$?" != "0" ]; then diff --git a/laravel/db-backup-laravel.sh b/laravel/db-backup-laravel.sh deleted file mode 100644 index b012e9a..0000000 --- a/laravel/db-backup-laravel.sh +++ /dev/null @@ -1,183 +0,0 @@ -#!/bin/bash - -# version - 1.0 - -# based on db-backup.sh script version 3.2.1 - -# changelog -# version: 1.0 -# - date: 2022-10-03 -# - first version - -### Variables - Please do not add trailing slash in the PATHs - -# To enable offsite backups... -# apt install awscli (or yum install awscli) -# legacy method -# run 'pip install awscli' (as root) -# aws configure (as normal user) - -# where to store the database backups? -BACKUP_PATH=${HOME}/backups/db-backups -ENCRYPTED_BACKUP_PATH=${HOME}/backups/encrypted-db-backups - -# the script assumes your sites are stored like ~/sites/example.com, ~/sites/example.net, ~/sites/example.org and so on. -# if you have a different pattern, such as ~/app/example.com, please change the following to fit the server environment! -SITES_PATH=${HOME}/sites - -# if WP is in a sub-directory, please leave this empty! -PUBLIC_DIR=public - -# a passphrase for encryption, in order to being able to use almost any special characters use "" -PASSPHRASE= - -# auto delete older backups after certain number days - default 60. YMMV -AUTODELETEAFTER=60 - -# You may hard-code the domain name -DOMAIN= - -# AWS Variable can be hard-coded here -AWS_S3_BUCKET_NAME= - -# ref: http://docs.aws.amazon.com/cli/latest/userguide/cli-environment.html -AWS_ACCESS_KEY_ID= -AWS_SECRET_ACCESS_KEY= -AWS_DEFAULT_REGION= -AWS_PROFILE= - -#-------- Do NOT Edit Below This Line --------# - -script_name=$(basename "$0") - -# create log directory if it doesn't exist -[ ! -d ${HOME}/log ] && mkdir ${HOME}/log - -LOG_FILE=${HOME}/log/backups.log -exec > >(tee -a ${LOG_FILE} ) -exec 2> >(tee -a ${LOG_FILE} >&2) - -echo "Script started on... $(date +%c)" - -export PATH=/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin:/usr/local/sbin - -which aws &> /dev/null && declare -r aws_cli=`which aws` -declare -r timestamp=$(date +%F_%H-%M-%S) - -let AUTODELETEAFTER-- - -# check if log directory exists -if [ ! -d "${HOME}/log" ] && [ "$(mkdir -p ${HOME}/log)" ]; then - echo 'Log directory not found' - echo "Please create it manually at $HOME/log and then re-run this script" - exit 1 -fi - -# create the dir to keep backups, if not exists -if [ ! -d "$BACKUP_PATH" ] && [ "$(mkdir -p $BACKUP_PATH)" ]; then - echo "BACKUP_PATH is not found at $BACKUP_PATH. The script can't create it, either!" - echo 'You may want to create it manually' - exit 1 -fi -if [ -n "$PASSPHRASE" ] && [ ! 
-d "$ENCRYPTED_BACKUP_PATH" ] && [ "$(mkdir -p $ENCRYPTED_BACKUP_PATH)" ]; then - echo "ENCRYPTED_BACKUP_PATH Is not found at $ENCRYPTED_BACKUP_PATH. the script can't create it, either!" - echo 'you may want to create it manually' - exit 1 -fi - -# get environment variables -if [ -f "$HOME/.envrc" ]; then - source ~/.envrc -fi -if [ -f "$HOME/.env" ]; then - source ~/.env -fi - -# check for the variable/s in three places -# 1 - hard-coded value -# 2 - optional parameter while invoking the script -# 3 - environment files - -if [ "$DOMAIN" == "" ]; then - if [ "$1" == "" ]; then - if [ "$WP_DOMAIN" != "" ]; then - DOMAIN=$WP_DOMAIN - else - echo 'Usage ${script_name} example.com (S3 bucket name)'; exit 1 - fi - else - DOMAIN=$1 - fi -fi - -# convert forward slash found in sub-directories to hyphen -# ex: example.com/test would become example.com-test -DOMAIN_FULL_PATH=$(echo $DOMAIN | awk '{gsub(/\//,"_")}; 1') - -source ~/sites/${DOMAIN_FULL_PATH}/.env - -WP_PATH=${SITES_PATH}/$DOMAIN/${PUBLIC_DIR} -if [ ! -d "$WP_PATH" ]; then - echo; echo 'WordPress is not found at '$WP_PATH; echo "Usage ${script_name} domainname.tld (S3 bucket name)"; echo; - exit 1 -fi - -if [ "$AWS_BUCKET" == "" ]; then - if [ "$2" != "" ]; then - AWS_BUCKET=$2 - elif [ "$AWS_S3_BUCKET_NAME" != "" ]; then - AWS_BUCKET=$AWS_S3_BUCKET_NAME - fi -fi - -DB_OUTPUT_FILE_NAME=${BACKUP_PATH}/db-${DOMAIN_FULL_PATH}-${timestamp}.sql.gz -ENCRYPTED_DB_OUTPUT_FILE_NAME=${ENCRYPTED_BACKUP_PATH}/db-${DOMAIN_FULL_PATH}-${timestamp}.sql.gz -DB_LATEST_FILE_NAME=${BACKUP_PATH}/db-${DOMAIN_FULL_PATH}-latest.sql.gz - -# take actual DB backup - -mysqldump --add-drop-table -u$DB_USERNAME $DB_DATABASE -p"$DB_PASSWORD" | gzip > $DB_OUTPUT_FILE_NAME - -[ -f $DB_LATEST_FILE_NAME ] && rm $DB_LATEST_FILE_NAME -if [ -n "$PASSPHRASE" ] ; then - gpg --symmetric --passphrase $PASSPHRASE --batch -o ${ENCRYPTED_DB_OUTPUT_FILE_NAME} $DB_OUTPUT_FILE_NAME - rm $DB_OUTPUT_FILE_NAME - ln -s $ENCRYPTED_DB_OUTPUT_FILE_NAME $DB_LATEST_FILE_NAME -else - ln -s $DB_OUTPUT_FILE_NAME $DB_LATEST_FILE_NAME -fi -if [ "$?" != "0" ]; then - echo; echo 'Something went wrong while taking local backup!' - rm -f $DB_OUTPUT_FILE_NAME &> /dev/null -fi - -# external backup -if [ "$AWS_BUCKET" != "" ]; then - if [ ! -e "$aws_cli" ] ; then - echo; echo 'Did you run "pip install aws && aws configure"'; echo; - fi - - if [ -z "$PASSPHRASE" ] ; then - $aws_cli s3 cp $DB_OUTPUT_FILE_NAME s3://$AWS_BUCKET/${DOMAIN_FULL_PATH}/db-backups/ --only-show-errors - else - $aws_cli s3 cp $ENCRYPTED_DB_OUTPUT_FILE_NAME s3://$AWS_BUCKET/${DOMAIN_FULL_PATH}/encrypted-db-backups/ --only-show-errors - fi - if [ "$?" != "0" ]; then - echo; echo 'Something went wrong while taking offsite backup'; - echo "Check $LOG_FILE for any log info"; echo - else - echo; echo 'Offsite backup successful'; echo - fi -fi - -# Auto delete backups -[ -d "$BACKUP_PATH" ] && find $BACKUP_PATH -type f -mtime +$AUTODELETEAFTER -exec rm {} \; -[ -d $ENCRYPTED_BACKUP_PATH ] && find $ENCRYPTED_BACKUP_PATH -type f -mtime +$AUTODELETEAFTER -exec rm {} \; - -if [ -z "$PASSPHRASE" ] ; then - echo; echo 'DB backup is done without encryption: '${DB_LATEST_FILE_NAME}' -> '${DB_OUTPUT_FILE_NAME}; echo -else - echo; echo 'DB backup is done encrypted: '${DB_LATEST_FILE_NAME}' -> '${ENCRYPTED_DB_OUTPUT_FILE_NAME}; echo -fi - -echo "Script ended on... 
$(date +%c)" diff --git a/phpbb/db-backup-phpbb.sh b/phpbb/db-backup-phpbb.sh deleted file mode 100644 index 2dc77bf..0000000 --- a/phpbb/db-backup-phpbb.sh +++ /dev/null @@ -1,178 +0,0 @@ -#!/usr/bin/env bash - -# version - 1 - -### Variables - Please do not add trailing slash in the PATHs - -# To enable offsite backups... -# apt install awscli (or yum install awscli) -# legacy method -# run 'pip install awscli' (as root) -# aws configure (as normal user) - -# where to store the database backups? -BACKUP_PATH=${HOME}/backups/db-backups -encrypted_backup_path=${HOME}/backups/encrypted-db-backups - -# the script assumes that the sites are stored like... -# ~/sites/example.com/public -# ~/sites/example.net/public -# ~/sites/example.org/public and so on. -# if you have a different pattern, such as ~/app/example.com, please change the following to fit the server environment! -SITES_PATH=${HOME}/sites - -PUBLIC_DIR=public - -# a passphrase for encryption, in order to being able to use almost any special characters use "" -PASSPHRASE= - -# auto delete older backups after certain number days - default 60. YMMV -AUTODELETEAFTER=120 - -# You may hard-code the domain name -DOMAIN= - -# AWS Variable can be hard-coded here -AWS_S3_BUCKET_NAME= - -# ref: http://docs.aws.amazon.com/cli/latest/userguide/cli-environment.html -AWS_ACCESS_KEY_ID= -AWS_SECRET_ACCESS_KEY= -AWS_DEFAULT_REGION= -AWS_PROFILE= - -#-------- Do NOT Edit Below This Line --------# - -script_name=$(basename "$0") - -# create log directory if it doesn't exist -[ ! -d ${HOME}/log ] && mkdir ${HOME}/log - -LOG_FILE=${HOME}/log/backups.log -exec > >(tee -a ${LOG_FILE} ) -exec 2> >(tee -a ${LOG_FILE} >&2) - -echo "Script started on... $(date +%c)" - -export PATH=/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin:/usr/local/sbin - -which aws &> /dev/null && declare -r aws_cli=`which aws` -declare -r timestamp=$(date +%F_%H-%M-%S) - -let AUTODELETEAFTER-- - -# check if log directory exists -if [ ! -d "${HOME}/log" ] && [ "$(mkdir -p ${HOME}/log)" ]; then - echo 'Log directory not found' - echo "Please create it manually at $HOME/log and then re-run this script" - exit 1 -fi - -# create the dir to keep backups, if not exists -if [ ! -d "$BACKUP_PATH" ] && [ "$(mkdir -p $BACKUP_PATH)" ]; then - echo "BACKUP_PATH is not found at $BACKUP_PATH. The script can't create it, either!" - echo 'You may want to create it manually' - exit 1 -fi -if [ -n "$PASSPHRASE" ] && [ ! -d "$encrypted_backup_path" ] && [ "$(mkdir -p $encrypted_backup_path)" ]; then - echo "encrypted_backup_path is not found at $encrypted_backup_path. the script can't create it, either!" - echo 'you may want to create it manually' - exit 1 -fi - -# get environment variables -if [ -f "$HOME/.envrc" ]; then - source ~/.envrc -fi -if [ -f "$HOME/.env" ]; then - source ~/.env -fi - -# check for the variable/s in three places -# 1 - hard-coded value -# 2 - optional parameter while invoking the script -# 3 - environment files - -if [ "$DOMAIN" == "" ]; then - if [ "$1" == "" ]; then - echo 'Usage ${script_name} example.com (S3 bucket name)'; exit 1 - else - DOMAIN=$1 - fi -fi - -phpbb_path=${SITES_PATH}/$DOMAIN/${PUBLIC_DIR} -if [ ! 
-d "$phpbb_path" ]; then - echo; echo 'WordPress is not found at '$phpbb_path; echo "Usage ${script_name} domainname.tld (S3 bucket name)"; echo; - exit 1 -fi - -if [ "$AWS_BUCKET" == "" ]; then - if [ "$2" != "" ]; then - AWS_BUCKET=$2 - elif [ "$AWS_S3_BUCKET_NAME" != "" ]; then - AWS_BUCKET=$AWS_S3_BUCKET_NAME - fi -fi - -# convert forward slash found in sub-directories to hyphen -# ex: example.com/test would become example.com-test -DOMAIN_FULL_PATH=$(echo $DOMAIN | awk '{gsub(/\//,"_")}; 1') - -DB_OUTPUT_FILE_NAME=${BACKUP_PATH}/db-${DOMAIN_FULL_PATH}-${timestamp}.sql.gz -ENCRYPTED_DB_OUTPUT_FILE_NAME=${encrypted_backup_path}/db-${DOMAIN_FULL_PATH}-${timestamp}.sql.gz -DB_LATEST_FILE_NAME=${BACKUP_PATH}/db-${DOMAIN_FULL_PATH}-latest.sql.gz - -# when installed by the OS provided phpBB package. -CONFIG_FILE_PATH=${phpbb_path}/database.inc.php -# in some installations, it is with config.php file -# CONFIG_FILE_PATH=${phpbb_path}/config.php - -DB_NAME=$(/bin/sed -n "/dbname/ s/[';\r]//gp" ${CONFIG_FILE_PATH} | /usr/bin/awk -F '=' '{print $2}') -DB_USER=$(/bin/sed -n "/dbuser/ s/[';\r]//gp" ${CONFIG_FILE_PATH} | /usr/bin/awk -F '=' '{print $2}') -DB_PASS=$(/bin/sed -n "/dbpass/ s/[';\r]//gp" ${CONFIG_FILE_PATH} | /usr/bin/awk -F '=' '{print $2}') - -# take actual DB backup - /usr/bin/mysqldump --add-drop-table ${DB_NAME} -u${DB_USER} -p${DB_PASS} | /bin/gzip > $DB_OUTPUT_FILE_NAME - rm $DB_LATEST_FILE_NAME - ln -s $DB_OUTPUT_FILE_NAME $DB_LATEST_FILE_NAME - if [ ! -z "$PASSPHRASE" ] ; then - gpg --symmetric --passphrase $PASSPHRASE --batch -o ${ENCRYPTED_DB_OUTPUT_FILE_NAME} $DB_OUTPUT_FILE_NAME - rm $DB_OUTPUT_FILE_NAME - fi - if [ "$?" != "0" ]; then - echo; echo 'Something went wrong while taking local backup!' - echo "Check $LOG_FILE for any further log info. Exiting now!"; echo; exit 2 - fi - -# external backup -if [ "$AWS_BUCKET" != "" ]; then - if [ ! -e "$aws_cli" ] ; then - echo; echo 'Did you run "pip install aws && aws configure"'; echo; - fi - - if [ -z "$PASSPHRASE" ] ; then - $aws_cli s3 cp $DB_OUTPUT_FILE_NAME s3://$AWS_BUCKET/${DOMAIN_FULL_PATH}/databases/ - else - $aws_cli s3 cp $ENCRYPTED_DB_OUTPUT_FILE_NAME s3://$AWS_BUCKET/${DOMAIN_FULL_PATH}/databases/ - fi - if [ "$?" != "0" ]; then - echo; echo 'Something went wrong while taking offsite backup'; - echo "Check $LOG_FILE for any log info"; echo - else - echo; echo 'Offsite backup successful'; echo - fi -fi - -# Auto delete backups -[ -d "$BACKUP_PATH" ] && find $BACKUP_PATH -type f -mtime +$AUTODELETEAFTER -exec rm {} \; -[ -d $encrypted_backup_path ] && find $encrypted_backup_path -type f -mtime +$AUTODELETEAFTER -exec rm {} \; - -echo "Script ended on... 
$(date +%c)" - -if [ -z "$PASSPHRASE" ] ; then - echo; echo 'DB backup is done; please check the latest backup at '${BACKUP_PATH}'.'; echo -else - echo; echo 'DB backup is done; please check the latest backup at '${ENCRYPTED_BACKUP_PATH}'.'; echo -fi - diff --git a/phpbb/full-backup-phpbb.sh b/phpbb/full-backup-phpbb.sh deleted file mode 100644 index 0a410a7..0000000 --- a/phpbb/full-backup-phpbb.sh +++ /dev/null @@ -1,222 +0,0 @@ -#!/usr/bin/env bash - -# version: 1.0 - -# changelog -# version: 1.0 -# date: 2020-04-26 - -# this script is basically -# files-backup-without-uploads.sh script + part of db-backup.sh script -# from files-backup-without-uploads.sh script, we do not exclude uploads directory - just removed the line from it - -### Variables ### - -# a passphrase for encryption, in order to being able to use almost any special characters use "" -PASSPHRASE= - -# auto delete older backups after certain number days - default 30. YMMV -AUTODELETEAFTER=90 - -# the script assumes your sites are stored like ~/sites/example.com, ~/sites/example.net, ~/sites/example.org and so on. -# if you have a different pattern, such as ~/app/example.com, please change the following to fit the server environment! -SITES_PATH=${HOME}/sites - -# if WP is in a sub-directory, please leave this empty! -PUBLIC_DIR=public - -### Variables -# You may hard-code the domain name and AWS S3 Bucket Name here -DOMAIN= -bucket_name= - -#-------- Do NOT Edit Below This Line --------# - -# create log directory if it doesn't exist -[ ! -d ${HOME}/log ] && mkdir ${HOME}/log - -LOG_FILE=${HOME}/log/backups.log -exec > >(tee -a ${LOG_FILE} ) -exec 2> >(tee -a ${LOG_FILE} >&2) - -echo "Script started on... $(date +%c)" - -export PATH=/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin:/usr/local/sbin - -which aws &> /dev/null && declare -r aws_cli=`which aws` -declare -r timestamp=$(date +%F_%H-%M-%S) -declare -r script_name=$(basename "$0") - -let AUTODELETEAFTER-- - -# check if log directory exists -if [ ! -d "${HOME}/log" ] && [ "$(mkdir -p ${HOME}/log)" ]; then - echo "Log directory not found. The script can't create it, either!" - echo "Please create it manually at $HOME/log and then re-run this script" - exit 1 -fi - -# source the envrc files if found -[ -f "$HOME/.envrc" ] && source ~/.envrc -[ -f "$HOME/.env" ] && source ~/.env - -# check for the variable/s in three places -# 1 - hard-coded value -# 2 - optional parameter while invoking the script -# 3 - environment files - -if [ "$DOMAIN" == "" ]; then - if [ "$1" == "" ]; then - if [ "$WP_DOMAIN" != "" ]; then - DOMAIN=$WP_DOMAIN - else - echo "Usage $script_name example.com (S3 bucket name)"; exit 1 - fi - else - DOMAIN=$1 - fi -fi - -if [ "$bucket_name" == "" ]; then - if [ "$2" != "" ]; then - bucket_name=$2 - elif [ "$AWS_S3_BUCKET_NAME" != "" ]; then - bucket_name=$AWS_S3_BUCKET_NAME - fi -fi - -# path to backup -phpbb_path=${SITES_PATH}/${DOMAIN}/${PUBLIC_DIR} -if [ ! -d "$phpbb_path" ]; then - echo "$phpbb_path is not found. Please check the paths and adjust the variables in the script. Exiting now..." - exit 1 -fi - -# where to store the backup file/s -BACKUP_PATH=${HOME}/backups/full-backups -if [ ! -d "$BACKUP_PATH" ] && [ "$(mkdir -p $BACKUP_PATH)" ]; then - echo "BACKUP_PATH is not found at $BACKUP_PATH. The script can't create it, either!" - echo 'You may want to create it manually' - exit 1 -fi -ENCRYPTED_BACKUP_PATH=${HOME}/backups/encrypted-full-backups -if [ -n "$PASSPHRASE" ] && [ ! 
-d "$ENCRYPTED_BACKUP_PATH" ] && [ "$(mkdir -p $ENCRYPTED_BACKUP_PATH)" ]; then - echo "ENCRYPTED_BACKUP_PATH is not found at $ENCRYPTED_BACKUP_PATH. The script can't create it, either!" - echo 'You may want to create it manually' - exit 1 -fi - -# path to be excluded from the backup -# no trailing slash, please -EXCLUDE_BASE_PATH=${DOMAIN} -if [ "$PUBLIC_DIR" != "" ]; then - EXCLUDE_BASE_PATH=${EXCLUDE_BASE_PATH}/${PUBLIC_DIR} -fi - -declare -A EXC_PATH -EXC_PATH[1]=${EXCLUDE_BASE_PATH}/wp-content/cache -EXC_PATH[2]=${EXCLUDE_BASE_PATH}/wp-content/debug.log -EXC_PATH[3]=${EXCLUDE_BASE_PATH}/.git -# need more? - just use the above format - -EXCLUDES='' -for i in "${!EXC_PATH[@]}" ; do - CURRENT_EXC_PATH=${EXC_PATH[$i]} - EXCLUDES=${EXCLUDES}'--exclude='$CURRENT_EXC_PATH' ' - # remember the trailing space; we'll use it later -done - -#------------- from db-script.sh --------------# -DB_OUTPUT_FILE_NAME=${SITES_PATH}/${DOMAIN}/db-$timestamp.sql - -CONFIG_FILE_PATH=${phpbb_path}/database.inc.php - -DB_NAME=$(/bin/sed -n "/dbname/ s/[';\r]//gp" ${CONFIG_FILE_PATH} | /usr/bin/awk -F '=' '{print $2}') -DB_USER=$(/bin/sed -n "/dbuser/ s/[';\r]//gp" ${CONFIG_FILE_PATH} | /usr/bin/awk -F '=' '{print $2}') -DB_PASS=$(/bin/sed -n "/dbpass/ s/[';\r]//gp" ${CONFIG_FILE_PATH} | /usr/bin/awk -F '=' '{print $2}') - -# take actual DB backup - /usr/bin/mysqldump --add-drop-table ${DB_NAME} -u${DB_USER} -p${DB_PASS} | /bin/gzip > $DB_OUTPUT_FILE_NAME - if [ "$?" != "0" ]; then - echo; echo 'Something went wrong while taking local backup!' - echo "Check $LOG_FILE for any further log info. Exiting now!"; echo; exit 2 - fi -#------------- end of snippet from db-script.sh --------------# - -FULL_BACKUP_FILE_NAME=${BACKUP_PATH}/full-backup-${DOMAIN}-$timestamp.tar.gz - -# let's encrypt everything with a passphrase before sending to AWS -# this is a simple encryption using gpg -ENCRYPTED_FULL_BACKUP_FILE_NAME=${ENCRYPTED_BACKUP_PATH}/full-backup-${DOMAIN}-$timestamp.tar.gz.gpg -LATEST_FULL_BACKUP_FILE_NAME=${BACKUP_PATH}/full-backup-${DOMAIN}-latest.tar.gz - -if [ ! -z "$PASSPHRASE" ]; then - # using symmetric encryption - # option --batch to avoid passphrase prompt - # encrypting database dump - tar hcz -C ${SITES_PATH} ${EXCLUDES} ${DOMAIN} | gpg --symmetric --passphrase $PASSPHRASE --batch -o ${ENCRYPTED_FULL_BACKUP_FILE_NAME} - if [ "$?" != "0" ]; then - echo; echo 'Something went wrong while encrypting full backup'; echo - echo "Check $LOG_FILE for any log info"; echo - else - echo; echo 'Backup successfully encrypted'; echo - fi - rm $LATEST_FULL_BACKUP_FILE_NAME - ln -s ${ENCRYPTED_FULL_BACKUP_FILE_NAME} $LATEST_FULL_BACKUP_FILE_NAME -else - # let's do it using tar - # Create a fresh backup - tar hczf ${FULL_BACKUP_FILE_NAME} -C ${SITES_PATH} ${EXCLUDES} ${DOMAIN} &> /dev/null - if [ "$?" != "0" ]; then - echo; echo 'Something went wrong while encrypting full backup'; echo - echo "Check $LOG_FILE for any log info"; echo - else - echo; echo 'Backup successfully encrypted'; echo - fi - - echo "No PASSPHRASE provided!" - echo "You may want to encrypt your backup before storing them offsite." - echo "[WARNING]" - echo "If your data came from Europe, please check GDPR compliance." - - rm $LATEST_FULL_BACKUP_FILE_NAME - ln -s ${FULL_BACKUP_FILE_NAME} $LATEST_FULL_BACKUP_FILE_NAME -fi - -# remove the reduntant DB backup -rm $DB_OUTPUT_FILE_NAME - -# send backup to AWS S3 bucket -if [ "$bucket_name" != "" ]; then - if [ ! 
-e "$aws_cli" ] ; then - echo; echo 'Did you run "pip install aws && aws configure"'; echo; - fi - - if [ -z "$PASSPHRASE" ]; then - $aws_cli s3 cp ${FULL_BACKUP_FILE_NAME} s3://$bucket_name/${DOMAIN}/full-backups/ - else - $aws_cli s3 cp ${ENCRYPTED_FULL_BACKUP_FILE_NAME} s3://$bucket_name/${DOMAIN}/full-backups/ - fi - - if [ "$?" != "0" ]; then - echo; echo 'Something went wrong while taking offsite backup'; echo - echo "Check $LOG_FILE for any log info"; echo - else - echo; echo 'Offsite backup successful'; echo - fi -fi - -# Auto delete backups -find $BACKUP_PATH -type f -mtime +$AUTODELETEAFTER -exec rm {} \; -[ -d $ENCRYPTED_BACKUP_PATH ] && find $ENCRYPTED_BACKUP_PATH -type f -mtime +$AUTODELETEAFTER -exec rm {} \; - -echo "Script ended on... $(date +%c)" - -if [ -z "$PASSPHRASE" ]; then - echo 'Full backup is done; please check the latest backup in '${BACKUP_PATH}'.'; - echo "Latest backup is at ${FULL_BACKUP_FILE_NAME}" -else - echo 'Full backup is done; please check the latest backup in '${ENCRYPTED_BACKUP_PATH}'.'; - echo "Latest backup is at ${ENCRYPTED_FULL_BACKUP_FILE_NAME}" -fi -echo