Backup directories and directory listings to a LUKS container and sync everything to S3
#!/bin/bash
## Description:
# Sync files into a LUKS container volume, create listings of selected directories, and upload everything to S3
# Good for periodic backup jobs
# Supports rate limiting, encryption in transit and at rest, and file path exclusions
## Usage:
# bash $0
## Dependencies
# Packages:
# - awscli
# - s3cmd
# - cryptsetup
# - gawk
# - sha256sum
# - grep
# - rsync
# Setup:
# - LUKS file container (see the example commands below)
# - configuration parameters below set properly
# - if this will be used in cron, you'll need to set environment variables for awscli in the crontab (see the example below)
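# Example one-time setup commands (an illustrative sketch, not part of this script; the container
# size, filesystem, cron schedule, and script filename are assumptions, while the other paths and
# names mirror the configuration section below):
#   dd if=/dev/zero of=/media/data/backups/syncops/cloudsync.img bs=1M count=10240
#   cryptsetup luksFormat --key-file /media/data/personal/keyfiles/cloudsync.pem /media/data/backups/syncops/cloudsync.img
#   cryptsetup --key-file /media/data/personal/keyfiles/cloudsync.pem luksOpen /media/data/backups/syncops/cloudsync.img cloudsync
#   mkfs.ext4 /dev/mapper/cloudsync
#   cryptsetup luksClose cloudsync
#   mkdir -p /media/tmpcrypt
# Example crontab entries for the awscli environment variables plus a weekly run:
#   AWS_CONFIG_FILE=/home/ian/.aws/config
#   AWS_SHARED_CREDENTIALS_FILE=/home/ian/.aws/credentials
#   30 2 * * 0 bash /home/ian/scripts/backup-to-s3.sh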
# WARNING:
# If you experience bitrot or otherwise encounter corruption, there is no detection of it.
# You will overwrite the good data with the bad.
# This can be mitigated by using versioning, either implemented through this script or enabled in the S3 bucket (see the example below).
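# For example, versioning can be enabled on the destination bucket ahead of time (a sketch that
# reuses the bucket and profile names from the configuration below):
#   aws s3api put-bucket-versioning --bucket ians-backups --versioning-configuration Status=Enabled --profile default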
## ---
## Configuration
Debug=0
# encrypted container
ScriptWorkingPath='/media/data/backups/syncops'
ContainerFileName='cloudsync.img'
ContainerFile="${ScriptWorkingPath}/${ContainerFileName}"
ContainerMapperName='cloudsync'
ContainerMountPath='/media/tmpcrypt'
ContainerKeyFile='/media/data/personal/keyfiles/cloudsync.pem'
# Escape slashes and use globs
ExcludedPaths=( \
  'tech/virtualmachines' \
  'tech/caches' \
)
# Do not leave trailing slash on the end of directory sources
SourcePaths=( \
  '/media/raid/backups/configuration' \
  '/media/raid/backups/drive-order-in-case.txt' \
  '/media/raid/misc/papers' \
  '/media/raid/misc/research' \
  '/media/raid/personal/text' \
  '/media/raid/personal/resumes' \
  '/media/raid/personal/keyfiles' \
  '/media/raid/personal/finances' \
  '/media/raid/games/strategy' \
  '/media/raid/games/saves' \
  '/media/raid/tech' \
  '/home/ian/scripts' \
)
DestinationPath="${ContainerMountPath}/"
LogFile="${ScriptWorkingPath}/`date '+%Y%m%d'`.log"
# Paths whose contents get a file listing included in the backup
FileListSources=( \
  '/media/data/backups' \
  '/media/data/misc/applications' \
  '/media/data/misc/ebooks' \
  '/media/data/music' \
)
# SNS ARN for notifications
SnsTopic='arn:aws:sns:us-west-1:183912708525:Ian'
# awscli / s3cmd
AwsProfile='default'
S3CmdConfigFile='/home/ian/.s3cfg'
AwsConfigFile='/home/ian/.aws/config'
AwsCredentialFile='/home/ian/.aws/credentials'
AwsRegion='us-west-1'
ThrottleLimit=500 # upload rate limit for the s3cmd transfer, in kilobytes per second
# s3 paths
S3BucketName='ians-backups'
S3Prefix='personal-workstation'
## END CONFIGURATION
# set awscli and s3cmd arguments given configuration
AwsCliOptions="--storage-class=STANDARD_IA --profile=${AwsProfile} --region=${AwsRegion}"
S3CmdOptions="--storage-class=STANDARD_IA --region=${AwsRegion} --config=${S3CmdConfigFile} --limit-rate=${ThrottleLimit}k"
if [ $Debug -eq 1 ] ; then
  AwsCliOptions="${AwsCliOptions} --debug"
  S3CmdOptions="${S3CmdOptions} --debug"
#else
#  AwsCliOptions="${AwsCliOptions} --only-show-errors"
fi
## Stuff begins
# Configure environment vars for awscli
#export AWS_DEFAULT_PROFILE=$AwsProfile
export AWS_CONFIG_FILE=$AwsConfigFile
export AWS_SHARED_CREDENTIALS_FILE=$AwsCredentialFile
function SnsPublication () {
  local ErrorCode=$1
  if [ $ErrorCode -gt 0 ] ; then
    local ScriptResultCode='Failure'
  else
    local ScriptResultCode='Success'
    local ScriptPostMessage="Successfully synchronized file changes to S3. See $LogFile for incremental changes"
  fi
  if [ $ErrorCode -eq 1 ] ; then
    local ScriptPostMessage="Failed to complete rsync operation."
  elif [ $ErrorCode -eq 2 ] ; then
    local ScriptPostMessage="Failed to copy to S3."
  fi
  # Publish to SNS topic to notify admin
  aws sns publish \
    --region $AwsRegion \
    --topic-arn $SnsTopic \
    --subject "Workstation backup job notification ($ScriptResultCode)" \
    --message "$ScriptPostMessage" \
    >> $LogFile 2>&1
}
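# The SNS topic referenced above is assumed to already exist. As a rough sketch, it could be
# created and subscribed to once beforehand (the email address is a placeholder):
#   aws sns create-topic --name Ian --region us-west-1
#   aws sns subscribe --topic-arn arn:aws:sns:us-west-1:183912708525:Ian --protocol email --notification-endpoint admin@example.com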
function CloseContainer () {
  # Immediately flush pending cache writes to disk
  sync -f ${ContainerMountPath}
  # Unmount container
  umount $ContainerMountPath
  # Close container
  cryptsetup luksClose $ContainerMapperName
}
# Open the crypted container; bail out if it cannot be opened or mounted
cryptsetup --key-file $ContainerKeyFile luksOpen $ContainerFile $ContainerMapperName \
  || { echo "Failed to open ${ContainerFile}" ; exit 1 ; }
# Mount it
mount /dev/mapper/$ContainerMapperName $ContainerMountPath \
  || { cryptsetup luksClose $ContainerMapperName ; echo "Failed to mount ${ContainerMountPath}" ; exit 1 ; }
# Assemble the one-liner
CommandPrefix="rsync --archive --delete --checksum --verbose --log-file=${LogFile}"
if [ $Debug -eq 1 ] ; then
  CommandPrefix="${CommandPrefix} --msgs2stderr --debug=ALL"
fi
for e in "${ExcludedPaths[@]}" ; do
  CommandExclusions="${CommandExclusions} --exclude=$e"
done
for i in "${SourcePaths[@]}" ; do
  CommandSources="${CommandSources} $i"
done
Command="$CommandPrefix $CommandExclusions $CommandSources $DestinationPath"
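# For illustration, with the configuration above the assembled command looks roughly like:
#   rsync --archive --delete --checksum --verbose --log-file=<logfile> \
#     --exclude=tech/virtualmachines --exclude=tech/caches \
#     /media/raid/backups/configuration ... /home/ian/scripts /media/tmpcrypt/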
# rsync
RsyncTempLog=`mktemp`
$Command > $RsyncTempLog 2>&1
if [[ `grep -ic error $RsyncTempLog` -gt 0 ]] ; then
  cat $RsyncTempLog >> $LogFile
  echo "There was an error. Check $LogFile for more info. Response is below."
  cat $RsyncTempLog
  SnsPublication 1
  CloseContainer
  exit 1
fi
cat $RsyncTempLog >> $LogFile
# Create file list for each source
for fl in "${FileListSources[@]}" ; do
  FlName=`echo $fl | grep -Po '[a-zA-Z_]+$'`
  ls -Rla $fl > "${ScriptWorkingPath}/${FlName}.filelist" 2>&1
done
# Get hash of container contents
ContainerFileList=`ls -Rla $ContainerMountPath`
ContainerChecksum=`echo "$ContainerFileList" | sha256sum | awk '{ print $1 }'`
ContainerChecksumFile="${ScriptWorkingPath}/${ContainerMapperName}.sha256sum"
[ -f $ContainerChecksumFile ] && mv $ContainerChecksumFile ${ContainerChecksumFile}.old
echo $ContainerChecksum > $ContainerChecksumFile
# if the container hash hasn't changed since last time, don't sync anything
if [[ -e ${ContainerChecksumFile}.old && "$(cat ${ContainerChecksumFile}.old)" == "$ContainerChecksum" ]] ; then
  echo "No changes since last sync!"
  CloseContainer
  exit 0
fi
# Sync working directory to S3
# Effectively copies file lists but NOT container or logs
AwsResponse=`aws s3 sync --exclude "*.log" --exclude "$ContainerFileName" --size-only $AwsCliOptions $ScriptWorkingPath s3://${S3BucketName}/${S3Prefix}/ 2>&1`
AwsReturnCode=$?
echo $AwsResponse >> $LogFile
[ $AwsReturnCode -ne 0 ] && { echo 'Error copying file lists to S3!' >> $LogFile ; Errors=1 ; }
# Sync logfile
AwsResponse=`aws s3 cp $AwsCliOptions $LogFile s3://${S3BucketName}/${S3Prefix}/ 2>&1`
AwsReturnCode=$?
echo $AwsResponse >> $LogFile
[ $AwsReturnCode -ne 0 ] && { echo 'Error copying log file to S3!' >> $LogFile ; Errors=1 ; }
# Copy container, if necessary
if [[ ! -e ${ContainerChecksumFile}.old || "$(cat ${ContainerChecksumFile}.old)" != "$ContainerChecksum" ]] ; then
  AwsTempLog=`mktemp`
  s3cmd $S3CmdOptions put ${ScriptWorkingPath}/${ContainerFileName} s3://${S3BucketName}/${S3Prefix}/ > $AwsTempLog 2>&1
  AwsReturnCode=$?
  cat $AwsTempLog >> $LogFile
  [ $AwsReturnCode -ne 0 ] && { echo 'Error copying container to S3!' >> $LogFile ; Errors=1 ; }
  # Catch s3 errors
  if [ `grep -Pic "(Error copying|BadDigest|Client error|stacktrace|unable|does not match MD5 checksum|exit status is '1')" $AwsTempLog` -gt 0 ] ; then
    echo "There were errors copying to S3. Review the log at ${LogFile}"
    SnsPublication 2
  fi
fi
CloseContainer
if [ "$Errors" == "1" ] ; then
  exit 1
fi
SnsPublication 0
echo 'All done!'
exit 0