#!/bin/bash IMPORT_DAYS=(1 3 5 7 9 11 13 15 17 19 21 23 25 27 29) BACKUP_HOST="192.168.0.30" BACKUP_PATH="/media/testdb/dump" BACKUP_USER="testdb" DATABASE_NAME="concepters" DATABASE_USER="admin" DATABASE_PASSWORD="admin123" LOG_PATH="/home/concepters/log" LOG_FILENAME="dbbackup.log" TODAY=$(date +"%Y_%m_%d") DUMP_PATH="/home/concepters/hdd/mysqlbackup" DUMP_FILENAME=$TODAY".dump.sql" DUMP_TOTALPATH="$DUMP_PATH/$DUMP_FILENAME" MIN_DUMP=5 MAX_DUMP=10 DUMP_OPT="--single-transaction" DUMP_SCRIPT="/usr/local/bin/dumpscript.sh" insertlogger() { logger "$0 : $1" && echo "$0 : $1" 1>&2 } #Split new lines IFS=$'\012' #Log file check insertlog() { if [ ! -d $LOG_PATH ];then if ! mkdir $LOG_PATH; then insertlogger "Fail to make log folder" return 1 fi fi if [ ! -f "$LOG_PATH/$LOG_FILENAME" ]; then if ! touch "$LOG_PATH/$LOG_FILENAME"; then insertlogger "Fail to create logfile" return 1 fi fi if [ ! -w "$LOG_PATH/$LOG_FILENAME" ]; then insertlogger "Permission denied to log file" return 1 fi messages="["$(date +"%F %H:%M:%S")"] $0".$$" : $1" echo "$messages" >> "$LOG_PATH/$LOG_FILENAME" || insertlogger $messages return 0 } #Dump day check isDump() { dayofmonth=$(date +"%e") for i in ${IMPORT_DAYS[@]}; do if [ $i -eq $dayofmonth ]; then return 0 fi done return 1 } # Use carefully, this function does not check size of array getOldestDump() { value=( ) # echo $@ r_count=0 for i in $@; do value[$r_count]=$i r_count=$((r_count+1)) done oldestFile=${value[0]}; for i in ${value[@]}; do if [ $i -ot $oldestFile ]; then oldestFile=$i fi done echo $oldestFile } #Get dumpfile list #dumpFileList=$(ls -1t "$DUMP_PATH"[0-2][0-9][0-9][0-9]_[0-1][0-9]_[0-3][0-9].dump.sql) insertlog "Start backup concepters Datadb" dumpFileList=$(ls -1t "$DUMP_PATH/"*.dump.sql) #Get number of lists count=0 for i in ${dumpFileList[@]}; do #echo $i count=$((count+1)) done insertlog "Number of dumpfile = $count" #remove oldest dumpfile if number of dumpfile is greater equal to MAX_DUMP oldestDump= if [ $count -ge 
$MAX_DUMP ]; then
  # dumpFileList stays unquoted on purpose: IFS=newline splits it into
  # one argument per file for getOldestDump.
  oldestDump=$(getOldestDump $dumpFileList)
  if rm "$oldestDump"; then
    insertlog "Removed $oldestDump"
  else
    insertlog "Fail to remove $oldestDump"
  fi
fi

# If today's target name is already taken, append a numeric suffix until a
# free name is found.
suffix=1
while [ -f "$DUMP_TOTALPATH" ]; do
  insertlog "File $DUMP_TOTALPATH already exists!!"
  DUMP_FILENAME="${TODAY}_${suffix}.dump.sql"
  DUMP_TOTALPATH="$DUMP_PATH/$DUMP_FILENAME"
  insertlog "Dumpfile will be saved to $DUMP_TOTALPATH"
  suffix=$((suffix+1))
done

# Run the dump; abort the whole backup on failure.
insertlog "Start mysqldump to $DUMP_TOTALPATH"
if ! mysqldump -u"$DATABASE_USER" -p"$DATABASE_PASSWORD" "$DUMP_OPT" "$DATABASE_NAME" > "$DUMP_TOTALPATH"; then
  insertlog "Mysqldump failed"
  exit 1
fi
insertlog "Success mysqldump to $DUMP_TOTALPATH"

# Inspect the backup host: remote dump listing (sizes in KiB via
# --block-size=k) and free disk space (df -k).
testDumpFileList=$(ssh "$BACKUP_USER"@"$BACKUP_HOST" "ls -alt --block-size=k $BACKUP_PATH/*.dump.sql")
dfScreen=$(ssh "$BACKUP_USER"@"$BACKUP_HOST" "df -k")
lineofdf=$(echo "$dfScreen" | grep "$BACKUP_PATH")

# Regexes for pulling numbers out of the ls/df output above:
# rxdisk capture 3 = available KiB column of df; rxfile capture 1 = file
# size in KiB; rxpath capture 1 = the dump file path at end of an ls line.
rxdisk="\s+([0-9]+)\s+([0-9]+)\s+([0-9]+)"
rxfile="([0-9]+)K"
rxpath="\s+([0-9a-zA-Z_/. 
]+\.dump.sql)$"

# Size (KiB) of the dump file just created.
lsdumpfile=$(ls -alt --block-size=k "$DUMP_TOTALPATH")
dumpfilesize=
if [[ $lsdumpfile =~ $rxfile ]]; then
  dumpfilesize=${BASH_REMATCH[1]}
fi

# Free space (KiB) on the backup host's dump filesystem.
diskfreespace=
if [[ $lineofdf =~ $rxdisk ]]; then
  diskfreespace=${BASH_REMATCH[3]}
fi

# Parse the remote `ls -alt` output (newest first) into parallel arrays:
# filesize[i] (KiB) and filelist[i] (remote path). IFS=newline makes the
# unquoted expansion iterate line by line.
filesize=( )
filelist=( )
count=0
for i in $testDumpFileList; do
  if [[ $i =~ $rxfile ]]; then
    filesize[$count]=${BASH_REMATCH[1]}
  fi
  if [[ $i =~ $rxpath ]]; then
    filelist[$count]=${BASH_REMATCH[1]}
  fi
  count=$((count+1))
done

insertlog "$BACKUP_HOST has $count dump files"
insertlog "$BACKUP_HOST has $diskfreespace kbytes free space"

# Transfer the dumpfile only if the backup host has room for it.
# When space is short, plan to delete the oldest remote dumps first — but
# never drop below MIN_DUMP retained copies; in that case the backup is
# cancelled and the remote disk must be extended instead.
if [ "$diskfreespace" -lt "$dumpfilesize" ]; then
  if [ "$count" -lt "$MIN_DUMP" ]; then
    insertlog "$BACKUP_HOST has too small free disk space"
    insertlog "Need to extend $BACKUP_HOST disk space"
    insertlog "Backup is canceled"
    exit 1
  else
    insertlog "Check dumpfiles to remove"
    tempcount=$count
    calcfreesize=$diskfreespace
    # Walk from the oldest remote dump (index tempcount-1) toward newer
    # ones, accumulating reclaimable space until the new dump would fit,
    # bailing out before the retained count falls below MIN_DUMP.
    while [ "$calcfreesize" -lt "$dumpfilesize" ]; do
      calcfreesize=$((calcfreesize+filesize[$((tempcount-1))]))
      if [ "$tempcount" -lt "$MIN_DUMP" ]; then
        insertlog "$BACKUP_HOST has too small free disk space"
        insertlog "Need to extend $BACKUP_HOST disk space"
        insertlog "Backup is canceled"
        exit 1
      fi
      tempcount=$((tempcount-1))
    done
    # Remove the remote dumps selected above (loop continues past this
    # chunk; header fragment preserved verbatim for the continuation).
    for ((i=tempcount-1;i