Compare commits

...

7 Commits
3.1.0 ... 3.1.3

Author SHA1 Message Date
Dave Conroy
3f58cfd284 Release 3.1.3 - See CHANGELOG.md 2022-03-30 10:46:16 -07:00
Dave Conroy
2d01f5e692 Fix for MARIADB not sending DB name for post script 2022-03-30 10:45:27 -07:00
Dave Conroy
dbd0a03b0d SPLIT_DB is supposed to be TRUE 2022-03-30 10:43:22 -07:00
Dave Conroy
6527f4ff63 Add sanity checks for Post scripts to be executable 2022-03-30 10:37:58 -07:00
Dave Conroy
d843d21a1b Release 3.1.2 - See CHANGELOG.md 2022-03-29 08:09:36 -07:00
Dave Conroy
24ed769429 Release 3.1.1 - See CHANGELOG.md 2022-03-28 10:29:00 -07:00
Dave Conroy
cbd87a5ede Update README.md 2022-03-23 19:16:51 -07:00
4 changed files with 58 additions and 29 deletions

View File

@@ -1,3 +1,23 @@
## 3.1.3 2022-03-30 <dave at tiredofit dot ca>
### Changed
- Fix for MariaDB not sending database name to post script
- Check for executable bit on post scripts both via environment variable and /assets/custom
- SPLIT_DB defaulted to TRUE
## 3.1.2 2022-03-29 <dave at tiredofit dot ca>
### Changed
- Fix for blank Notice when individual backup is completed (time taken)
## 3.1.1 2022-03-28 <dave at tiredofit dot ca>
### Changed
- Resolve some issues with backups of Mongo and others not saving the proper timestamp
## 3.1.0 2022-03-23 <dave at tiredofit dot ca>
### Added

View File

@@ -16,7 +16,8 @@ Currently backs up CouchDB, InfluxDB, MySQL, MongoDB, Postgres, Redis servers.
* dump to local filesystem or backup to S3 Compatible services
* select database user and password
* backup all databases
* backup all databases, single, or multiple databases
* backup all to separate files or one singular file
* choose to have an MD5 or SHA1 sum after backup for verification
* delete old backups after specific amount of time
* choose compression type (none, gz, bz, xz, zstd)

View File

@@ -18,7 +18,7 @@ PARALLEL_COMPRESSION_THREADS=${PARALLEL_COMPRESSION_THREADS:-"$(nproc)"}
S3_CERT_SKIP_VERIFY=${S3_CERT_SKIP_VERIFY:-"TRUE"}
S3_PROTOCOL=${S3_PROTOCOL:-"https"}
SIZE_VALUE=${SIZE_VALUE:-"bytes"}
SPLIT_DB=${SPLIT_DB:-"FALSE"}
SPLIT_DB=${SPLIT_DB:-"TRUE"}
TEMP_LOCATION=${TEMP_LOCATION:-"/tmp/backups"}
dbhost=${DB_HOST}
dbname=${DB_NAME}

View File

@@ -77,58 +77,60 @@ bootstrap_variables() {
}
backup_couch() {
pre_dbbackup
target=couch_${dbname}_${dbhost}_${now}.txt
compression
print_notice "Dumping CouchDB database: '${dbname}'"
print_notice "Dumping CouchDB database: '${dbname}' ${compression_string}"
curl -X GET http://${dbhost}:${dbport}/${dbname}/_all_docs?include_docs=true ${compress_cmd} | $compress_cmd > ${TEMP_LOCATION}/${target}
exit_code=$?
check_exit_code $target
generate_checksum
move_dbbackup
post_dbbackup_hooks
send_statistics
post_dbbackup $dbname
}
backup_influx() {
if [ "${ENABLE_COMPRESSION,,}" = "none" ] || [ "${ENABLE_COMPRESSION,,}" = "false" ] ; then
:
else
print_notice "Compressing InfluxDB backup with gzip"
influx_compression="-portable"
compression_string=" and compressing with gzip"
fi
for db in ${DB_NAME}; do
print_notice "Dumping Influx database: '${db}'"
pre_dbbackup
target=influx_${db}_${dbhost}_${now}
print_notice "Dumping Influx database: '${db}' ${compression_string}"
influxd backup ${influx_compression} -database $db -host ${dbhost}:${dbport} ${TEMP_LOCATION}/${target}
exit_code=$?
check_exit_code $target
generate_checksum
move_dbbackup
send_statistics
post_dbbackup_hooks
post_dbbackup $db
done
}
backup_mongo() {
pre_dbbackup
if [ "${ENABLE_COMPRESSION,,}" = "none" ] || [ "${ENABLE_COMPRESSION,,}" = "false" ] ; then
target=${dbtype}_${dbname}_${dbhost}_${now}.archive
else
print_notice "Compressing MongoDB backup with gzip"
target=${dbtype}_${dbname}_${dbhost}_${now}.archive.gz
mongo_compression="--gzip"
compression_string="and compressing with gzip"
fi
print_notice "Dumping MongoDB database: '${DB_NAME}'"
print_notice "Dumping MongoDB database: '${DB_NAME}' ${compression_string}"
mongodump --archive=${TEMP_LOCATION}/${target} ${mongo_compression} --host ${dbhost} --port ${dbport} ${MONGO_USER_STR}${MONGO_PASS_STR}${MONGO_AUTH_STR}${MONGO_DB_STR} ${EXTRA_OPTS}
exit_code=$?
check_exit_code $target
cd "${TEMP_LOCATION}"
generate_checksum
move_dbbackup
send_statistics
post_dbbackup_hooks
post_dbbackup
}
backup_mssql() {
pre_dbbackup
target=mssql_${dbname}_${dbhost}_${now}.bak
print_notice "Dumping MSSQL database: '${dbname}'"
/opt/mssql-tools/bin/sqlcmd -E -C -S ${dbhost}\,${dbport} -U ${dbuser} -P ${dbpass} Q "BACKUP DATABASE \[${dbname}\] TO DISK = N'${TEMP_LOCATION}/${target}' WITH NOFORMAT, NOINIT, NAME = '${dbname}-full', SKIP, NOREWIND, NOUNLOAD, STATS = 10"
@@ -136,8 +138,7 @@ backup_mssql() {
check_exit_code $target
generate_checksum
move_dbbackup
send_statistics
post_dbbackup_hooks
post_dbbackup $dbname
}
backup_mysql() {
@@ -175,7 +176,7 @@ backup_mysql() {
check_exit_code $target
generate_checksum
move_dbbackup
post_dbbackup
post_dbbackup $db
done
else
print_debug "Not splitting database dumps into their own files"
@@ -188,7 +189,7 @@ backup_mysql() {
check_exit_code $target
generate_checksum
move_dbbackup
post_dbbackup
post_dbbackup all
fi
}
@@ -222,7 +223,7 @@ backup_pgsql() {
check_exit_code $target
generate_checksum
move_dbbackup
post_dbbackup
post_dbbackup $db
done
else
print_debug "Not splitting database dumps into their own files"
@@ -243,14 +244,14 @@ backup_pgsql() {
check_exit_code $target
generate_checksum
move_dbbackup
post_dbbackup
post_dbbackup all
fi
}
backup_redis() {
pre_dbbackup
print_notice "Dumping Redis - Flushing Redis Cache First"
target=redis_${db}_${dbhost}_${now}.rdb
target=redis_all_${dbhost}_${now}.rdb
echo bgsave | redis-cli -h ${dbhost} -p ${dbport} ${REDIS_PASS_STR} --rdb ${TEMP_LOCATION}/${target} ${EXTRA_OPTS}
sleep 10
try=5
@@ -270,7 +271,7 @@ backup_redis() {
$compress_cmd "${TEMP_LOCATION}/${target_original}"
generate_checksum
move_dbbackup
post_dbbackup
post_dbbackup all
}
backup_sqlite3() {
@@ -286,7 +287,7 @@ backup_sqlite3() {
cat "${TEMP_LOCATION}"/backup.sqlite3 | $compress_cmd > "${TEMP_LOCATION}/${target}"
generate_checksum
move_dbbackup
post_dbbackup
post_dbbackup $db
}
check_availability() {
@@ -542,21 +543,28 @@ post_dbbackup() {
### Post Script Support
if [ -n "${POST_SCRIPT}" ] ; then
print_notice "Found POST_SCRIPT environment variable. Executing '${POST_SCRIPT}"
eval "${POST_SCRIPT}" "${exit_code}" "${dbtype}" "${dbhost}" "${dbname}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${target}" "${FILESIZE}" "${checksum_value}"
if [ -x "${POST_SCRIPT}" ] ; then
print_notice "Found POST_SCRIPT environment variable. Executing '${POST_SCRIPT}"
eval "${POST_SCRIPT}" "${exit_code}" "${dbtype}" "${dbhost}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${target}" "${FILESIZE}" "${checksum_value}"
else
print_error "Can't execute POST_SCRIPT environment variable '${POST_SCRIPT}' as its filesystem bit is not executible!"
fi
fi
### Post Backup Custom Script Support
if [ -d "/assets/custom-scripts/" ] ; then
print_notice "Found Post Backup Custom Script to execute"
for f in $(find /assets/custom-scripts/ -name \*.sh -type f); do
print_notice "Running Script: '${f}'"
## script EXIT_CODE DB_TYPE DB_HOST DB_NAME STARTEPOCH FINISHEPOCH DURATIONEPOCH BACKUP_FILENAME FILESIZE CHECKSUMVALUE
${f} "${exit_code}" "${dbtype}" "${dbhost}" "${dbname}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${target}" "${FILESIZE}" "${checksum_value}"
if [ -x "${f}" ] ; then
print_notice "Executing post backup custom script : '${f}'"
## script EXIT_CODE DB_TYPE DB_HOST DB_NAME STARTEPOCH FINISHEPOCH DURATIONEPOCH BACKUP_FILENAME FILESIZE CHECKSUMVALUE
${f} "${exit_code}" "${dbtype}" "${dbhost}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${target}" "${FILESIZE}" "${checksum_value}"
else
print_error "Can't run post backup custom script: '${f}' as its filesystem bit is not executible!"
fi
done
fi
print_notice "DB Backup for '${db}' time taken: $(echo ${dbbackup_total_time} | awk '{printf "Hours: %d Minutes: %02d Seconds: %02d", $1/3600, ($1/60)%60, $1%60}')"
print_notice "DB Backup for '${1}' time taken: $(echo ${dbbackup_total_time} | awk '{printf "Hours: %d Minutes: %02d Seconds: %02d", $1/3600, ($1/60)%60, $1%60}')"
}
sanity_test() {