Compare commits

14 Commits

Author SHA1 Message Date
dave@tiredofit.ca
f7f72ba2c1 Release 4.0.35 - See CHANGELOG.md 2024-01-14 20:22:08 -08:00
Dave Conroy
2f05d76f4e README weirdness 2024-01-03 17:33:52 -08:00
Dave Conroy
c9a634ff25 Convert > to - in README 2024-01-03 17:21:01 -08:00
dave@tiredofit.ca
0ce21e8f43 Release 4.0.34 - See CHANGELOG.md 2024-01-02 14:01:28 -08:00
Dave Conroy
a700eb0fef Merge pull request #315 from ToshY/docs/latest-symlink-format
[Docs] Updated `DEFAULT_CREATE_LATEST_SYMLINK` description format
2024-01-02 13:21:59 -08:00
Dave Conroy
7baa3774c7 Merge pull request #318 from devmethodgit/main
Fix environment variables in examples
2024-01-02 13:21:43 -08:00
Dave Conroy
341e4d12ea Update case statement to support arm64|aarch64 2024-01-02 13:21:07 -08:00
Dave Conroy
5c51bbcb7e Wrap if statement in double brackets 2024-01-02 12:54:57 -08:00
@vladimirzyuzin
24d9a9a937 Fix environment variables 2023-12-30 20:58:26 +03:00
ToshY
591b8d6dbd updated create latest symlink description format 2023-12-26 12:24:52 +00:00
dave@tiredofit.ca
a5b15b4412 Release 4.0.33 - See CHANGELOG.md 2023-12-18 07:58:54 -08:00
dave@tiredofit.ca
6692cf9834 Release 4.0.32 - See CHANGELOG.md 2023-12-15 15:32:32 -08:00
dave@tiredofit.ca
c37de5778d Release 4.0.31 - See CHANGELOG.md 2023-12-12 19:25:40 -08:00
dave@tiredofit.ca
eeeafd6ab8 Release 4.0.30 - See CHANGELOG.md 2023-12-11 15:21:01 -08:00
8 changed files with 130 additions and 82 deletions

View File

@@ -1,3 +1,40 @@
## 4.0.35 2024-01-14 <dave at tiredofit dot ca>
### Changed
- Fix issue with email notifications and not being able to add a from statement
## 4.0.34 2024-01-02 <dave at tiredofit dot ca>
### Changed
- Change the way architectures are detected to re-enable backups with MSSQL and Influx2
## 4.0.33 2023-12-18 <dave at tiredofit dot ca>
### Changed
- Allow _OPTS variables to contain spaces
- Switch references of _DUMP_OPTS to _BACKUP_OPTS
## 4.0.32 2023-12-15 <dave at tiredofit dot ca>
### Changed
- Fix issue with directories not properly being backed up (InfluxDB)
## 4.0.31 2023-12-12 <dave at tiredofit dot ca>
### Changed
- Support backing up databases with spaces in them
## 4.0.30 2023-12-11 <dave at tiredofit dot ca>
### Added
- Separate each job with its own temporary folder for isolation and to better clean up jobs that back up as a directory instead of a flat file
## 4.0.29 2023-12-04 <dave at tiredofit dot ca>
### Changed

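The 4.0.33 entries above rename the per-job _DUMP_OPTS variables to _BACKUP_OPTS and allow _OPTS values to contain spaces. A rough illustration of a job definition after that rename; the values are invented for this sketch and only the variable names come from the diffs further down:

    # Hypothetical job environment after the _DUMP_OPTS -> _BACKUP_OPTS rename
    # (values are examples, not taken from the repository).
    export DB01_TYPE=mariadb
    export DB01_HOST=mariadb
    export DB01_NAME=example
    export DB01_BACKUP_INTERVAL=1440                              # was DB01_DUMP_INTERVAL
    export DB01_EXTRA_BACKUP_OPTS="--single-transaction --quick"  # spaces are now accepted (4.0.33)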
View File

@@ -1,5 +1,5 @@
ARG DISTRO=alpine
ARG DISTRO_VARIANT=edge
ARG DISTRO_VARIANT=3.19
FROM docker.io/tiredofit/${DISTRO}:${DISTRO_VARIANT}
LABEL maintainer="Dave Conroy (github.com/tiredofit)"
@@ -72,12 +72,12 @@ RUN source /assets/functions/00-container && \
apkArch="$(uname -m)"; \
case "$apkArch" in \
x86_64) mssql=true ; mssql_arch=amd64; influx2=true ; influx_arch=amd64; ;; \
arm64 ) mssql=true ; mssql_arch=amd64; influx2=true ; influx_arch=arm64 ;; \
arm64 | aarch64 ) mssql=true ; mssql_arch=amd64; influx2=true ; influx_arch=arm64 ;; \
*) sleep 0.1 ;; \
esac; \
\
if [ $mssql = "true" ] ; then curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk ; curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; echo y | apk add --allow-untrusted msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; else echo >&2 "Detected non x86_64 or ARM64 build variant, skipping MSSQL installation" ; fi; \
if [ $influx2 = "true" ] ; then curl -sSL https://dl.influxdata.com/influxdb/releases/influxdb2-client-${INFLUX2_CLIENT_VERSION}-linux-${influx_arch}.tar.gz | tar xvfz - --strip=1 -C /usr/src/ ; chmod +x /usr/src/influx ; mv /usr/src/influx /usr/sbin/ ; else echo >&2 "Unable to build Influx 2 on this system" ; fi ; \
if [[ $mssql = "true" ]] ; then curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk ; curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; echo y | apk add --allow-untrusted msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; else echo >&2 "Detected non x86_64 or ARM64 build variant, skipping MSSQL installation" ; fi; \
if [[ $influx2 = "true" ]] ; then curl -sSL https://dl.influxdata.com/influxdb/releases/influxdb2-client-${INFLUX2_CLIENT_VERSION}-linux-${influx_arch}.tar.gz | tar xvfz - --strip=1 -C /usr/src/ ; chmod +x /usr/src/influx ; mv /usr/src/influx /usr/sbin/ ; else echo >&2 "Unable to build Influx 2 on this system" ; fi ; \
clone_git_repo https://github.com/influxdata/influxdb "${INFLUX1_CLIENT_VERSION}" && \
go build -o /usr/sbin/influxd ./cmd/influxd && \
strip /usr/sbin/influxd && \
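Two changes land in this Dockerfile hunk: the case statement now matches both arm64 and aarch64 (uname -m reports either spelling depending on the platform), and the single-bracket tests become double brackets so they no longer break when the tested variable is empty or unset. A standalone sketch of the same pattern with generic names, assuming bash:

    #!/usr/bin/env bash
    # Minimal sketch of the architecture mapping above; names are generic.
    arch="$(uname -m)"
    case "$arch" in
        x86_64)          mssql=true ; influx_arch=amd64 ;;
        arm64 | aarch64) mssql=true ; influx_arch=arm64 ;;  # both spellings map to the same build
        *)               mssql=""   ;;                      # unsupported: leave empty
    esac

    # [[ ]] tolerates an empty $mssql; with single brackets the unquoted expansion
    # would collapse to "[ = true ]" and the test would error out.
    if [[ $mssql = "true" ]] ; then
        echo "installing MSSQL tools (influx arch: ${influx_arch})"
    else
        echo "skipping MSSQL installation" >&2
    fi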

View File

@@ -325,7 +325,7 @@ If `DEFAULT_BACKUP_LOCATION` = `FILESYSTEM` then the following options are used:
| Variable | Description | Default |
| ------------------------------------ | ----------------------------------------------------------------------------------------------------- | ------------------------------------- |
| `DEFAULT_CREATE_LATEST_SYMLINK` | Create a symbolic link pointing to last backup in this format: `latest-(DB_TYPE)-(DB_NAME)-(DB_HOST)` | `TRUE` |
| `DEFAULT_CREATE_LATEST_SYMLINK` | Create a symbolic link pointing to last backup in this format: `latest-(DB_TYPE)_(DB_NAME)_(DB_HOST)` | `TRUE` |
| `DEFAULT_FILESYSTEM_PATH` | Directory where the database dumps are kept. | `/backup` |
| `DEFAULT_FILESYSTEM_PATH_PERMISSION` | Permissions to apply to backup directory | `700` |
| `DEFAULT_FILESYSTEM_ARCHIVE_PATH` | Optional Directory where the database dumps archives are kept | `${DEFAULT_FILESYSTEM_PATH}/archive/` |
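The corrected row documents the symlink name the container creates when DEFAULT_CREATE_LATEST_SYMLINK is TRUE: underscores between the type, name, and host fields rather than hyphens. A small sketch of the resulting layout, assuming a MariaDB job named example on host db-host (file names are illustrative):

    # Illustrative only: backup file plus the "latest" symlink in the corrected format.
    backup_file="mariadb_example_db-host_20240114-202208.sql.zst"
    ln -sf "${backup_file}" "/backup/latest-mariadb_example_db-host"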

View File

@@ -31,8 +31,8 @@ services:
- BACKUP_JOB_CONCURRENCY=1 # Only run one job at a time
- DEFAULT_CHECKSUM=NONE # Don't create checksums
- DEFAULT_COMPRESSION=ZSTD # Compress all with ZSTD
- DEFAULT_DUMP_INTERVAL=1440 # Backup every 1440 minutes
- DEFAULT_DUMP_BEGIN=0000 # Start backing up at midnight
- DEFAULT_BACKUP_INTERVAL=1440 # Backup every 1440 minutes
- DEFAULT_BACKUP_BEGIN=0000 # Start backing up at midnight
- DEFAULT_CLEANUP_TIME=8640 # Cleanup backups after a week
- DB01_TYPE=mariadb
@@ -40,8 +40,8 @@ services:
- DB01_NAME=example
- DB01_USER=example
- DB01_PASS=examplepassword
- DB01_DUMP_INTERVAL=30 # (override) Backup every 30 minutes
- DB01_DUMP_BEGIN=+1 # (override) Backup starts immediately
- DB01_BACKUP_INTERVAL=30 # (override) Backup every 30 minutes
- DB01_BACKUP_BEGIN=+1 # (override) Backup starts immediately
- DB01_CLEANUP_TIME=180 # (override) Clean up backups when they are older than 180 minutes
- DB01_CHECKSUM=SHA1 # (override) Create a SHA1 checksum
- DB01_COMPRESSION=GZ # (override) Compress with GZIP
@@ -51,8 +51,8 @@ services:
#- DB02_NAME=example
#- DB02_USER=example
#- DB02_PASS=examplepassword
#- DB02_DUMP_INTERVAL=60 # (override) Backup every 60 minutes
#- DB02_DUMP_BEGIN=+10 # (override) Backup starts in ten minutes
#- DB02_BACKUP_INTERVAL=60 # (override) Backup every 60 minutes
#- DB02_BACKUP_BEGIN=+10 # (override) Backup starts in ten minutes
#- DB02_CLEANUP_TIME=240 # (override) Clean up backups when they are older than 240 minutes
#- DB02_CHECKSUM=MD5 # (override) Create an MD5 checksum
#- DB02_COMPRESSION=BZ # (override) Compress with BZIP
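The compose example above switches the _DUMP_ variable names to _BACKUP_ and keeps the same override pattern: a DBxx_* value takes precedence over the corresponding DEFAULT_* value. A rough sketch of that precedence, not the actual implementation in the image:

    # Sketch of the override behaviour shown in the comments above (not the real code):
    # the per-database value wins, otherwise the DEFAULT_ value applies.
    backup_interval="${DB01_BACKUP_INTERVAL:-${DEFAULT_BACKUP_INTERVAL}}"
    backup_begin="${DB01_BACKUP_BEGIN:-${DEFAULT_BACKUP_BEGIN}}"
    echo "DB01 backs up every ${backup_interval} minutes, starting at ${backup_begin}"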

View File

@@ -45,7 +45,7 @@ services:
- DB01_NAME=test1 # Create this database
- DB01_USER=sa
- DB01_PASS=5hQa0utRFBpIY3yhoIyE
- DB01_DUMP_INTERVAL=5 # backup every 5 minutes
- DB01_BACKUP_INTERVAL=5 # backup every 5 minutes
# - DB01_DUMP_BEGIN=0000 # backup starts at midnight; if unset, backups start immediately
- DB01_CLEANUP_TIME=60 # clean up backups when they are older than 60 minutes
- DB01_CHECKSUM=SHA1 # Set Checksum to be SHA1

View File

@@ -46,7 +46,7 @@ services:
- DB01_NAME=test1
- DB01_USER=sa
- DB01_PASS=5hQa0utRFBpIY3yhoIyE
- DB01_DUMP_INTERVAL=1 # backup every minute
- DB01_BACKUP_INTERVAL=1 # backup every minute
# - DB01_DUMP_BEGIN=0000 # backup starts at midnight; if unset, backups start immediately
- DB01_CLEANUP_TIME=5 # clean up backups when they are older than 5 minutes
- DB01_CHECKSUM=NONE

View File

@@ -107,8 +107,8 @@ while true; do
s6-svc -d /var/run/s6/legacy-services/dbbackup-{{BACKUP_NUMBER}}
else
if [ ! "${time_cron}" = "true" ]; then
print_notice "Sleeping for another $(($backup_job_backup_interval*60-backup_job_total_time)) seconds. Waking up at $(date -d@"$(( $(date +%s)+$(($backup_job_backup_interval*60-backup_job_total_time))))" +'%Y-%m-%d %T %Z') "
silent sleep $(($backup_job_backup_interval*60-backup_job_total_time))
print_notice "Sleeping for another $((backup_job_backup_interval*60-backup_job_total_time)) seconds. Waking up at $(date -d@"$(( $(date +%s)+$((backup_job_backup_interval*60-backup_job_total_time))))" +'%Y-%m-%d %T %Z') "
silent sleep $((backup_job_backup_interval*60-backup_job_total_time))
else
time_last_run=$(date +"%s")
timer cron "${backup_job_backup_begin}" "${time_current}" "${time_last_run}"
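The only functional content here is the sleep calculation; the change just drops the redundant $ prefixes inside $(( )). The arithmetic subtracts the time the job took from the configured interval so the next run starts one interval after the previous one began. A self-contained sketch with invented values:

    # Sketch of the sleep/wake calculation above; values are examples.
    backup_interval_minutes=30
    job_runtime_seconds=75   # how long this backup run took
    sleep_for=$((backup_interval_minutes*60 - job_runtime_seconds))
    wake_at=$(date -d@"$(( $(date +%s) + sleep_for ))" +'%Y-%m-%d %T %Z')
    echo "Sleeping ${sleep_for} seconds, waking up at ${wake_at}"
    sleep "${sleep_for}"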

View File

@@ -153,6 +153,11 @@ bootstrap_variables() {
fi
##
if grep -qo ".*_NAME='.*'" "${backup_instance_vars}"; then
print_debug "[bootstrap_variables] [backup_init] Found _NAME variable with quotes"
sed -i "s|_NAME='\(.*\)'|_NAME=\1|g" "${backup_instance_vars}"
fi
if grep -qo ".*_PASS='.*'" "${backup_instance_vars}"; then
print_debug "[bootstrap_variables] [backup_init] Found _PASS variable with quotes"
sed -i "s|_PASS='\(.*\)'|_PASS=\1|g" "${backup_instance_vars}"
@@ -163,6 +168,11 @@ bootstrap_variables() {
sed -i "s|MONGO_CUSTOM_URI='\(.*\)'|MONGO_CUSTOM_URI=\1|g" "${backup_instance_vars}"
fi
if grep -qo ".*_OPTS='.*'" "${backup_instance_vars}"; then
print_debug "[bootstrap_variables] [backup_init] Found _OPTS variable with quotes"
sed -i "s|_OPTS='\(.*\)'|_OPTS=\1|g" "${backup_instance_vars}"
fi
transform_backup_instance_variable() {
if grep -q "^DB${1}_${2}=" "${backup_instance_vars}" && [ "$(grep "^DB${1}_${2}=" "${backup_instance_vars}" | cut -d = -f2)" != "unset" ]; then
export "$3"="$(grep "^DB${1}_${2}=" "${backup_instance_vars}" | cut -d = -f2-)"
@@ -199,7 +209,7 @@ bootstrap_variables() {
transform_backup_instance_variable "${backup_instance_number}" ENCRYPT_PASSPHRASE backup_job_encrypt_passphrase
transform_backup_instance_variable "${backup_instance_number}" ENCRYPT_PRIVATE_KEY backup_job_encrypt_private_key
transform_backup_instance_variable "${backup_instance_number}" ENCRYPT_PUBLIC_KEY backup_job_encrypt_public_key
transform_backup_instance_variable "${backup_instance_number}" EXTRA_DUMP_OPTS backup_job_extra_dump_opts
transform_backup_instance_variable "${backup_instance_number}" EXTRA_BACKUP_OPTS backup_job_extra_backup_opts
transform_backup_instance_variable "${backup_instance_number}" EXTRA_ENUMERATION_OPTS backup_job_extra_enumeration_opts
transform_backup_instance_variable "${backup_instance_number}" EXTRA_OPTS backup_job_extra_opts
transform_backup_instance_variable "${backup_instance_number}" FILESYSTEM_ARCHIVE_PATH backup_job_filesystem_archive_path
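The added blocks strip surrounding single quotes from _NAME and _OPTS values in the per-instance variable file, which is what lets those values carry spaces. A throwaway demonstration of the same grep/sed step against a scratch file rather than the real ${backup_instance_vars}:

    # Demonstration only: strip surrounding single quotes from an _OPTS assignment.
    vars_file=$(mktemp)
    echo "DB01_EXTRA_BACKUP_OPTS='--single-transaction --quick'" > "${vars_file}"
    if grep -qo ".*_OPTS='.*'" "${vars_file}"; then
        sed -i "s|_OPTS='\(.*\)'|_OPTS=\1|g" "${vars_file}"   # drop the quotes, keep the spaces
    fi
    cat "${vars_file}"   # DB01_EXTRA_BACKUP_OPTS=--single-transaction --quick
    rm -f "${vars_file}"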
@@ -505,23 +515,20 @@ backup_influx() {
backup_job_filename=influx_${db}_${backup_job_db_host#*//}_${now}
backup_job_filename_base=influx_${db}_${backup_job_db_host#*//}
if var_true "${DEBUG_BACKUP_INFLUX}" ; then debug off; fi
compression
pre_dbbackup "${db}"
write_log notice "Dumping Influx database: '${db}'"
if var_true "${DEBUG_BACKUP_INFLUX}" ; then debug on; fi
run_as_user influxd backup ${influx_compression} ${bucket} -portable -host ${backup_job_db_host}:${backup_job_db_port} ${backup_job_extra_opts} ${backup_job_extra_dump_opts} "${TEMP_PATH}"/"${backup_job_filename_dir}"
run_as_user influxd backup ${influx_compression} ${bucket} -portable -host ${backup_job_db_host}:${backup_job_db_port} ${backup_job_extra_opts} ${backup_job_extra_backup_opts} "${temporary_directory}"/"${backup_job_filename}"
exit_code=$?
check_exit_code backup "${backup_job_filename_dir}"
write_log notice "Creating archive file of '${backup_job_filename_dir}' with tar ${compression_string}"
run_as_user tar cf - "${TEMP_PATH}"/"${backup_job_filename_dir}" | ${dir_compress_cmd} | run_as_user tee "${TEMP_PATH}"/"${backup_job_filename_dir}".tar"${extension}" > /dev/null
backup_job_filename=influx_${db}_${backup_job_db_host#*//}_${now}.tar${extension}
backup_job_filename_base=influx_${db}_${backup_job_db_host#*//}
check_exit_code backup "${backup_job_filename}"
compression
create_archive
if var_true "${DEBUG_BACKUP_INFLUX}" ; then debug off; fi
timer backup finish
file_encryption
generate_checksum
move_dbbackup
check_exit_code move "${backup_job_filename_dir}"
check_exit_code move "${backup_job_filename}"
post_dbbackup "${db}"
cleanup_old_data
done
@@ -535,22 +542,20 @@ backup_influx() {
if var_true "${DEBUG_BACKUP_INFLUX}" ; then debug off; fi
backup_job_filename=influx2_${db}_${backup_job_db_host#*//}_${now}
backup_job_filename_base=influx2_${db}_${backup_job_db_host#*//}
compression
pre_dbbackup "${db}"
write_log notice "Dumping Influx2 database: '${db}'"
if var_true "${DEBUG_BACKUP_INFLUX}" ; then debug on; fi
run_as_user influx backup --org ${backup_job_db_user} ${bucket} --host ${backup_job_db_host}:${backup_job_db_port} --token ${backup_job_db_pass} ${backup_job_extra_opts} ${backup_job_extra_dump_opts} --compression none "${TEMP_PATH}"/"${backup_job_filename_dir}"
run_as_user influx backup --org ${backup_job_db_user} ${bucket} --host ${backup_job_db_host}:${backup_job_db_port} --token ${backup_job_db_pass} ${backup_job_extra_opts} ${backup_job_extra_backup_opts} --compression none "${temporary_directory}"/"${backup_job_filename}"
exit_code=$?
check_exit_code backup "${backup_job_filename_dir}"
if var_true "${DEBUG_BACKUP_INFLUX}" ; then debug off; fi
check_exit_code backup "${backup_job_filename}"
compression
create_archive
backup_job_filename=influx2_${db}_${backup_job_db_host#*//}_${now}.tar${extension}
backup_job_filename_base=influx2_${db}_${backup_job_db_host#*//}
if var_true "${DEBUG_BACKUP_INFLUX}" ; then debug off; fi
timer backup finish
file_encryption
generate_checksum
move_dbbackup
check_exit_code move "${backup_job_filename_dir}"
check_exit_code move "${backup_job_filename}"
post_dbbackup "${db}"
cleanup_old_data
done
@@ -571,15 +576,15 @@ backup_mongo() {
compression_string="and compressing with gzip"
fi
if [ -n "${backup_job_mongo_custom_uri}" ] ; then
mongo_backup_parameter="--uri=${backup_job_mongo_custom_uri} ${backup_job_extra_opts} ${backup_job_extra_dump_opts}"
mongo_backup_parameter="--uri=${backup_job_mongo_custom_uri} ${backup_job_extra_opts} ${backup_job_extra_backup_opts}"
else
mongo_backup_parameter="--host ${backup_job_db_host} --port ${backup_job_db_port} ${MONGO_USER_STR}${MONGO_PASS_STR}${MONGO_AUTH_STR}${MONGO_DB_STR} ${backup_job_extra_opts} ${backup_job_extra_dump_opts}"
mongo_backup_parameter="--host ${backup_job_db_host} --port ${backup_job_db_port} ${MONGO_USER_STR}${MONGO_PASS_STR}${MONGO_AUTH_STR}${MONGO_DB_STR} ${backup_job_extra_opts} ${backup_job_extra_backup_opts}"
fi
if var_true "${DEBUG_BACKUP_MONGO}" ; then debug off; fi
pre_dbbackup "${backup_job_db_name}"
write_log notice "Dumping MongoDB database: '${backup_job_db_name}' ${compression_string}"
if var_true "${DEBUG_BACKUP_MONGO}" ; then debug on; fi
silent run_as_user ${play_fair} mongodump --archive=${TEMP_PATH}/${backup_job_filename} ${mongo_compression} ${mongo_backup_parameter}
silent run_as_user ${play_fair} mongodump --archive=${temporary_directory}/${backup_job_filename} ${mongo_compression} ${mongo_backup_parameter}
exit_code=$?
if var_true "${DEBUG_BACKUP_MONGO}" ; then debug off; fi
check_exit_code backup "${backup_job_filename}"
@@ -602,14 +607,14 @@ backup_mssql() {
pre_dbbackup "${backup_job_db_name}"
write_log notice "Dumping MSSQL database: '${backup_job_db_name}'"
if var_true "${DEBUG_BACKUP_MSSQL}" ; then debug on; fi
silent run_as_user ${play_fair} /opt/mssql-tools18/bin/sqlcmd -C -S ${backup_job_db_host}\,${backup_job_db_port} -U ${backup_job_db_user} -P ${backup_job_db_pass} -Q "BACKUP DATABASE [${backup_job_db_name}] TO DISK = N'${TEMP_PATH}/${backup_job_filename}' WITH NOFORMAT, NOINIT, NAME = '${backup_job_db_name}-full', SKIP, NOREWIND, NOUNLOAD, STATS = 10"
silent run_as_user ${play_fair} /opt/mssql-tools18/bin/sqlcmd -C -S ${backup_job_db_host}\,${backup_job_db_port} -U ${backup_job_db_user} -P ${backup_job_db_pass} -Q "BACKUP DATABASE [${backup_job_db_name}] TO DISK = N'${temporary_directory}/${backup_job_filename}' WITH NOFORMAT, NOINIT, NAME = '${backup_job_db_name}-full', SKIP, NOREWIND, NOUNLOAD, STATS = 10"
exit_code=$?
if var_true "${DEBUG_BACKUP_MSSQL}" ; then debug off; fi
backup_job_filename_original=${backup_job_filename}
compression
pre_dbbackup all
if var_true "${DEBUG_BACKUP_MSSQL}" ; then debug on; fi
run_as_user ${compress_cmd} "${TEMP_PATH}/${backup_job_filename_original}"
run_as_user ${compress_cmd} "${temporary_directory}/${backup_job_filename_original}"
check_exit_code backup "${backup_job_filename}"
if var_true "${DEBUG_BACKUP_MSSQL}" ; then debug off; fi
timer backup finish
@@ -627,13 +632,13 @@ backup_mssql() {
pre_dbbackup "${backup_job_db_name}"
write_log notice "Dumping MSSQL database: '${backup_job_db_name}'"
if var_true "${DEBUG_BACKUP_MSSQL}" ; then debug on; fi
silent run_as_user ${play_fair} /opt/mssql-tools18/bin/sqlcmd -C -S ${backup_job_db_host}\,${backup_job_db_port} -U ${backup_job_db_user} -P ${backup_job_db_pass} -Q "BACKUP LOG [${backup_job_db_name}] TO DISK = N'${TEMP_PATH}/${backup_job_filename}' WITH NOFORMAT, NOINIT, NAME = '${backup_job_db_name}-log', SKIP, NOREWIND, NOUNLOAD, STATS = 10"
silent run_as_user ${play_fair} /opt/mssql-tools18/bin/sqlcmd -C -S ${backup_job_db_host}\,${backup_job_db_port} -U ${backup_job_db_user} -P ${backup_job_db_pass} -Q "BACKUP LOG [${backup_job_db_name}] TO DISK = N'${temporary_directory}/${backup_job_filename}' WITH NOFORMAT, NOINIT, NAME = '${backup_job_db_name}-log', SKIP, NOREWIND, NOUNLOAD, STATS = 10"
exit_code=$?
if var_true "${DEBUG_BACKUP_MSSQL}" ; then debug off; fi
backup_job_filename_original=${backup_job_filename}
compression
pre_dbbackup all
run_as_user ${compress_cmd} "${TEMP_PATH}/${backup_job_filename_original}"
run_as_user ${compress_cmd} "${temporary_directory}/${backup_job_filename_original}"
file_encryption
timer backup finish
@@ -683,7 +688,7 @@ backup_mysql() {
pre_dbbackup "${db}"
write_log notice "Dumping MySQL/MariaDB database: '${db}' ${compression_string}"
if var_true "${DEBUG_BACKUP_MYSQL}" ; then debug on; fi
run_as_user ${play_fair} mysqldump --max-allowed-packet=${backup_job_mysql_max_allowed_packet} -h ${backup_job_db_host} -P ${backup_job_db_port} -u${backup_job_db_user} ${events} ${single_transaction} ${stored_procedures} ${mysql_tls_args} ${backup_job_extra_opts} ${backup_job_extra_dump_opts} $db | ${compress_cmd} | run_as_user tee "${TEMP_PATH}"/"${backup_job_filename}" > /dev/null
run_as_user ${play_fair} mysqldump --max-allowed-packet=${backup_job_mysql_max_allowed_packet} -h ${backup_job_db_host} -P ${backup_job_db_port} -u${backup_job_db_user} ${events} ${single_transaction} ${stored_procedures} ${mysql_tls_args} ${backup_job_extra_opts} ${backup_job_extra_backup_opts} $db | ${compress_cmd} | run_as_user tee "${temporary_directory}"/"${backup_job_filename}" > /dev/null
exit_code=$?
if var_true "${DEBUG_BACKUP_MYSQL}" ; then debug off; fi
check_exit_code backup "${backup_job_filename}"
@@ -704,7 +709,7 @@ backup_mysql() {
pre_dbbackup all
write_log notice "Dumping all MySQL / MariaDB databases: '$(echo ${db_names} | xargs | tr ' ' ',')' ${compression_string}"
if var_true "${DEBUG_BACKUP_MYSQL}" ; then debug on; fi
run_as_user ${play_fair} mysqldump --max-allowed-packet=${backup_job_mysql_max_allowed_packet} -h ${backup_job_db_host} -P ${backup_job_db_port} -u${backup_job_db_user} ${events} ${single_transaction} ${stored_procedures} ${mysql_tls_args} ${backup_job_extra_opts} ${backup_job_extra_dump_opts} --databases $(echo ${db_names} | xargs) | ${compress_cmd} | run_as_user tee "${TEMP_PATH}"/"${backup_job_filename}" > /dev/null
run_as_user ${play_fair} mysqldump --max-allowed-packet=${backup_job_mysql_max_allowed_packet} -h ${backup_job_db_host} -P ${backup_job_db_port} -u${backup_job_db_user} ${events} ${single_transaction} ${stored_procedures} ${mysql_tls_args} ${backup_job_extra_opts} ${backup_job_extra_backup_opts} --databases $(echo ${db_names} | xargs) | ${compress_cmd} | run_as_user tee "${temporary_directory}"/"${backup_job_filename}" > /dev/null
exit_code=$?
if var_true "${DEBUG_BACKUP_MYSQL}" ; then debug off; fi
check_exit_code backup "${backup_job_filename}"
@@ -726,7 +731,7 @@ backup_pgsql() {
pre_dbbackup "globals"
print_notice "Dumping PostgresSQL globals: with 'pg_dumpall -g' ${compression_string}"
if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug on; fi
run_as_user ${play_fair} pg_dumpall -h "${backup_job_db_host}" -U "${backup_job_db_user}" -p "${backup_job_db_port}" -g ${backup_job_extra_opts} ${backup_job_extra_dump_opts} | ${compress_cmd} | run_as_user tee "${TEMP_PATH}"/"${backup_job_filename}" > /dev/null
run_as_user ${play_fair} pg_dumpall -h "${backup_job_db_host}" -U "${backup_job_db_user}" -p "${backup_job_db_port}" -g ${backup_job_extra_opts} ${backup_job_extra_backup_opts} | ${compress_cmd} | run_as_user tee "${temporary_directory}"/"${backup_job_filename}" > /dev/null
exit_code=$?
if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug off; fi
check_exit_code "${backup_job_filename}"
@@ -775,7 +780,7 @@ backup_pgsql() {
pre_dbbackup "${db}"
write_log notice "Dumping PostgresSQL database: '${db}' ${compression_string}"
if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug on; fi
run_as_user ${play_fair} pg_dump -h "${backup_job_db_host}" -p "${backup_job_db_port}" -U "${backup_job_db_user}" $db ${backup_job_extra_opts} ${backup_job_extra_dump_opts} | ${compress_cmd} | run_as_user tee "${TEMP_PATH}"/"${backup_job_filename}" > /dev/null
run_as_user ${play_fair} pg_dump -h "${backup_job_db_host}" -p "${backup_job_db_port}" -U "${backup_job_db_user}" $db ${backup_job_extra_opts} ${backup_job_extra_backup_opts} | ${compress_cmd} | run_as_user tee "${temporary_directory}"/"${backup_job_filename}" > /dev/null
exit_code=$?
if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug off; fi
check_exit_code backup "${backup_job_filename}"
@@ -805,7 +810,7 @@ backup_pgsql() {
for x_db_name in ${tmp_db_names} ; do
pgexclude_arg=$(echo ${pgexclude_arg} --exclude-database=${x_db_name})
done
run_as_user ${play_fair} pg_dumpall -h ${backup_job_db_host} -p ${backup_job_db_port} -U ${backup_job_db_user} ${pgexclude_arg} ${backup_job_extra_opts} ${backup_job_extra_dump_opts} | ${compress_cmd} | run_as_user tee "${TEMP_PATH}"/"${backup_job_filename}" > /dev/null
run_as_user ${play_fair} pg_dumpall -h ${backup_job_db_host} -p ${backup_job_db_port} -U ${backup_job_db_user} ${pgexclude_arg} ${backup_job_extra_opts} ${backup_job_extra_backup_opts} | ${compress_cmd} | run_as_user tee "${temporary_directory}"/"${backup_job_filename}" > /dev/null
exit_code=$?
if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug off; fi
check_exit_code backup "${backup_job_filename}"
@@ -826,7 +831,7 @@ backup_redis() {
backup_job_filename=redis_all_${backup_job_db_host,,}_${now}.rdb
backup_job_filename_base=redis_${backup_job_db_host,,}
if var_true "${DEBUG_BACKUP_REDIS}" ; then debug on; fi
echo bgsave | silent run_as_user ${play_fair} redis-cli -h ${backup_job_db_host} -p ${backup_job_db_port} ${REDIS_PASS_STR} --rdb ${TEMP_PATH}/${backup_job_filename} ${backup_job_extra_opts} ${backup_job_extra_dump_opts}
echo bgsave | silent run_as_user ${play_fair} redis-cli -h ${backup_job_db_host} -p ${backup_job_db_port} ${REDIS_PASS_STR} --rdb ${temporary_directory}/${backup_job_filename} ${backup_job_extra_opts} ${backup_job_extra_backup_opts}
sleep 10
try=5
while [ $try -gt 0 ] ; do
@@ -846,7 +851,7 @@ backup_redis() {
compression
pre_dbbackup all
if var_true "${DEBUG_BACKUP_REDIS}" ; then debug on; fi
run_as_user ${compress_cmd} "${TEMP_PATH}/${backup_job_filename_original}"
run_as_user ${compress_cmd} "${temporary_directory}/${backup_job_filename_original}"
if var_true "${DEBUG_BACKUP_REDIS}" ; then debug off; fi
timer backup finish
check_exit_code backup "${backup_job_filename}"
@@ -867,16 +872,16 @@ backup_sqlite3() {
pre_dbbackup "${db}"
write_log notice "Dumping sqlite3 database: '${backup_job_db_host}' ${compression_string}"
if var_true "${DEBUG_BACKUP_SQLITE3}" ; then debug on; fi
silent ${play_fair} sqlite3 "${backup_job_db_host}" ".backup '${TEMP_PATH}/backup_${now}.sqlite3'"
silent ${play_fair} sqlite3 "${backup_job_db_host}" ".backup '${temporary_directory}/backup_${now}.sqlite3'"
exit_code=$?
check_exit_code backup "${backup_job_filename}"
if [ ! -f "${TEMP_PATH}"/backup_${now}.sqlite3 ] ; then
if [ ! -f "${temporary_directory}"/backup_${now}.sqlite3 ] ; then
print_error "SQLite3 backup failed! Exitting"
return 1
fi
compression
run_as_user ${play_fair} cat "${TEMP_PATH}"/backup_${now}.sqlite3 | ${dir_compress_cmd} | run_as_user tee "${TEMP_PATH}/${backup_job_filename}" > /dev/null
rm -rf "${TEMP_PATH}"/backup_${now}.sqlite3
run_as_user ${play_fair} cat "${temporary_directory}"/backup_${now}.sqlite3 | ${dir_compress_cmd} | run_as_user tee "${temporary_directory}/${backup_job_filename}" > /dev/null
rm -rf "${temporary_directory}"/backup_${now}.sqlite3
if var_true "${DEBUG_BACKUP_SQLITE3}" ; then debug off; fi
timer backup finish
file_encryption
@@ -1153,8 +1158,10 @@ compression() {
create_archive() {
if var_true "${DEBUG_CREATE_ARCHIVE}" ; then debug on; fi
if [ "${exit_code}" = "0" ] ; then
write_log notice "Creating archive file of '${backup_job_filename_dir}' with tar ${compression_string}"
run_as_user tar cf - "${TEMP_PATH}"/"${backup_job_filename_dir}" | ${dir_compress_cmd} | run_as_user tee "${TEMP_PATH}"/"${backup_job_filename_dir}".tar"${extension}" > /dev/null
write_log notice "Creating archive file of '${backup_job_filename}' with tar ${compression_string}"
run_as_user tar cf - "${temporary_directory}"/"${backup_job_filename_dir}" | ${dir_compress_cmd} | run_as_user tee "${temporary_directory}"/"${backup_job_filename_dir}".tar"${extension}" > /dev/null
backup_job_filename="${backup_job_filename_dir}".tar"${extension}"
rm -rf "${temporary_directory}"/"${backup_job_filename_dir}"
else
write_log error "Skipping creating archive file because backup did not complete successfully"
fi
@@ -1276,7 +1283,7 @@ file_encryption() {
print_notice "Encrypting with GPG Passphrase"
encrypt_routines_start_time=$(date +'%s')
encrypt_tmp_dir=$(run_as_user mktemp -d)
echo "${backup_job_encrypt_passphrase}" | silent run_as_user ${play_fair} gpg --batch --home ${encrypt_tmp_dir} --yes --passphrase-fd 0 -c "${TEMP_PATH}"/"${backup_job_filename}"
echo "${backup_job_encrypt_passphrase}" | silent run_as_user ${play_fair} gpg --batch --home ${encrypt_tmp_dir} --yes --passphrase-fd 0 -c "${temporary_directory}"/"${backup_job_filename}"
rm -rf "${encrypt_tmp_dir}"
elif [ -z "${backup_job_encrypt_passphrase}" ] && [ -n "${backup_job_encrypt_public_key}" ] && [ -n "${backup_job_encrypt_private_key}" ]; then
if [ -f "${backup_job_encrypt_private_key}" ]; then
@@ -1288,13 +1295,13 @@ file_encryption() {
silent run_as_user gpg --home ${encrypt_tmp_dir} --batch --import "${encrypt_tmp_dir}"/private_key.asc
print_debug "[file_encryption] [key] Encrypting to Public Key"
cat "${backup_job_encrypt_public_key}" | run_as_user tee "${encrypt_tmp_dir}"/public_key.asc > /dev/null
silent run_as_user ${play_fair} gpg --batch --yes --home "${encrypt_tmp_dir}" --encrypt --recipient-file "${encrypt_tmp_dir}"/public_key.asc "${TEMP_PATH}"/"${backup_job_filename}"
silent run_as_user ${play_fair} gpg --batch --yes --home "${encrypt_tmp_dir}" --encrypt --recipient-file "${encrypt_tmp_dir}"/public_key.asc "${temporary_directory}"/"${backup_job_filename}"
rm -rf "${encrypt_tmp_dir}"
fi
fi
if [ -f "${TEMP_PATH}"/"${backup_job_filename}".gpg ]; then
if [ -f "${temporary_directory}"/"${backup_job_filename}".gpg ]; then
print_debug "[file_encryption] Deleting original file"
rm -rf "${TEMP_PATH:?}"/"${backup_job_filename:?}"
rm -rf "${temporary_directory:?}"/"${backup_job_filename:?}"
backup_job_filename="${backup_job_filename}.gpg"
encrypt_routines_finish_time=$(date +'%s')
@@ -1333,7 +1340,7 @@ generate_checksum() {
checksum_routines_start_time=$(date +'%s')
write_log notice "Generating ${checksum_extension^^} sum for '${backup_job_filename}'"
cd "${TEMP_PATH}"
cd "${temporary_directory}"
run_as_user ${checksum_command} "${backup_job_filename}" | run_as_user tee "${backup_job_filename}"."${checksum_extension}" > /dev/null
chmod ${backup_job_filesystem_permission} "${backup_job_filename}"."${checksum_extension}"
checksum_value=$(run_as_user cat "${backup_job_filename}"."${checksum_extension}" | awk '{print $1}')
@@ -1381,7 +1388,7 @@ notify() {
if [ -z "${SMTP_HOST}" ] ; then write_log error "[notifications] No SMTP_HOST variable set - Skipping sending Email notifications" ; skip_mail=true ; fi
if [ -z "${SMTP_PORT}" ] ; then write_log error "[notifications] No SMTP_PORT variable set - Skipping sending Email notifications" ; skip_mail=true ; fi
if var_nottrue "${skip_mail}" ; then
if ! grep -q ^from /etc/msmptrc ; then
if ! grep -q ^from /etc/msmtprc ; then
echo "from ${MAIL_FROM}" >> /etc/msmtprc
fi
mail_recipients=$(echo "${MAIL_TO}" | tr "," "\n")
@@ -1505,8 +1512,8 @@ EOF
move_dbbackup() {
if var_true "${DEBUG_MOVE_DBBACKUP}" ; then debug on; fi
if [ "${exit_code}" = "0" ] ; then
dbbackup_size="$(run_as_user stat -c%s "${TEMP_PATH}"/"${backup_job_filename}")"
dbbackup_date="$(run_as_user date -r "${TEMP_PATH}"/"${backup_job_filename}" +'%s')"
dbbackup_size="$(run_as_user stat -c%s "${temporary_directory}"/"${backup_job_filename}")"
dbbackup_date="$(run_as_user date -r "${temporary_directory}"/"${backup_job_filename}" +'%s')"
case "${backup_job_size_value,,}" in
"b" | "bytes" )
@@ -1520,37 +1527,37 @@ move_dbbackup() {
;;
esac
if [ "${backup_job_size_value}" = "1" ] ; then
filesize=$(run_as_user stat -c%s "${TEMP_PATH}"/"${backup_job_filename}")
filesize=$(run_as_user stat -c%s "${temporary_directory}"/"${backup_job_filename}")
write_log notice "Backup of '${backup_job_filename}' created with the size of ${filesize} bytes"
else
filesize=$(run_as_user du -h "${TEMP_PATH}"/"${backup_job_filename}" | awk '{ print $1}')
filesize=$(run_as_user du -h "${temporary_directory}"/"${backup_job_filename}" | awk '{ print $1}')
write_log notice "Backup of '${backup_job_filename}' created with the size of ${filesize}"
fi
chmod "${backup_job_filesystem_permission}" "${TEMP_PATH}"/"${backup_job_filename}"
chmod "${backup_job_filesystem_permission}" "${temporary_directory}"/"${backup_job_filename}"
case "${backup_job_backup_location,,}" in
"file" | "filesystem" )
write_log debug "Moving backup to filesystem"
run_as_user mkdir -p "${backup_job_filesystem_path}"
if [ "${backup_job_checksum,,}" != "none" ] ; then run_as_user mv "${TEMP_PATH}"/*."${checksum_extension}" "${backup_job_filesystem_path}"/ ; fi
if [ "${backup_job_checksum,,}" != "none" ] ; then run_as_user mv "${temporary_directory}"/*."${checksum_extension}" "${backup_job_filesystem_path}"/ ; fi
if var_true "${DEBUG_MOVE_DBBACKUP}"; then
cat <<EOF
## BEGIN Before Moving file from TEMP_PATH $(TZ=${TIMEZONE} date)
## BEGIN Before Moving file from temporary_directory $(TZ=${TIMEZONE} date)
##
$(ls -l "${TEMP_PATH}"/*)
$(ls -l "${temporary_directory}"/*)
## END
EOF
fi
run_as_user mv "${TEMP_PATH}"/"${backup_job_filename}" "${backup_job_filesystem_path}"/"${backup_job_filename}"
run_as_user mv "${temporary_directory}"/"${backup_job_filename}" "${backup_job_filesystem_path}"/"${backup_job_filename}"
move_exit_code=$?
if var_true "${DEBUG_MOVE_DBBACKUP}"; then
cat <<EOF
## BEGIN After Moving file from TEMP_PATH $(TZ=${TIMEZONE} date)
## BEGIN After Moving file from temporary_directory $(TZ=${TIMEZONE} date)
##
$(ls -l "${TEMP_PATH}"/*)
$(ls -l "${temporary_directory}"/*)
## END
@@ -1592,14 +1599,14 @@ EOF
[[ ( -n "${backup_job_s3_host}" ) ]] && PARAM_AWS_ENDPOINT_URL=" --endpoint-url ${backup_job_s3_protocol}://${backup_job_s3_host}"
silent aws ${PARAM_AWS_ENDPOINT_URL} s3 cp ${TEMP_PATH}/${backup_job_filename} s3://${backup_job_s3_bucket}/${backup_job_s3_path}/${backup_job_filename} ${s3_ssl} ${s3_ca_cert} ${backup_job_s3_extra_opts}
silent aws ${PARAM_AWS_ENDPOINT_URL} s3 cp ${temporary_directory}/${backup_job_filename} s3://${backup_job_s3_bucket}/${backup_job_s3_path}/${backup_job_filename} ${s3_ssl} ${s3_ca_cert} ${backup_job_s3_extra_opts}
move_exit_code=$?
if [ "${backup_job_checksum}" != "none" ] ; then
silent run_as_user ${play_fair} aws ${PARAM_AWS_ENDPOINT_URL} s3 cp ${TEMP_PATH}/*.${checksum_extension} s3://${backup_job_s3_bucket}/${backup_job_s3_path}/ ${s3_ssl} ${s3_ca_cert} ${backup_job_s3_extra_opts}
run_as_user rm -rf "${TEMP_PATH}"/"${backup_job_filename}"."${checksum_extension}"
silent run_as_user ${play_fair} aws ${PARAM_AWS_ENDPOINT_URL} s3 cp ${temporary_directory}/*.${checksum_extension} s3://${backup_job_s3_bucket}/${backup_job_s3_path}/ ${s3_ssl} ${s3_ca_cert} ${backup_job_s3_extra_opts}
run_as_user rm -rf "${temporary_directory}"/"${backup_job_filename}"."${checksum_extension}"
fi
run_as_user rm -rf "${TEMP_PATH}"/"${backup_job_filename}"
run_as_user rm -rf "${temporary_directory}"/"${backup_job_filename}"
;;
"blobxfer" )
if [ -z "${backup_job_blobxfer_storage_account}" ] || [ -z "${backup_job_blobxfer_storage_account_key}" ]; then
@@ -1610,15 +1617,15 @@ EOF
write_log info "Moving backup to external storage with blobxfer"
mkdir -p "${backup_job_filesystem_path}"
if [ "${backup_job_checksum}" != "none" ] ; then run_as_user mv "${TEMP_PATH}"/*."${checksum_extension}" "${backup_job_filesystem_path}"/; fi
if [ "${backup_job_checksum}" != "none" ] ; then run_as_user mv "${temporary_directory}"/*."${checksum_extension}" "${backup_job_filesystem_path}"/; fi
run_as_user mv "${TEMP_PATH}"/"${backup_job_filename}" "${backup_job_filesystem_path}"/"${backup_job_filename}"
run_as_user mv "${temporary_directory}"/"${backup_job_filename}" "${backup_job_filesystem_path}"/"${backup_job_filename}"
silent run_as_user ${play_fair} blobxfer upload --mode file --remote-path ${backup_job_blobxfer_remote_path} --storage-account ${backup_job_blobxfer_storage_account} --storage-account-key ${backup_job_blobxfer_storage_account_key} --local-path ${backup_job_filesystem_path}
move_exit_code=$?
if [ "${backup_job_checksum}" != "none" ] ; then run_as_user rm -rf "${TEMP_PATH}"/"${backup_job_filename}"."${checksum_extension}" ; fi
run_as_user rm -rf "${TEMP_PATH}"/"${backup_job_filename}"
if [ "${backup_job_checksum}" != "none" ] ; then run_as_user rm -rf "${temporary_directory}"/"${backup_job_filename}"."${checksum_extension}" ; fi
run_as_user rm -rf "${temporary_directory}"/"${backup_job_filename}"
fi
;;
esac
@@ -1626,13 +1633,15 @@ EOF
write_log error "Skipping moving DB Backup to final location because backup did not complete successfully"
fi
run_as_user rm -rf "${TEMP_PATH}"/"${backup_job_filename}"
run_as_user rm -rf "${temporary_directory}"/"${backup_job_filename}"
if var_true "${DEBUG_MOVE_DBBACKUP}" ; then debug off; fi
}
prepare_dbbackup() {
timer backup start
now=$(run_as_user date +"%Y%m%d-%H%M%S")
temporary_directory=$(mktemp -d -p "${TEMP_PATH}" -t ${backup_instance_number}_dbbackup.XXXXXX)
chown -R "${DBBACKUP_USER}":"${DBBACKUP_GROUP}" "${temporary_directory}"
backup_job_filename_base=${dbtype}_${backup_job_db_name,,}_${backup_job_db_host,,}
backup_job_filename=${dbtype}_${backup_job_db_name,,}_${backup_job_db_host,,}_${now}.sql
}
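prepare_dbbackup now carves out a dedicated temporary directory per job under TEMP_PATH and hands it to the backup user; post_dbbackup (a few lines below) removes it again. That is the per-job isolation described in the 4.0.30 changelog entry. A minimal sketch of the lifecycle, with example paths and an example user/group:

    # Sketch of the per-job temporary directory lifecycle (paths and ownership are examples).
    TEMP_PATH=/tmp/backups
    job_dir=$(mktemp -d -p "${TEMP_PATH}" -t 01_dbbackup.XXXXXX)
    chown -R dbbackup:dbbackup "${job_dir}"   # let the unprivileged backup user write here
    # ... the job writes its dump, archive, and checksum inside ${job_dir} only ...
    rm -rf "${job_dir}"                       # removed again once the job finishes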
@@ -1747,6 +1756,8 @@ EOZP
write_log notice "DB Backup for '${1}' time taken: $(echo ${dbbackup_total_time} | awk '{printf "Hours: %d Minutes: %02d Seconds: %02d", $1/3600, ($1/60)%60, $1%60}')"
if var_true "${DEBUG_POST_DBBACKUP}" ; then debug on; fi
cd "${TEMP_PATH}"
rm -rf "${temporary_directory}"
}
process_limiter() {