Compare commits

..

4 Commits
4.0.4 ... 4.0.6

Author SHA1 Message Date
dave@tiredofit.ca
5cf00a8b8e Release 4.0.6 - See CHANGELOG.md 2023-11-10 17:53:47 -08:00
dave@tiredofit.ca
2bc730013e Release 4.0.5 - See CHANGELOG.md 2023-11-10 07:25:25 -08:00
Dave Conroy
d628ed8ff4 Expand upon DEBUG_ statements to give more detail 2023-11-10 07:24:31 -08:00
Dave Conroy
d7399667a1 Update _FILESYSTEM_PERMISSIONS from 700 to 600 and add undocumented DBBACKUP_USER|GROUP variable 2023-11-10 07:16:56 -08:00
4 changed files with 108 additions and 29 deletions

View File

@@ -1,3 +1,22 @@
## 4.0.6 2023-11-10 <dave at tiredofit dot ca>
### Added
- Add additional DEBUG_ statements
### Changed
- Fix issue with Influx DB not properly detecting the correct version
## 4.0.5 2023-11-10 <dave at tiredofit dot ca>
### Added
- Add undocumented DBBACKUP_USER|GROUP environment variables for troubleshooting permissions
- Add more verbosity when using DEBUG_ statements
### Changed
- Change _FILESYSTEM_PERMISSION to 600 from 700
## 4.0.4 2023-11-09 <dave at tiredofit dot ca> ## 4.0.4 2023-11-09 <dave at tiredofit dot ca>
### Added ### Added

View File

@@ -327,7 +327,7 @@ If `DEFAULT_BACKUP_LOCATION` = `FILESYSTEM` then the following options are used:
| `DEFAULT_CREATE_LATEST_SYMLINK` | Create a symbolic link pointing to last backup in this format: `latest-(DB_TYPE)-(DB_NAME)-(DB_HOST)` | `TRUE` | | `DEFAULT_CREATE_LATEST_SYMLINK` | Create a symbolic link pointing to last backup in this format: `latest-(DB_TYPE)-(DB_NAME)-(DB_HOST)` | `TRUE` |
| `DEFAULT_FILESYSTEM_PATH` | Directory where the database dumps are kept. | `/backup` | | `DEFAULT_FILESYSTEM_PATH` | Directory where the database dumps are kept. | `/backup` |
| `DEFAULT_FILESYSTEM_ARCHIVE_PATH` | Optional Directory where the database dumps archives are kept | `${DEFAULT_FILESYSTEM_PATH}/archive/` | | `DEFAULT_FILESYSTEM_ARCHIVE_PATH` | Optional Directory where the database dumps archives are kept | `${DEFAULT_FILESYSTEM_PATH}/archive/` |
| `DEFAULT_FILESYSTEM_PERMISSION` | Directory and File permissions to apply to files. | `700` | | `DEFAULT_FILESYSTEM_PERMISSION` | Directory and File permissions to apply to files. | `600` |
###### S3 ###### S3
@@ -602,7 +602,7 @@ If `DB01_BACKUP_LOCATION` = `FILESYSTEM` then the following options are used:
| `DB01_CREATE_LATEST_SYMLINK` | Create a symbolic link pointing to last backup in this format: `latest-(DB_TYPE)-(DB_NAME)-(DB_HOST)` | `TRUE` | | `DB01_CREATE_LATEST_SYMLINK` | Create a symbolic link pointing to last backup in this format: `latest-(DB_TYPE)-(DB_NAME)-(DB_HOST)` | `TRUE` |
| `DB01_FILESYSTEM_PATH` | Directory where the database dumps are kept. | `/backup` | | `DB01_FILESYSTEM_PATH` | Directory where the database dumps are kept. | `/backup` |
| `DB01_FILESYSTEM_ARCHIVE_PATH` | Optional Directory where the database dumps archives are kept | `${DB01_FILESYSTEM_PATH}/archive/` | | `DB01_FILESYSTEM_ARCHIVE_PATH` | Optional Directory where the database dumps archives are kept | `${DB01_FILESYSTEM_PATH}/archive/` |
| `DB01_FILESYSTEM_PERMISSION` | Directory and File permissions to apply to files. | `700` | | `DB01_FILESYSTEM_PERMISSION` | Directory and File permissions to apply to files. | `600` |
###### S3 ###### S3

View File

@@ -1,6 +1,8 @@
#!/command/with-contenv bash #!/command/with-contenv bash
BACKUP_JOB_CONCURRENCY=${BACKUP_JOB_CONCURRENCY:-"1"} BACKUP_JOB_CONCURRENCY=${BACKUP_JOB_CONCURRENCY:-"1"}
DBBACKUP_USER=${DBBACKUP_USER:-"dbbackup"}
DBBACKUP_GROUP=${DBBACKUP_GROUP:-"${DBBACKUP_USER}"} # Must go after DBBACKUP_USER
DEFAULT_BACKUP_BEGIN=${DEFAULT_BACKUP_BEGIN:-+0} DEFAULT_BACKUP_BEGIN=${DEFAULT_BACKUP_BEGIN:-+0}
DEFAULT_BACKUP_INTERVAL=${DEFAULT_BACKUP_INTERVAL:-1440} DEFAULT_BACKUP_INTERVAL=${DEFAULT_BACKUP_INTERVAL:-1440}
DEFAULT_BACKUP_INTERVAL=${DEFAULT_BACKUP_INTERVAL:-1440} DEFAULT_BACKUP_INTERVAL=${DEFAULT_BACKUP_INTERVAL:-1440}
@@ -13,7 +15,7 @@ DEFAULT_CREATE_LATEST_SYMLINK=${DEFAULT_CREATE_LATEST_SYMLINK:-"TRUE"}
DEFAULT_ENABLE_PARALLEL_COMPRESSION=${DEFAULT_ENABLE_PARALLEL_COMPRESSION:-"TRUE"} DEFAULT_ENABLE_PARALLEL_COMPRESSION=${DEFAULT_ENABLE_PARALLEL_COMPRESSION:-"TRUE"}
DEFAULT_ENCRYPT=${DEFAULT_ENCRYPT:-"FALSE"} DEFAULT_ENCRYPT=${DEFAULT_ENCRYPT:-"FALSE"}
DEFAULT_FILESYSTEM_PATH=${DEFAULT_FILESYSTEM_PATH:-"/backup"} DEFAULT_FILESYSTEM_PATH=${DEFAULT_FILESYSTEM_PATH:-"/backup"}
DEFAULT_FILESYSTEM_PERMISSION=${DEFAULT_FILESYSTEM_PERMISSION:-"700"} DEFAULT_FILESYSTEM_PERMISSION=${DEFAULT_FILESYSTEM_PERMISSION:-"600"}
DEFAULT_FILESYSTEM_ARCHIVE_PATH=${DEFAULT_FILESYSTEM_ARCHIVE_PATH:-"${DEFAULT_FILESYSTEM_PATH}/archive/"} DEFAULT_FILESYSTEM_ARCHIVE_PATH=${DEFAULT_FILESYSTEM_ARCHIVE_PATH:-"${DEFAULT_FILESYSTEM_PATH}/archive/"}
DEFAULT_LOG_LEVEL=${DEFAULT_LOG_LEVEL:-"notice"} DEFAULT_LOG_LEVEL=${DEFAULT_LOG_LEVEL:-"notice"}
DEFAULT_MYSQL_ENABLE_TLS=${DEFAULT_MYSQL_ENABLE_TLS:-"FALSE"} DEFAULT_MYSQL_ENABLE_TLS=${DEFAULT_MYSQL_ENABLE_TLS:-"FALSE"}

View File

@@ -5,11 +5,11 @@ bootstrap_filesystem() {
if [ ! -d "${backup_job_filesystem_path}" ]; then if [ ! -d "${backup_job_filesystem_path}" ]; then
mkdir -p "${backup_job_filesystem_path}" mkdir -p "${backup_job_filesystem_path}"
fi fi
if [ "$(stat -c %U "${backup_job_filesystem_path}")" != "dbbackup" ] ; then chown -R dbbackup:dbbackup "${backup_job_filesystem_path}" ; fi if [ "$(stat -c %U "${backup_job_filesystem_path}")" != "${DBBACKUP_USER}" ] ; then chown -R "${DBBACKUP_USER}":"${DBBACKUP_GROUP}" "${backup_job_filesystem_path}" ; fi
if [ "$(stat -c %a "${backup_job_filesystem_path}")" != "${backup_job_filesystem_permission}" ] ; then chmod -R "${backup_job_filesystem_permission}" "${backup_job_filesystem_path}" ; fi if [ "$(stat -c %a "${backup_job_filesystem_path}")" != "${backup_job_filesystem_permission}" ] ; then chmod -R "${backup_job_filesystem_permission}" "${backup_job_filesystem_path}" ; fi
if [ -d "${backup_job_filesystem_archive_path}" ]; then if [ -d "${backup_job_filesystem_archive_path}" ]; then
if [ "$(stat -c %U "${backup_job_filesystem_archive_path}")" != "dbbackup" ] ; then chown -R dbbackup:dbbackup "${backup_job_filesystem_archive_path}" ; fi if [ "$(stat -c %U "${backup_job_filesystem_archive_path}")" != "${DBBACKUP_USER}" ] ; then chown -R "${DBBACKUP_USER}":"${DBBACKUP_GROUP}" "${backup_job_filesystem_archive_path}" ; fi
if [ "$(stat -c %a "${backup_job_filesystem_archive_path}")" != "${backup_job_filesystem_permission}" ] ; then chmod -R "${backup_job_filesystem_permission}" "${backup_job_filesystem_archive_path}" ; fi if [ "$(stat -c %a "${backup_job_filesystem_archive_path}")" != "${backup_job_filesystem_permission}" ] ; then chmod -R "${backup_job_filesystem_permission}" "${backup_job_filesystem_archive_path}" ; fi
fi fi
@@ -17,14 +17,14 @@ bootstrap_filesystem() {
mkdir -p "${LOG_PATH}" mkdir -p "${LOG_PATH}"
fi fi
if [ "$(stat -c %U "${LOG_PATH}")" != "dbbackup" ] ; then chown dbbackup:dbbackup "${LOG_PATH}" ; fi if [ "$(stat -c %U "${LOG_PATH}")" != "${DBBACKUP_USER}" ] ; then chown -R "${DBBACKUP_USER}":"${DBBACKUP_GROUP}" "${LOG_PATH}" ; fi
if [ ! -d "${LOG_PATH}"/"$(date +'%Y%m%d')" ]; then run_as_user mkdir -p "${LOG_PATH}"/"$(date +'%Y%m%d')"; fi if [ ! -d "${LOG_PATH}"/"$(date +'%Y%m%d')" ]; then run_as_user mkdir -p "${LOG_PATH}"/"$(date +'%Y%m%d')"; fi
if [ "$(stat -c %a "${LOG_PATH}")" != "755" ] ; then chmod -R 755 "${LOG_PATH}" ; fi if [ "$(stat -c %a "${LOG_PATH}")" != "755" ] ; then chmod -R 755 "${LOG_PATH}" ; fi
if [ ! -d "${TEMP_PATH}" ]; then if [ ! -d "${TEMP_PATH}" ]; then
mkdir -p "${TEMP_PATH}" mkdir -p "${TEMP_PATH}"
fi fi
if [ "$(stat -c %U "${TEMP_PATH}")" != "dbbackup" ] ; then chown -R dbbackup:dbbackup "${TEMP_PATH}" ; fi if [ "$(stat -c %U "${TEMP_PATH}")" != "${DBBACKUP_USER}" ] ; then chown -R "${DBBACKUP_USER}":"${DBBACKUP_GROUP}" "${TEMP_PATH}" ; fi
if var_true "${DEBUG_BOOTSTRAP_FILESYSTEM}" ; then debug off; fi if var_true "${DEBUG_BOOTSTRAP_FILESYSTEM}" ; then debug off; fi
} }
@@ -237,6 +237,14 @@ bootstrap_variables() {
transform_backup_instance_variable "${backup_instance_number}" USER backup_job_db_user transform_backup_instance_variable "${backup_instance_number}" USER backup_job_db_user
backup_job_backup_begin=$(echo "${backup_job_backup_begin}" | sed -e "s|'||g" -e 's|"||g') backup_job_backup_begin=$(echo "${backup_job_backup_begin}" | sed -e "s|'||g" -e 's|"||g')
if var_true "${DEBUG_BACKUP_INSTANCE_VARIABLE}" ; then cat <<EOF
## BEGIN Variable Dump $(TZ=${TIMEZONE} date)
$(cat ${backup_instance_vars})
## END
EOF
fi
rm -rf "${backup_instance_vars}" rm -rf "${backup_instance_vars}"
} }
@@ -315,7 +323,7 @@ bootstrap_variables() {
## Check is Variable is Defined ## Check is Variable is Defined
## Usage: check_var transformed_varname real_varname "Description" ## Usage: check_var transformed_varname real_varname "Description"
output_off output_off
print_debug "Looking for existence of $2 environment variable" print_debug "[parse_variables] Looking for existence of $2 environment variable"
if [ ! -v "$1" ]; then if [ ! -v "$1" ]; then
print_error "No '$3' Entered! - Set '\$$2' environment variable - Halting Backup Number ${v_instance}" print_error "No '$3' Entered! - Set '\$$2' environment variable - Halting Backup Number ${v_instance}"
s6-svc -d /var/run/s6/legacy-services/dbbackup-"${v_instance}" s6-svc -d /var/run/s6/legacy-services/dbbackup-"${v_instance}"
@@ -468,20 +476,25 @@ backup_couch() {
} }
backup_influx() { backup_influx() {
if var_true "${DEBUG_BACKUP_INFLUX}" ; then debug on; fi
if [ "${backup_job_db_name,,}" = "all" ] ; then if [ "${backup_job_db_name,,}" = "all" ] ; then
write_log debug "Preparing to back up everything" write_log debug "[backup_influx] Preparing to back up everything"
db_names=justbackupeverything db_names=justbackupeverything
else else
db_names=$(echo "${backup_job_db_name}" | tr ',' '\n') db_names=$(echo "${backup_job_db_name}" | tr ',' '\n')
fi fi
if var_true "${DEBUG_BACKUP_INFLUX}" ; then debug off; fi
case "${backup_job_db_influx_version,,}" in case "${backup_job_influx_version,,}" in
1 ) 1 )
print_debug "[backup_influx] Influx DB Version 1 selected"
for db in ${db_names}; do for db in ${db_names}; do
prepare_dbbackup prepare_dbbackup
if var_true "${DEBUG_BACKUP_INFLUX}" ; then debug on; fi
if [ "${db}" != "justbackupeverything" ] ; then bucket="-db ${db}" ; else db=all ; fi if [ "${db}" != "justbackupeverything" ] ; then bucket="-db ${db}" ; else db=all ; fi
backup_job_filename=influx_${db}_${backup_job_db_host#*//}_${now} backup_job_filename=influx_${db}_${backup_job_db_host#*//}_${now}
backup_job_filename_base=influx_${db}_${backup_job_db_host#*//} backup_job_filename_base=influx_${db}_${backup_job_db_host#*//}
if var_true "${DEBUG_BACKUP_INFLUX}" ; then debug off; fi
compression compression
pre_dbbackup "${db}" pre_dbbackup "${db}"
write_log notice "Dumping Influx database: '${db}'" write_log notice "Dumping Influx database: '${db}'"
@@ -503,9 +516,12 @@ backup_influx() {
done done
;; ;;
2 ) 2 )
print_debug "[backup_influx] Influx DB Version 2 selected"
for db in ${db_names}; do for db in ${db_names}; do
prepare_dbbackup prepare_dbbackup
if var_true "${DEBUG_BACKUP_INFLUX}" ; then debug on; fi
if [ "${db}" != "justbackupeverything" ] ; then bucket="--bucket $db" ; else db=all ; fi if [ "${db}" != "justbackupeverything" ] ; then bucket="--bucket $db" ; else db=all ; fi
if var_true "${DEBUG_BACKUP_INFLUX}" ; then debug off; fi
backup_job_filename=influx2_${db}_${backup_job_db_host#*//}_${now} backup_job_filename=influx2_${db}_${backup_job_db_host#*//}_${now}
backup_job_filename_base=influx2_${db}_${backup_job_db_host#*//} backup_job_filename_base=influx2_${db}_${backup_job_db_host#*//}
compression compression
@@ -515,10 +531,10 @@ backup_influx() {
run_as_user influx backup --org ${backup_job_db_user} ${bucket} --host ${backup_job_db_host}:${backup_job_db_port} --token ${backup_job_db_pass} ${backup_job_extra_opts} ${backup_job_extra_dump_opts} --compression none "${TEMP_PATH}"/"${backup_job_filename_dir}" run_as_user influx backup --org ${backup_job_db_user} ${bucket} --host ${backup_job_db_host}:${backup_job_db_port} --token ${backup_job_db_pass} ${backup_job_extra_opts} ${backup_job_extra_dump_opts} --compression none "${TEMP_PATH}"/"${backup_job_filename_dir}"
exit_code=$? exit_code=$?
check_exit_code backup "${backup_job_filename_dir}" check_exit_code backup "${backup_job_filename_dir}"
if var_true "${DEBUG_BACKUP_INFLUX}" ; then debug off; fi
create_archive create_archive
backup_job_filename=influx2_${db}_${backup_job_db_host#*//}_${now}.tar${extension} backup_job_filename=influx2_${db}_${backup_job_db_host#*//}_${now}.tar${extension}
backup_job_filename_base=influx2_${db}_${backup_job_db_host#*//} backup_job_filename_base=influx2_${db}_${backup_job_db_host#*//}
if var_true "${DEBUG_BACKUP_INFLUX}" ; then debug off; fi
timer backup finish timer backup finish
file_encryption file_encryption
generate_checksum generate_checksum
@@ -532,6 +548,7 @@ backup_influx() {
backup_mongo() { backup_mongo() {
prepare_dbbackup prepare_dbbackup
if var_true "${DEBUG_BACKUP_MONGO}" ; then debug on; fi
if [ "${backup_job_compression,,}" = "none" ] ; then if [ "${backup_job_compression,,}" = "none" ] ; then
backup_job_filename=${dbtype}_${backup_job_db_name,,}_${backup_job_db_host,,}_${now}.archive backup_job_filename=${dbtype}_${backup_job_db_name,,}_${backup_job_db_host,,}_${now}.archive
backup_job_filename_base=${dbtype}_${backup_job_db_name,,}_${backup_job_db_host,,} backup_job_filename_base=${dbtype}_${backup_job_db_name,,}_${backup_job_db_host,,}
@@ -546,6 +563,7 @@ backup_mongo() {
else else
mongo_backup_parameter="--host ${backup_job_db_host} --port ${backup_job_db_port} ${MONGO_USER_STR}${MONGO_PASS_STR}${MONGO_AUTH_STR}${MONGO_DB_STR} ${backup_job_extra_opts} ${backup_job_extra_dump_opts}" mongo_backup_parameter="--host ${backup_job_db_host} --port ${backup_job_db_port} ${MONGO_USER_STR}${MONGO_PASS_STR}${MONGO_AUTH_STR}${MONGO_DB_STR} ${backup_job_extra_opts} ${backup_job_extra_dump_opts}"
fi fi
if var_true "${DEBUG_BACKUP_MONGO}" ; then debug off; fi
pre_dbbackup "${backup_job_db_name}" pre_dbbackup "${backup_job_db_name}"
write_log notice "Dumping MongoDB database: '${DB_NAME}' ${compression_string}" write_log notice "Dumping MongoDB database: '${DB_NAME}' ${compression_string}"
if var_true "${DEBUG_BACKUP_MONGO}" ; then debug on; fi if var_true "${DEBUG_BACKUP_MONGO}" ; then debug on; fi
@@ -577,8 +595,10 @@ backup_mssql() {
backup_job_filename_original=${backup_job_filename} backup_job_filename_original=${backup_job_filename}
compression compression
pre_dbbackup all pre_dbbackup all
if var_true "${DEBUG_BACKUP_MSSQL}" ; then debug on; fi
run_as_user ${compress_cmd} "${TEMP_PATH}/${backup_job_filename_original}" run_as_user ${compress_cmd} "${TEMP_PATH}/${backup_job_filename_original}"
check_exit_code backup "${backup_job_filename}" check_exit_code backup "${backup_job_filename}"
if var_true "${DEBUG_BACKUP_MSSQL}" ; then debug off; fi
timer backup finish timer backup finish
file_encryption file_encryption
generate_checksum generate_checksum
@@ -612,7 +632,7 @@ backup_mssql() {
} }
backup_mysql() { backup_mysql() {
if var_true "${DEBUG_BACKUP_MYSQL}" ; then debug on; fi
if var_true "${backup_job_mysql_events}" ; then if var_true "${backup_job_mysql_events}" ; then
events="--events" events="--events"
fi fi
@@ -636,7 +656,7 @@ backup_mysql() {
else else
db_names=$(echo "${backup_job_db_name}" | tr ',' '\n') db_names=$(echo "${backup_job_db_name}" | tr ',' '\n')
fi fi
if var_true "${DEBUG_BACKUP_MYSQL}" ; then debug off; fi
write_log debug "Databases Found: $(echo ${db_names} | xargs | tr ' ' ',')" write_log debug "Databases Found: $(echo ${db_names} | xargs | tr ' ' ',')"
if var_true "${backup_job_split_db}" ; then if var_true "${backup_job_split_db}" ; then
@@ -701,6 +721,7 @@ backup_pgsql() {
post_dbbackup "globals" post_dbbackup "globals"
} }
if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug on; fi
export PGPASSWORD=${backup_job_db_pass} export PGPASSWORD=${backup_job_db_pass}
if [ -n "${backup_job_db_auth}" ] ; then if [ -n "${backup_job_db_auth}" ] ; then
authdb=${backup_job_db_auth} authdb=${backup_job_db_auth}
@@ -724,7 +745,7 @@ backup_pgsql() {
fi fi
if var_false "${_postgres_backup_globals}" && var_true "${backup_job_backup_pgsql_globals}" ; then _postgres_backup_globals=true; fi if var_false "${_postgres_backup_globals}" && var_true "${backup_job_backup_pgsql_globals}" ; then _postgres_backup_globals=true; fi
if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug off; fi
write_log debug "Databases Found: $(echo ${db_names} | xargs | tr ' ' ',')" write_log debug "Databases Found: $(echo ${db_names} | xargs | tr ' ' ',')"
if var_true "${backup_job_split_db}" ; then if var_true "${backup_job_split_db}" ; then
@@ -756,6 +777,7 @@ backup_pgsql() {
compression compression
pre_dbbackup all pre_dbbackup all
write_log notice "Dumping all PostgreSQL databases: '$(echo ${db_names} | xargs | tr ' ' ',')' ${compression_string}" write_log notice "Dumping all PostgreSQL databases: '$(echo ${db_names} | xargs | tr ' ' ',')' ${compression_string}"
if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug on; fi
tmp_db_names=$(run_as_user psql -h ${backup_job_db_host} -p ${backup_job_db_port} -U ${backup_job_db_user} -d ${authdb} -c 'COPY (SELECT datname FROM pg_database WHERE datistemplate = false) TO STDOUT;' ) tmp_db_names=$(run_as_user psql -h ${backup_job_db_host} -p ${backup_job_db_port} -U ${backup_job_db_user} -d ${authdb} -c 'COPY (SELECT datname FROM pg_database WHERE datistemplate = false) TO STDOUT;' )
for r_db_name in $(echo $db_names | xargs); do for r_db_name in $(echo $db_names | xargs); do
tmp_db_names=$(echo "$tmp_db_names" | xargs | sed "s|${r_db_name}||g" ) tmp_db_names=$(echo "$tmp_db_names" | xargs | sed "s|${r_db_name}||g" )
@@ -764,7 +786,6 @@ backup_pgsql() {
for x_db_name in ${tmp_db_names} ; do for x_db_name in ${tmp_db_names} ; do
pgexclude_arg=$(echo ${pgexclude_arg} --exclude-database=${x_db_name}) pgexclude_arg=$(echo ${pgexclude_arg} --exclude-database=${x_db_name})
done done
if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug on; fi
run_as_user ${play_fair} pg_dumpall -h ${backup_job_db_host} -p ${backup_job_db_port} -U ${backup_job_db_user} ${pgexclude_arg} ${backup_job_extra_opts} ${backup_job_extra_dump_opts} | ${compress_cmd} | run_as_user tee "${TEMP_PATH}"/"${backup_job_filename}" > /dev/null run_as_user ${play_fair} pg_dumpall -h ${backup_job_db_host} -p ${backup_job_db_port} -U ${backup_job_db_user} ${pgexclude_arg} ${backup_job_extra_opts} ${backup_job_extra_dump_opts} | ${compress_cmd} | run_as_user tee "${TEMP_PATH}"/"${backup_job_filename}" > /dev/null
exit_code=$? exit_code=$?
if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug off; fi if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug off; fi
@@ -801,11 +822,13 @@ backup_redis() {
sleep 5 sleep 5
done done
backup_job_filename_original=${backup_job_filename} backup_job_filename_original=${backup_job_filename}
if var_true "${DEBUG_BACKUP_REDIS}" ; then debug off; fi
compression compression
pre_dbbackup all pre_dbbackup all
run_as_user ${compress_cmd} "${TEMP_PATH}/${backup_job_filename_original}"
timer backup finish
if var_true "${DEBUG_BACKUP_REDIS}" ; then debug on; fi if var_true "${DEBUG_BACKUP_REDIS}" ; then debug on; fi
run_as_user ${compress_cmd} "${TEMP_PATH}/${backup_job_filename_original}"
if var_true "${DEBUG_BACKUP_REDIS}" ; then debug off; fi
timer backup finish
check_exit_code backup "${backup_job_filename}" check_exit_code backup "${backup_job_filename}"
file_encryption file_encryption
generate_checksum generate_checksum
@@ -1036,6 +1059,7 @@ compression() {
case "${backup_job_compression,,}" in case "${backup_job_compression,,}" in
bz* ) bz* )
print_debug "[compression] Selected BZIP"
compress_cmd="${play_fair} pbzip2 -q -${backup_job_compression_level} -p${backup_job_parallel_compression_threads} " compress_cmd="${play_fair} pbzip2 -q -${backup_job_compression_level} -p${backup_job_parallel_compression_threads} "
compression_type="bzip2" compression_type="bzip2"
dir_compress_cmd=${compress_cmd} dir_compress_cmd=${compress_cmd}
@@ -1044,6 +1068,7 @@ compression() {
backup_job_filename=${backup_job_filename}.bz2 backup_job_filename=${backup_job_filename}.bz2
;; ;;
gz* ) gz* )
print_debug "[compression] Selected GZIP"
compress_cmd="${play_fair} pigz -q -${backup_job_compression_level} -p ${backup_job_parallel_compression_threads} ${gz_rsyncable}" compress_cmd="${play_fair} pigz -q -${backup_job_compression_level} -p ${backup_job_parallel_compression_threads} ${gz_rsyncable}"
compression_type="gzip" compression_type="gzip"
extension=".gz" extension=".gz"
@@ -1052,6 +1077,7 @@ compression() {
backup_job_filename=${backup_job_filename}.gz backup_job_filename=${backup_job_filename}.gz
;; ;;
xz* ) xz* )
print_debug "[compression] Selected XZIP"
compress_cmd="${play_fair} pixz -${backup_job_compression_level} -p ${backup_job_parallel_compression_threads} " compress_cmd="${play_fair} pixz -${backup_job_compression_level} -p ${backup_job_parallel_compression_threads} "
compression_type="xzip" compression_type="xzip"
dir_compress_cmd=${compress_cmd} dir_compress_cmd=${compress_cmd}
@@ -1060,6 +1086,7 @@ compression() {
backup_job_filename=${backup_job_filename}.xz backup_job_filename=${backup_job_filename}.xz
;; ;;
zst* ) zst* )
print_debug "[compression] Selected ZSTD"
compress_cmd="${play_fair} zstd -q -q --rm -${backup_job_compression_level} -T${backup_job_parallel_compression_threads} ${gz_rsyncable}" compress_cmd="${play_fair} zstd -q -q --rm -${backup_job_compression_level} -T${backup_job_parallel_compression_threads} ${gz_rsyncable}"
compression_type="zstd" compression_type="zstd"
dir_compress_cmd=${compress_cmd} dir_compress_cmd=${compress_cmd}
@@ -1108,9 +1135,10 @@ create_schedulers() {
backup() { backup() {
bootstrap_variables upgrade BACKUP bootstrap_variables upgrade BACKUP
local backup_instances=$(printenv | sort | grep -c "^DB[0-9]._HOST") local backup_instances=$(printenv | sort | grep -c "^DB[0-9]._HOST")
print_debug "[create_schedulers] Found '${backup_instances}' DB_HOST instances"
if [ -n "${DB_HOST}" ] && [ "${backup_instances}" ]; then if [ -n "${DB_HOST}" ] && [ "${backup_instances}" ]; then
backup_instances=1; backup_instances=1;
print_debug "Detected using old DB_ variables" print_debug "[create_schedulers] Detected using old DB_ variables"
fi fi
for (( instance = 01; instance <= backup_instances; )) ; do for (( instance = 01; instance <= backup_instances; )) ; do
@@ -1186,7 +1214,7 @@ file_encryption() {
if var_true "${DEBUG_FILE_ENCRYPTION}" ; then debug on; fi if var_true "${DEBUG_FILE_ENCRYPTION}" ; then debug on; fi
if var_true "${backup_job_encrypt}" ; then if var_true "${backup_job_encrypt}" ; then
if [ "${exit_code}" = "0" ] ; then if [ "${exit_code}" = "0" ] ; then
print_debug "Encrypting" print_debug "[file_encryption] Encrypting"
output_off output_off
if [ -n "${backup_job_encrypt_passphrase}" ] && [ -n "${backup_job_encrypt_pubkey}" ]; then if [ -n "${backup_job_encrypt_passphrase}" ] && [ -n "${backup_job_encrypt_pubkey}" ]; then
print_error "Can't encrypt as both ENCRYPT_PASSPHRASE and ENCRYPT_PUBKEY exist!" print_error "Can't encrypt as both ENCRYPT_PASSPHRASE and ENCRYPT_PUBKEY exist!"
@@ -1207,6 +1235,7 @@ file_encryption() {
fi fi
fi fi
if [ -f "${TEMP_PATH}"/"${backup_job_filename}".gpg ]; then if [ -f "${TEMP_PATH}"/"${backup_job_filename}".gpg ]; then
print_debug "[file_encryption] Deleting original file"
rm -rf "${TEMP_PATH:?}"/"${backup_job_filename:?}" rm -rf "${TEMP_PATH:?}"/"${backup_job_filename:?}"
backup_job_filename="${backup_job_filename}.gpg" backup_job_filename="${backup_job_filename}.gpg"
@@ -1382,23 +1411,23 @@ EOF
for notification_type in $notification_types ; do for notification_type in $notification_types ; do
case "${notification_type,,}" in case "${notification_type,,}" in
"custom" ) "custom" )
print_debug "Sending Notification via custom" print_debug "[notify] Sending Notification via custom"
notification_custom "${1}" "${2}" "${3}" "${4}" "${5}" notification_custom "${1}" "${2}" "${3}" "${4}" "${5}"
;; ;;
"email" | "mail" ) "email" | "mail" )
print_debug "Sending Notification via email" print_debug "[notify] Sending Notification via email"
notification_email "${1}" "${2}" "${3}" "${4}" "${5}" notification_email "${1}" "${2}" "${3}" "${4}" "${5}"
;; ;;
"matrix" ) "matrix" )
print_debug "Sending Notification via Matrix" print_debug "[notify] Sending Notification via Matrix"
notification_matrix "${1}" "${2}" "${3}" "${4}" "${5}" notification_matrix "${1}" "${2}" "${3}" "${4}" "${5}"
;; ;;
"mattermost" ) "mattermost" )
print_debug "Sending Notification via Mattermost" print_debug "[notify] Sending Notification via Mattermost"
notification_mattermost "${1}" "${2}" "${3}" "${4}" "${5}" notification_mattermost "${1}" "${2}" "${3}" "${4}" "${5}"
;; ;;
"rocketchat" ) "rocketchat" )
print_debug "Sending Notification via Rocketchat" print_debug "[notify] Sending Notification via Rocketchat"
notification_rocketchat "${1}" "${2}" "${3}" "${4}" "${5}" notification_rocketchat "${1}" "${2}" "${3}" "${4}" "${5}"
;; ;;
* ) * )
@@ -1441,8 +1470,37 @@ move_dbbackup() {
write_log debug "Moving backup to filesystem" write_log debug "Moving backup to filesystem"
run_as_user mkdir -p "${backup_job_filesystem_path}" run_as_user mkdir -p "${backup_job_filesystem_path}"
if [ "${backup_job_checksum,,}" != "none" ] ; then run_as_user mv "${TEMP_PATH}"/*."${checksum_extension}" "${backup_job_filesystem_path}"/ ; fi if [ "${backup_job_checksum,,}" != "none" ] ; then run_as_user mv "${TEMP_PATH}"/*."${checksum_extension}" "${backup_job_filesystem_path}"/ ; fi
if var_true "${DEBUG_MOVE_DBBACKUP}"; then
cat <<EOF
## BEGIN Before Moving file from TEMP_PATH $(TZ=${TIMEZONE} date)
##
$(ls -l "${TEMP_PATH}"/*)
## END
EOF
fi
run_as_user mv "${TEMP_PATH}"/"${backup_job_filename}" "${backup_job_filesystem_path}"/"${backup_job_filename}" run_as_user mv "${TEMP_PATH}"/"${backup_job_filename}" "${backup_job_filesystem_path}"/"${backup_job_filename}"
move_exit_code=$? move_exit_code=$?
if var_true "${DEBUG_MOVE_DBBACKUP}"; then
cat <<EOF
## BEGIN After Moving file from TEMP_PATH $(TZ=${TIMEZONE} date)
##
$(ls -l "${TEMP_PATH}"/*)
## END
## BEGIN After Moving file to _FILESYSTEM_PATH $(TZ=${TIMEZONE} date)
##
$(ls -l "${backup_job_filesystem_path}"/*)
## END
EOF
fi
if var_true "${backup_job_create_latest_symlink}" ; then if var_true "${backup_job_create_latest_symlink}" ; then
run_as_user ln -sfr "${backup_job_filesystem_path}"/"${backup_job_filename}" "${backup_job_filesystem_path}"/latest-"${backup_job_filename_base}" run_as_user ln -sfr "${backup_job_filesystem_path}"/"${backup_job_filename}" "${backup_job_filesystem_path}"/latest-"${backup_job_filename_base}"
fi fi
@@ -1648,7 +1706,7 @@ process_limiter() {
} }
run_as_user() { run_as_user() {
s6-setuidgid dbbackup $@ sudo -u "${DBBACKUP_USER}" $@
} }
setup_mode() { setup_mode() {
@@ -1881,18 +1939,18 @@ timer() {
;; ;;
datetime) datetime)
time_begin=$(date -d "${backup_job_backup_begin}" +%s) time_begin=$(date -d "${backup_job_backup_begin}" +%s)
print_debug "BACKUP_BEGIN time = ${time_begin}" print_debug "[timer] [datetime] BACKUP_BEGIN time = ${time_begin}"
time_wait=$(( time_begin - time_current )) time_wait=$(( time_begin - time_current ))
print_debug "Difference in seconds: ${time_wait}" print_debug "[timer] [datetime] Difference in seconds: ${time_wait}"
if (( ${time_wait} < 0 )); then if (( ${time_wait} < 0 )); then
time_wait=$(( (${time_wait} + (${backup_job_backup_interval} - 1)) / (${backup_job_backup_interval} * 60) )) time_wait=$(( (${time_wait} + (${backup_job_backup_interval} - 1)) / (${backup_job_backup_interval} * 60) ))
time_wait=$(( ${time_wait} * -1 )) time_wait=$(( ${time_wait} * -1 ))
print_debug "Difference in seconds (rounded) time_wait is in the past : ${time_wait}" print_debug "[timer] [datetime] Difference in seconds (rounded) time_wait is in the past : ${time_wait}"
fi fi
time_future=$(( time_current + time_wait )) time_future=$(( time_current + time_wait ))
print_debug "Future execution time = ${time_future}" print_debug "[timer] [datetime] Future execution time = ${time_future}"
;; ;;
job) job)
case "${2}" in case "${2}" in