Compare commits

..

30 Commits

Author SHA1 Message Date
dave@tiredofit.ca
17daf26084 Release 4.0.29 - See CHANGELOG.md 2023-12-04 11:29:14 -08:00
Dave Conroy
b53cda99f7 Don't execute blobxfer functions if both key and secret are not set 2023-12-04 11:09:43 -08:00
Dave Conroy
2cf3e2ae70 Show proper DB Name when backing up Mongo or MSSQL 2023-12-04 08:06:57 -08:00
dave@tiredofit.ca
c7ee94aec2 Release 4.0.28 - See CHANGELOG.md 2023-12-04 07:04:08 -08:00
Dave Conroy
f44233e51a AWS CLI 1.31.5 2023-12-04 07:02:40 -08:00
dave@tiredofit.ca
ccda858b18 Release 4.0.27 - See CHANGELOG.md 2023-12-04 07:00:39 -08:00
Dave Conroy
d58b27d5ef Use alternate cron 2023-12-03 22:04:12 -08:00
dave@tiredofit.ca
fb9fe8a032 Release 4.0.26 - See CHANGELOG.md 2023-11-30 08:55:34 -08:00
Dave Conroy
b705982ae1 Restore missing _SPLIT_DB environment variable information for MySQL/Postgres 2023-11-30 08:54:49 -08:00
dave@tiredofit.ca
f031d787ae Release 4.0.25 - See CHANGELOG.md 2023-11-29 10:43:25 -08:00
Dave Conroy
3eed5fc8a0 Switch BLOBXFER_STORAGE_KEY to BLOBXFER_STORAGE_ACCOUNT_KEY 2023-11-29 10:39:58 -08:00
dave@tiredofit.ca
be619fb707 Release 4.0.24 - See CHANGELOG.md 2023-11-28 15:06:50 -08:00
dave@tiredofit.ca
cccc088b35 Release 4.0.23 - See CHANGELOG.md 2023-11-28 08:05:11 -08:00
dave@tiredofit.ca
4579f4057c Release 4.0.22 - See CHANGELOG.md 2023-11-25 08:50:25 -08:00
dave@tiredofit.ca
cd683648d0 Release 4.0.21 - See CHANGELOG.md 2023-11-22 15:40:38 -08:00
dave@tiredofit.ca
11f55f3d82 Release 4.0.20 - See CHANGELOG.md 2023-11-21 15:18:22 -08:00
dave@tiredofit.ca
674a98fcd8 Release 4.0.19 - See CHANGELOG.md 2023-11-20 15:26:21 -08:00
dave@tiredofit.ca
77c747e01b Release 4.0.18 - See CHANGELOG.md 2023-11-18 09:53:41 -08:00
Dave Conroy
2e30558a27 Merge pull request #282 from joergmschulz/patch-1
Update 10-db-backup msmtp -C
2023-11-18 09:52:23 -08:00
joergmschulz
c746fb641e Update 10-db-backup msmtp -C
the config file is referenced by -C , not -c
2023-11-17 23:28:08 +01:00
dave@tiredofit.ca
ca2f04cd59 Release 4.0.17 - See CHANGELOG.md 2023-11-17 08:16:34 -08:00
dave@tiredofit.ca
dfa94ecab7 Release 4.0.16 - See CHANGELOG.md 2023-11-17 08:07:54 -08:00
Dave Conroy
eaea6dc348 Update README.md 2023-11-16 09:38:18 -08:00
dave@tiredofit.ca
34abe88159 Release 4.0.15 - See CHANGELOG.md 2023-11-16 09:35:56 -08:00
Dave Conroy
5ffbeeb163 Merge pull request #280 from joergmschulz/patch-1
warn instead of warning
2023-11-14 07:14:55 -08:00
joergmschulz
c82cee80f8 warn instead of warning
see #279
2023-11-14 08:53:38 +01:00
dave@tiredofit.ca
ab059ccdf1 Release 4.0.14 - See CHANGELOG.md 2023-11-13 15:16:36 -08:00
dave@tiredofit.ca
1e8ccf4d56 Release 4.0.13 - See CHANGELOG.md 2023-11-12 17:07:07 -08:00
dave@tiredofit.ca
65c40cac0a Release 4.0.12 - See CHANGELOG.md 2023-11-12 09:03:01 -08:00
dave@tiredofit.ca
a9f2d51ff9 Release 4.0.11 - See CHANGELOG.md 2023-11-11 13:43:57 -08:00
7 changed files with 260 additions and 88 deletions

View File

@@ -1,3 +1,120 @@
## 4.0.29 2023-12-04 <dave at tiredofit dot ca>
### Changed
- Skip blobxfer if either account or key is not present
## 4.0.28 2023-12-04 <dave at tiredofit dot ca>
### Changed
- AWS Cli 1.31.5
- Switch to using PIP for installing AWS-Cli to remove deprecation warnings
## 4.0.27 2023-12-04 <dave at tiredofit dot ca>
### Changed
- Switch to using actual crontab for cron expressions
## 4.0.26 2023-11-30 <dave at tiredofit dot ca>
### Added
- AWS CLI 1.31.4
## 4.0.25 2023-11-29 <dave at tiredofit dot ca>
### Changed
- Fix #297 - Add parameters to blobxfer to restore functionality
## 4.0.24 2023-11-28 <dave at tiredofit dot ca>
### Changed
- Fix issue with cron parsing and 0 being a value getting clobbered by sort command
## 4.0.23 2023-11-28 <dave at tiredofit dot ca>
### Changed
- Resolve issue with custom notification scripts not executing
## 4.0.22 2023-11-25 <dave at tiredofit dot ca>
### Changed
- Move cleanup_old_data routines to happen within backup_ function to properly accommodate for globals, and ALL DB_NAME use cases
## 4.0.21 2023-11-22 <dave at tiredofit dot ca>
### Changed
- Fix for SQLite backups not being cleaned up properly due to a malformed base
## 4.0.20 2023-11-21 <dave at tiredofit dot ca>
### Changed
- Update base image to support S6 Overlay 3.1.6.2 to solve shutdown issues specifically with MODE=MANUAL and MANUAL_RUN_FOREVER=TRUE
- Add some safety nets for Manual scheduling
## 4.0.19 2023-11-20 <dave at tiredofit dot ca>
### Changed
- Make adjustments to cron scheduling feature to be able to handle whitespace properly
## 4.0.18 2023-11-18 <joergmschulz@github>
### Changed
- Fix loading msmtp configuration
## 4.0.17 2023-11-17 <dave at tiredofit dot ca>
### Changed
- Provide more details when notifying via instant messages
## 4.0.16 2023-11-17 <dave at tiredofit dot ca>
### Changed
- Switch to using msmtp instead of s-mail for notify()
## 4.0.15 2023-11-16 <dave at tiredofit dot ca>
### Changed
- Fix cleanup of old backups
## 4.0.14 2023-11-13 <dave at tiredofit dot ca>
### Changed
- Bugfix when PRE/POST scripts found not giving legacy warning
- Run pre / post scripts as root
## 4.0.13 2023-11-12 <dave at tiredofit dot ca>
### Changed
- Check for any quotes if using MONGO_CUSTOM_URI and remove
## 4.0.12 2023-11-12 <dave at tiredofit dot ca>
### Changed
- Allow creating schedulers if _MONGO_CUSTOM_URI is set and _DB_HOST blank
## 4.0.11 2023-11-11 <dave at tiredofit dot ca>
### Changed
- Resolve issue with backing up ALL databases with PGSQL and MySQL
## 4.0.10 2023-11-11 <dave at tiredofit dot ca> ## 4.0.10 2023-11-11 <dave at tiredofit dot ca>
### Changed ### Changed

View File

@@ -9,8 +9,8 @@ ENV INFLUX1_CLIENT_VERSION=1.8.0 \
INFLUX2_CLIENT_VERSION=2.7.3 \ INFLUX2_CLIENT_VERSION=2.7.3 \
MSODBC_VERSION=18.3.2.1-1 \ MSODBC_VERSION=18.3.2.1-1 \
MSSQL_VERSION=18.3.1.1-1 \ MSSQL_VERSION=18.3.1.1-1 \
AWS_CLI_VERSION=1.29.78 \ AWS_CLI_VERSION=1.31.5 \
CONTAINER_ENABLE_MESSAGING=FALSE \ CONTAINER_ENABLE_MESSAGING=TRUE \
CONTAINER_ENABLE_MONITORING=TRUE \ CONTAINER_ENABLE_MONITORING=TRUE \
IMAGE_NAME="tiredofit/db-backup" \ IMAGE_NAME="tiredofit/db-backup" \
IMAGE_REPO_URL="https://github.com/tiredofit/docker-db-backup/" IMAGE_REPO_URL="https://github.com/tiredofit/docker-db-backup/"
@@ -76,10 +76,8 @@ RUN source /assets/functions/00-container && \
*) sleep 0.1 ;; \ *) sleep 0.1 ;; \
esac; \ esac; \
\ \
if [ $mssql = "true" ] ; then curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk ; curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; ls -l ; echo y | apk add --allow-untrusted msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; else echo >&2 "Detected non x86_64 or ARM64 build variant, skipping MSSQL installation" ; fi; \ if [ $mssql = "true" ] ; then curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk ; curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; echo y | apk add --allow-untrusted msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; else echo >&2 "Detected non x86_64 or ARM64 build variant, skipping MSSQL installation" ; fi; \
if [ $influx2 = "true" ] ; then curl -sSL https://dl.influxdata.com/influxdb/releases/influxdb2-client-${INFLUX2_CLIENT_VERSION}-linux-${influx_arch}.tar.gz | tar xvfz - --strip=1 -C /usr/src/ ; chmod +x /usr/src/influx ; mv /usr/src/influx /usr/sbin/ ; else echo >&2 "Unable to build Influx 2 on this system" ; fi ; \ if [ $influx2 = "true" ] ; then curl -sSL https://dl.influxdata.com/influxdb/releases/influxdb2-client-${INFLUX2_CLIENT_VERSION}-linux-${influx_arch}.tar.gz | tar xvfz - --strip=1 -C /usr/src/ ; chmod +x /usr/src/influx ; mv /usr/src/influx /usr/sbin/ ; else echo >&2 "Unable to build Influx 2 on this system" ; fi ; \
clone_git_repo https://github.com/aws/aws-cli "${AWS_CLI_VERSION}" && \
python3 setup.py install --prefix=/usr && \
clone_git_repo https://github.com/influxdata/influxdb "${INFLUX1_CLIENT_VERSION}" && \ clone_git_repo https://github.com/influxdata/influxdb "${INFLUX1_CLIENT_VERSION}" && \
go build -o /usr/sbin/influxd ./cmd/influxd && \ go build -o /usr/sbin/influxd ./cmd/influxd && \
strip /usr/sbin/influxd && \ strip /usr/sbin/influxd && \
@@ -99,6 +97,7 @@ RUN source /assets/functions/00-container && \
make && \ make && \
make install && \ make install && \
\ \
pip3 install --break-system-packages awscli==${AWS_CLI_VERSION} && \
pip3 install --break-system-packages blobxfer && \ pip3 install --break-system-packages blobxfer && \
\ \
package remove .db-backup-build-deps && \ package remove .db-backup-build-deps && \

View File

@@ -536,6 +536,7 @@ Encryption will occur after compression and the resulting filename will have a `
| `DB01_NAME` | Schema Name e.g. `database` or `ALL` to backup all databases the user has access to. | | | | `DB01_NAME` | Schema Name e.g. `database` or `ALL` to backup all databases the user has access to. | | |
| | Backup multiple by separating with commas eg `db1,db2` | | x | | | Backup multiple by separating with commas eg `db1,db2` | | x |
| `DB01_NAME_EXCLUDE` | If using `ALL` - use this as to exclude databases separated via commas from being backed up | | x | | `DB01_NAME_EXCLUDE` | If using `ALL` - use this as to exclude databases separated via commas from being backed up | | x |
| `DB01_SPLIT_DB` | If using `ALL` - use this to split each database into its own file as opposed to one singular file | `FALSE` | |
| `DB01_PORT` | MySQL / MariaDB Port | `3306` | x | | `DB01_PORT` | MySQL / MariaDB Port | `3306` | x |
| `DB01_MYSQL_EVENTS` | Backup Events for | `TRUE` | | | `DB01_MYSQL_EVENTS` | Backup Events for | `TRUE` | |
| `DB01_MYSQL_MAX_ALLOWED_PACKET` | Max allowed packet | `512M` | | | `DB01_MYSQL_MAX_ALLOWED_PACKET` | Max allowed packet | `512M` | |
@@ -577,6 +578,7 @@ Encryption will occur after compression and the resulting filename will have a `
| `DB01_EXTRA_ENUMERATION_OPTS` | Pass extra arguments to the database enumeration command only, add them here e.g. `--extra-command` | | | | `DB01_EXTRA_ENUMERATION_OPTS` | Pass extra arguments to the database enumeration command only, add them here e.g. `--extra-command` | | |
| `DB01_NAME` | Schema Name e.g. `database` or `ALL` to backup all databases the user has access to. | | | | `DB01_NAME` | Schema Name e.g. `database` or `ALL` to backup all databases the user has access to. | | |
| | Backup multiple by separating with commas eg `db1,db2` | | x | | | Backup multiple by separating with commas eg `db1,db2` | | x |
| `DB01_SPLIT_DB` | If using `ALL` - use this to split each database into its own file as opposed to one singular file | `FALSE` | |
| `DB01_PORT` | PostgreSQL Port | `5432` | x | | `DB01_PORT` | PostgreSQL Port | `5432` | x |
###### Redis ###### Redis
@@ -601,13 +603,13 @@ Options that are related to the value of `DB01_BACKUP_LOCATION`
If `DB01_BACKUP_LOCATION` = `FILESYSTEM` then the following options are used: If `DB01_BACKUP_LOCATION` = `FILESYSTEM` then the following options are used:
| Variable | Description | Default | | Variable | Description | Default |
| --------------------------------- | ----------------------------------------------------------------------------------------------------- | --------------------------------- | | --------------------------------- | ----------------------------------------------------------------------------------------------------- | ---------------------------------- |
| `DB01_CREATE_LATEST_SYMLINK` | Create a symbolic link pointing to last backup in this format: `latest-(DB_TYPE)-(DB_NAME)-(DB_HOST)` | `TRUE` | | `DB01_CREATE_LATEST_SYMLINK` | Create a symbolic link pointing to last backup in this format: `latest-(DB_TYPE)-(DB_NAME)-(DB_HOST)` | `TRUE` |
| `DB01_FILESYSTEM_PATH` | Directory where the database dumps are kept. | `/backup` | | `DB01_FILESYSTEM_PATH` | Directory where the database dumps are kept. | `/backup` |
| `DB01_FILESYSTEM_PATH_PERMISSION` | Permissions to apply to backup directory | `700` | | `DB01_FILESYSTEM_PATH_PERMISSION` | Permissions to apply to backup directory | `700` |
| `DB01_FILESYSTEM_ARCHIVE_PATH` | Optional Directory where the database dumps archives are kept | `${DB01_FILESYSTEM_PATH/archive/` | | `DB01_FILESYSTEM_ARCHIVE_PATH` | Optional Directory where the database dumps archives are kept | `${DB01_FILESYSTEM_PATH}/archive/` |
| `DB01_FILESYSTEM_PERMISSION` | Directory and File permissions to apply to files. | `600` | | `DB01_FILESYSTEM_PERMISSION` | Directory and File permissions to apply to files. | `600` |
###### S3 ###### S3
@@ -742,6 +744,9 @@ $5 body/error message
##### Email Notifications ##### Email Notifications
See more details in the base image listed above for more mail environment variables.
| Parameter | Description | Default | `_FILE` | | Parameter | Description | Default | `_FILE` |
| ----------- | ----------------------------------------------------------------------------------------- | ------- | ------- | | ----------- | ----------------------------------------------------------------------------------------- | ------- | ------- |
| `MAIL_FROM` | What email address to send mail from for errors | | | | `MAIL_FROM` | What email address to send mail from for errors | | |

View File

@@ -57,7 +57,7 @@ services:
# Add here azure storage account # Add here azure storage account
- DB01_BLOBXFER_STORAGE_ACCOUNT={TODO Add Storage Name} - DB01_BLOBXFER_STORAGE_ACCOUNT={TODO Add Storage Name}
# Add here azure storage account key # Add here azure storage account key
- SB01_BLOBXFER_STORAGE_ACCOUNT_KEY={TODO Add Key} - DB01_BLOBXFER_STORAGE_ACCOUNT_KEY={TODO Add Key}
- DB01_BLOBXFER_REMOTE_PATH=docker-db-backup - DB01_BLOBXFER_REMOTE_PATH=docker-db-backup
restart: always restart: always
networks: networks:

View File

@@ -16,7 +16,7 @@ fi
trap ctrl_c INT trap ctrl_c INT
if [[ "${MODE,,}" =~ "standalone" ]] || [ "${1,,}" = "manual" ] || [ "${1,,}" = "now" ]; then if [[ "${MODE,,}" =~ "standalone" ]] || [ "${MODE,,}" = "manual" ] || [ "${1,,}" = "manual" ] || [ "${1,,}" = "now" ]; then
print_debug "Detected Manual Mode" print_debug "Detected Manual Mode"
persist=false persist=false
backup_job_backup_begin=+0 backup_job_backup_begin=+0
@@ -24,7 +24,6 @@ else
silent sleep {{BACKUP_NUMBER}} silent sleep {{BACKUP_NUMBER}}
time_last_run=0 time_last_run=0
time_current=$(date +'%s') time_current=$(date +'%s')
if [[ "${backup_job_backup_begin}" =~ ^\+(.*)$ ]]; then if [[ "${backup_job_backup_begin}" =~ ^\+(.*)$ ]]; then
print_debug "BACKUP_BEGIN is a jump of minute starting with +" print_debug "BACKUP_BEGIN is a jump of minute starting with +"
timer plusvalue timer plusvalue
@@ -34,10 +33,21 @@ else
elif [[ "${backup_job_backup_begin}" =~ ([0-9]{4})-([0-9]{2})-([0-9]{2})[[:space:]]([0-9]{2}):([0-9]{2}):([0-9]{2}) ]]; then elif [[ "${backup_job_backup_begin}" =~ ([0-9]{4})-([0-9]{2})-([0-9]{2})[[:space:]]([0-9]{2}):([0-9]{2}):([0-9]{2}) ]]; then
print_debug "BACKUP_BEGIN is a full date timestamp" print_debug "BACKUP_BEGIN is a full date timestamp"
timer datetime timer datetime
#elif echo "${backup_job_backup_begin//\*/#}" | grep -qP "^(.*((\d+,)+\d+|(\d+(\/|-)\d+)|\d+|#) ?){5}$" ; then # Allow slashes, yet not supporting advanced cron yet
elif echo "${backup_job_backup_begin//\*/#}" | grep -qP "^(((\d+,)+\d+|(\d+(\/|-)\d+)|\d+|#) ?){5}$" ; then elif echo "${backup_job_backup_begin//\*/#}" | grep -qP "^(((\d+,)+\d+|(\d+(\/|-)\d+)|\d+|#) ?){5}$" ; then
print_debug "BACKUP_BEGIN is a cron expression" print_debug "BACKUP_BEGIN is a cron expression"
time_last_run=$(date +"%s") if var_false "${CRON_ALTERNATE}"; then
timer cron "${backup_job_backup_begin}" "${time_current}" "${time_last_run}" time_last_run=$(date +"%s")
backup_job_backup_begin=${backup_job_backup_begin//\"/}
backup_job_backup_begin=${backup_job_backup_begin//\'/}
timer cron "${backup_job_backup_begin}" "${time_current}" "${time_last_run}"
else
echo "${backup_job_backup_begin} /var/run/s6/legacy-services/dbbackup-{{BACKUP_NUMBER}}/run now" > /tmp/.container/cron/{{BACKUP_NUMBER}}-backup
crontab -l | { cat; echo "${backup_job_backup_begin} /var/run/s6/legacy-services/dbbackup-{{BACKUP_NUMBER}}/run now"; } | crontab -
s6-svc -d /var/run/s6/legacy-services/dbbackup-{{BACKUP_NUMBER}}
exit 0
fi
else else
print_error "_BACKUP_BEGIN is invalid - Unable to perform scheduling" print_error "_BACKUP_BEGIN is invalid - Unable to perform scheduling"
cat <<EOF cat <<EOF

View File

@@ -1,6 +1,7 @@
#!/command/with-contenv bash #!/command/with-contenv bash
BACKUP_JOB_CONCURRENCY=${BACKUP_JOB_CONCURRENCY:-"1"} BACKUP_JOB_CONCURRENCY=${BACKUP_JOB_CONCURRENCY:-"1"}
CRON_ALTERNATE=${CRON_ALTERNATE:-"TRUE"}
DBBACKUP_USER=${DBBACKUP_USER:-"dbbackup"} DBBACKUP_USER=${DBBACKUP_USER:-"dbbackup"}
DBBACKUP_GROUP=${DBBACKUP_GROUP:-"${DBBACKUP_USER}"} # Must go after DBBACKUP_USER DBBACKUP_GROUP=${DBBACKUP_GROUP:-"${DBBACKUP_USER}"} # Must go after DBBACKUP_USER
DEFAULT_BACKUP_BEGIN=${DEFAULT_BACKUP_BEGIN:-+0} DEFAULT_BACKUP_BEGIN=${DEFAULT_BACKUP_BEGIN:-+0}

View File

@@ -94,7 +94,7 @@ bootstrap_variables() {
DB"${backup_instance_number}"_BLOBXFER_STORAGE_ACCOUNT_KEY \ DB"${backup_instance_number}"_BLOBXFER_STORAGE_ACCOUNT_KEY \
DB"${backup_instance_number}"_BLOBXFER_REMOTE_PATH \ DB"${backup_instance_number}"_BLOBXFER_REMOTE_PATH \
BLOBXFER_STORAGE_ACCOUNT \ BLOBXFER_STORAGE_ACCOUNT \
BLOBXFER_STORAGE_KEY \ BLOBXFER_STORAGE_ACCOUNT_KEY \
DB_HOST \ DB_HOST \
DB_NAME \ DB_NAME \
DB_PORT \ DB_PORT \
@@ -158,6 +158,11 @@ bootstrap_variables() {
sed -i "s|_PASS='\(.*\)'|_PASS=\1|g" "${backup_instance_vars}" sed -i "s|_PASS='\(.*\)'|_PASS=\1|g" "${backup_instance_vars}"
fi fi
if grep -qo "MONGO_CUSTOM_URI='.*'" "${backup_instance_vars}"; then
print_debug "[bootstrap_variables] [backup_init] Found _MONGO_CUSTOM_URI variable with quotes"
sed -i "s|MONGO_CUSTOM_URI='\(.*\)'|MONGO_CUSTOM_URI=\1|g" "${backup_instance_vars}"
fi
transform_backup_instance_variable() { transform_backup_instance_variable() {
if grep -q "^DB${1}_${2}=" "${backup_instance_vars}" && [ "$(grep "^DB${1}_${2}=" "${backup_instance_vars}" | cut -d = -f2)" != "unset" ]; then if grep -q "^DB${1}_${2}=" "${backup_instance_vars}" && [ "$(grep "^DB${1}_${2}=" "${backup_instance_vars}" | cut -d = -f2)" != "unset" ]; then
export "$3"="$(grep "^DB${1}_${2}=" "${backup_instance_vars}" | cut -d = -f2-)" export "$3"="$(grep "^DB${1}_${2}=" "${backup_instance_vars}" | cut -d = -f2-)"
@@ -183,7 +188,7 @@ bootstrap_variables() {
transform_backup_instance_variable "${backup_instance_number}" BLACKOUT_END backup_job_snapshot_blackout_finish transform_backup_instance_variable "${backup_instance_number}" BLACKOUT_END backup_job_snapshot_blackout_finish
transform_backup_instance_variable "${backup_instance_number}" BLOBXFER_REMOTE_PATH backup_job_blobxfer_remote_path transform_backup_instance_variable "${backup_instance_number}" BLOBXFER_REMOTE_PATH backup_job_blobxfer_remote_path
transform_backup_instance_variable "${backup_instance_number}" BLOBXFER_STORAGE_ACCOUNT backup_job_blobxfer_storage_account transform_backup_instance_variable "${backup_instance_number}" BLOBXFER_STORAGE_ACCOUNT backup_job_blobxfer_storage_account
transform_backup_instance_variable "${backup_instance_number}" BLOBXFER_STORAGE_KEY backup_job_blobxfer_storage_key transform_backup_instance_variable "${backup_instance_number}" BLOBXFER_STORAGE_ACCOUNT_KEY backup_job_blobxfer_storage_account_key
transform_backup_instance_variable "${backup_instance_number}" CHECKSUM backup_job_checksum transform_backup_instance_variable "${backup_instance_number}" CHECKSUM backup_job_checksum
transform_backup_instance_variable "${backup_instance_number}" CLEANUP_TIME backup_job_cleanup_time transform_backup_instance_variable "${backup_instance_number}" CLEANUP_TIME backup_job_cleanup_time
transform_backup_instance_variable "${backup_instance_number}" COMPRESSION backup_job_compression transform_backup_instance_variable "${backup_instance_number}" COMPRESSION backup_job_compression
@@ -477,6 +482,7 @@ backup_couch() {
move_dbbackup move_dbbackup
check_exit_code move "${backup_job_filename}" check_exit_code move "${backup_job_filename}"
post_dbbackup ${backup_job_db_name} post_dbbackup ${backup_job_db_name}
cleanup_old_data
} }
backup_influx() { backup_influx() {
@@ -517,6 +523,7 @@ backup_influx() {
move_dbbackup move_dbbackup
check_exit_code move "${backup_job_filename_dir}" check_exit_code move "${backup_job_filename_dir}"
post_dbbackup "${db}" post_dbbackup "${db}"
cleanup_old_data
done done
;; ;;
2 ) 2 )
@@ -545,6 +552,7 @@ backup_influx() {
move_dbbackup move_dbbackup
check_exit_code move "${backup_job_filename_dir}" check_exit_code move "${backup_job_filename_dir}"
post_dbbackup "${db}" post_dbbackup "${db}"
cleanup_old_data
done done
;; ;;
esac esac
@@ -569,7 +577,7 @@ backup_mongo() {
fi fi
if var_true "${DEBUG_BACKUP_MONGO}" ; then debug off; fi if var_true "${DEBUG_BACKUP_MONGO}" ; then debug off; fi
pre_dbbackup "${backup_job_db_name}" pre_dbbackup "${backup_job_db_name}"
write_log notice "Dumping MongoDB database: '${DB_NAME}' ${compression_string}" write_log notice "Dumping MongoDB database: '${backup_job_db_name}' ${compression_string}"
if var_true "${DEBUG_BACKUP_MONGO}" ; then debug on; fi if var_true "${DEBUG_BACKUP_MONGO}" ; then debug on; fi
silent run_as_user ${play_fair} mongodump --archive=${TEMP_PATH}/${backup_job_filename} ${mongo_compression} ${mongo_backup_parameter} silent run_as_user ${play_fair} mongodump --archive=${TEMP_PATH}/${backup_job_filename} ${mongo_compression} ${mongo_backup_parameter}
exit_code=$? exit_code=$?
@@ -581,6 +589,7 @@ backup_mongo() {
move_dbbackup move_dbbackup
check_exit_code move "${backup_job_filename}" check_exit_code move "${backup_job_filename}"
post_dbbackup "${backup_job_db_name}" post_dbbackup "${backup_job_db_name}"
cleanup_old_data
if var_true "${DEBUG_BACKUP_MONGO}" ; then debug off; fi if var_true "${DEBUG_BACKUP_MONGO}" ; then debug off; fi
} }
@@ -591,7 +600,7 @@ backup_mssql() {
backup_job_filename=mssql_${backup_job_db_name,,}_${backup_job_db_host,,}_${now}.bak backup_job_filename=mssql_${backup_job_db_name,,}_${backup_job_db_host,,}_${now}.bak
backup_job_filename_base=mssql_${backup_job_db_name,,}_${backup_job_db_host,,} backup_job_filename_base=mssql_${backup_job_db_name,,}_${backup_job_db_host,,}
pre_dbbackup "${backup_job_db_name}" pre_dbbackup "${backup_job_db_name}"
write_log notice "Dumping MSSQL database: '${DB_NAME}'" write_log notice "Dumping MSSQL database: '${backup_job_db_name}'"
if var_true "${DEBUG_BACKUP_MSSQL}" ; then debug on; fi if var_true "${DEBUG_BACKUP_MSSQL}" ; then debug on; fi
silent run_as_user ${play_fair} /opt/mssql-tools18/bin/sqlcmd -C -S ${backup_job_db_host}\,${backup_job_db_port} -U ${backup_job_db_user} -P ${backup_job_db_pass} -Q "BACKUP DATABASE [${backup_job_db_name}] TO DISK = N'${TEMP_PATH}/${backup_job_filename}' WITH NOFORMAT, NOINIT, NAME = '${backup_job_db_name}-full', SKIP, NOREWIND, NOUNLOAD, STATS = 10" silent run_as_user ${play_fair} /opt/mssql-tools18/bin/sqlcmd -C -S ${backup_job_db_host}\,${backup_job_db_port} -U ${backup_job_db_user} -P ${backup_job_db_pass} -Q "BACKUP DATABASE [${backup_job_db_name}] TO DISK = N'${TEMP_PATH}/${backup_job_filename}' WITH NOFORMAT, NOINIT, NAME = '${backup_job_db_name}-full', SKIP, NOREWIND, NOUNLOAD, STATS = 10"
exit_code=$? exit_code=$?
@@ -609,13 +618,14 @@ backup_mssql() {
move_dbbackup move_dbbackup
check_exit_code move "${backup_job_filename}" check_exit_code move "${backup_job_filename}"
post_dbbackup "${backup_job_db_name}" post_dbbackup "${backup_job_db_name}"
cleanup_old_data
;; ;;
trn|transaction ) trn|transaction )
prepare_dbbackup prepare_dbbackup
backup_job_filename=mssql_${backup_job_db_name,,}_${backup_job_db_host,,}_${now}.trn backup_job_filename=mssql_${backup_job_db_name,,}_${backup_job_db_host,,}_${now}.trn
backup_job_filename_base=mssql_${backup_job_db_name,,}_trn_${backup_job_db_host,,} backup_job_filename_base=mssql_${backup_job_db_name,,}_trn_${backup_job_db_host,,}
pre_dbbackup "${backup_job_db_name}" pre_dbbackup "${backup_job_db_name}"
write_log notice "Dumping MSSQL database: '${DB_NAME}'" write_log notice "Dumping MSSQL database: '${backup_job_db_name}'"
if var_true "${DEBUG_BACKUP_MSSQL}" ; then debug on; fi if var_true "${DEBUG_BACKUP_MSSQL}" ; then debug on; fi
silent run_as_user ${play_fair} /opt/mssql-tools18/bin/sqlcmd -C -S ${backup_job_db_host}\,${backup_job_db_port} -U ${backup_job_db_user} -P ${backup_job_db_pass} -Q "BACKUP LOG [${backup_job_db_name}] TO DISK = N'${TEMP_PATH}/${backup_job_filename}' WITH NOFORMAT, NOINIT, NAME = '${backup_job_db_name}-log', SKIP, NOREWIND, NOUNLOAD, STATS = 10" silent run_as_user ${play_fair} /opt/mssql-tools18/bin/sqlcmd -C -S ${backup_job_db_host}\,${backup_job_db_port} -U ${backup_job_db_user} -P ${backup_job_db_pass} -Q "BACKUP LOG [${backup_job_db_name}] TO DISK = N'${TEMP_PATH}/${backup_job_filename}' WITH NOFORMAT, NOINIT, NAME = '${backup_job_db_name}-log', SKIP, NOREWIND, NOUNLOAD, STATS = 10"
exit_code=$? exit_code=$?
@@ -624,13 +634,14 @@ backup_mssql() {
compression compression
pre_dbbackup all pre_dbbackup all
run_as_user ${compress_cmd} "${TEMP_PATH}/${backup_job_filename_original}" run_as_user ${compress_cmd} "${TEMP_PATH}/${backup_job_filename_original}"
check_exit_code backup "${backup_job_filename}"
file_encryption file_encryption
timer backup finish timer backup finish
generate_checksum generate_checksum
move_dbbackup move_dbbackup
check_exit_code move "${backup_job_filename}" check_exit_code move "${backup_job_filename}"
post_dbbackup "${backup_job_db_name}" post_dbbackup "${backup_job_db_name}"
cleanup_old_data
;; ;;
esac esac
} }
@@ -682,6 +693,7 @@ backup_mysql() {
move_dbbackup move_dbbackup
check_exit_code move "${backup_job_filename}" check_exit_code move "${backup_job_filename}"
post_dbbackup "${db}" post_dbbackup "${db}"
cleanup_old_data
done done
else else
write_log debug "Not splitting database dumps into their own files" write_log debug "Not splitting database dumps into their own files"
@@ -702,6 +714,7 @@ backup_mysql() {
move_dbbackup move_dbbackup
check_exit_code move "${backup_job_filename}" check_exit_code move "${backup_job_filename}"
post_dbbackup all post_dbbackup all
cleanup_old_data
fi fi
} }
@@ -723,6 +736,7 @@ backup_pgsql() {
move_dbbackup move_dbbackup
check_exit_code move "${backup_job_filename}" check_exit_code move "${backup_job_filename}"
post_dbbackup "globals" post_dbbackup "globals"
cleanup_old_data
} }
if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug on; fi if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug on; fi
@@ -734,7 +748,7 @@ backup_pgsql() {
fi fi
if [ "${backup_job_db_name,,}" = "all" ] ; then if [ "${backup_job_db_name,,}" = "all" ] ; then
write_log debug "Preparing to back up all databases" write_log debug "Preparing to back up all databases"
db_names=$(run_as_user psql -h ${backup_job_db_host} -U ${backup_job_db_user} -p ${backup_job_db_port} -d ${authdb} -c 'COPY (SELECT datname FROM pg_database WHERE datistemplate = false) TO STDOUT;' ) db_names=$(psql -h ${backup_job_db_host} -U ${backup_job_db_user} -p ${backup_job_db_port} -d ${authdb} -c 'COPY (SELECT datname FROM pg_database WHERE datistemplate = false) TO STDOUT;' )
if [ -n "${backup_job_db_name_exclude}" ] ; then if [ -n "${backup_job_db_name_exclude}" ] ; then
db_names_exclusions=$(echo "${backup_job_db_name_exclude}" | tr ',' '\n') db_names_exclusions=$(echo "${backup_job_db_name_exclude}" | tr ',' '\n')
for db_exclude in ${db_names_exclusions} ; do for db_exclude in ${db_names_exclusions} ; do
@@ -771,6 +785,7 @@ backup_pgsql() {
move_dbbackup move_dbbackup
check_exit_code move "${backup_job_filename}" check_exit_code move "${backup_job_filename}"
post_dbbackup "${db}" post_dbbackup "${db}"
cleanup_old_data
done done
if var_true "${_postgres_backup_globals}" ; then backup_pgsql_globals; fi if var_true "${_postgres_backup_globals}" ; then backup_pgsql_globals; fi
else else
@@ -782,7 +797,7 @@ backup_pgsql() {
pre_dbbackup all pre_dbbackup all
write_log notice "Dumping all PostgreSQL databases: '$(echo ${db_names} | xargs | tr ' ' ',')' ${compression_string}" write_log notice "Dumping all PostgreSQL databases: '$(echo ${db_names} | xargs | tr ' ' ',')' ${compression_string}"
if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug on; fi if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug on; fi
tmp_db_names=$(run_as_user psql -h ${backup_job_db_host} -p ${backup_job_db_port} -U ${backup_job_db_user} -d ${authdb} -c 'COPY (SELECT datname FROM pg_database WHERE datistemplate = false) TO STDOUT;' ) tmp_db_names=$(psql -h ${backup_job_db_host} -p ${backup_job_db_port} -U ${backup_job_db_user} -d ${authdb} -c 'COPY (SELECT datname FROM pg_database WHERE datistemplate = false) TO STDOUT;' )
for r_db_name in $(echo $db_names | xargs); do for r_db_name in $(echo $db_names | xargs); do
tmp_db_names=$(echo "$tmp_db_names" | xargs | sed "s|${r_db_name}||g" ) tmp_db_names=$(echo "$tmp_db_names" | xargs | sed "s|${r_db_name}||g" )
done done
@@ -800,6 +815,7 @@ backup_pgsql() {
move_dbbackup move_dbbackup
check_exit_code move "${backup_job_filename}" check_exit_code move "${backup_job_filename}"
post_dbbackup all post_dbbackup all
cleanup_old_data
if var_true "${_postgres_backup_globals}" ; then backup_pgsql_globals; fi if var_true "${_postgres_backup_globals}" ; then backup_pgsql_globals; fi
fi fi
} }
@@ -839,6 +855,7 @@ backup_redis() {
move_dbbackup move_dbbackup
check_exit_code move "${backup_job_filename}" check_exit_code move "${backup_job_filename}"
post_dbbackup all post_dbbackup all
cleanup_old_data
} }
backup_sqlite3() { backup_sqlite3() {
@@ -846,7 +863,7 @@ backup_sqlite3() {
db=$(basename "${backup_job_db_host}") db=$(basename "${backup_job_db_host}")
db="${db%.*}" db="${db%.*}"
backup_job_filename=sqlite3_${db}_${now}.sqlite3 backup_job_filename=sqlite3_${db}_${now}.sqlite3
backup_job_filename_base=sqlite3_${db}.sqlite3 backup_job_filename_base=sqlite3_${db}
pre_dbbackup "${db}" pre_dbbackup "${db}"
write_log notice "Dumping sqlite3 database: '${backup_job_db_host}' ${compression_string}" write_log notice "Dumping sqlite3 database: '${backup_job_db_host}' ${compression_string}"
if var_true "${DEBUG_BACKUP_SQLITE3}" ; then debug on; fi if var_true "${DEBUG_BACKUP_SQLITE3}" ; then debug on; fi
@@ -867,6 +884,7 @@ backup_sqlite3() {
move_dbbackup move_dbbackup
check_exit_code move "${backup_job_filename}" check_exit_code move "${backup_job_filename}"
post_dbbackup "${db}" post_dbbackup "${db}"
cleanup_old_data
} }
check_availability() { check_availability() {
@@ -1025,8 +1043,12 @@ cleanup_old_data() {
write_log info "Cleaning up old backups on filesystem" write_log info "Cleaning up old backups on filesystem"
run_as_user mkdir -p "${backup_job_filesystem_path}" run_as_user mkdir -p "${backup_job_filesystem_path}"
find "${backup_job_filesystem_path}"/ -type f -mmin +"${backup_job_cleanup_time}" -iname "${backup_job_filename_base}*" -exec rm -f {} \; find "${backup_job_filesystem_path}"/ -type f -mmin +"${backup_job_cleanup_time}" -iname "${backup_job_filename_base}*" -exec rm -f {} \;
write_log info "Syncing changes via blobxfer" if [ -z "${backup_job_blobxfer_storage_account}" ] || [ -z "${backup_job_blobxfer_storage_account_key}" ]; then
silent run_as_user blobxfer upload --mode file --remote-path ${backup_job_blobxfer_remote_path} --local-path ${backup_job_filesystem_path} --delete --delete-only write_log warn "Variable _BLOBXFER_STORAGE_ACCOUNT or _BLOBXFER_STORAGE_ACCOUNT_KEY is not set. Skipping blobxfer functions"
else
write_log info "Syncing changes via blobxfer"
silent run_as_user blobxfer upload --mode file --remote-path ${backup_job_blobxfer_remote_path} --storage-account ${backup_job_blobxfer_storage_account} --storage-account-key ${backup_job_blobxfer_storage_account_key} --local-path ${backup_job_filesystem_path} --delete --delete-only
fi
;; ;;
"file" | "filesystem" ) "file" | "filesystem" )
write_log info "Cleaning up old backups on filesystem" write_log info "Cleaning up old backups on filesystem"
@@ -1141,10 +1163,11 @@ create_archive() {
create_schedulers() { create_schedulers() {
if var_true "${DEBUG_CREATE_SCHEDULERS}" ; then debug on; fi if var_true "${DEBUG_CREATE_SCHEDULERS}" ; then debug on; fi
backup() { backup() {
bootstrap_variables upgrade BACKUP local backup_instances=$(set -o posix ; set | grep -Pc "^(DB[0-9]._HOST=|.*MONGO_CUSTOM_URI=)")
local backup_instances=$(printenv | sort | grep -c "^DB[0-9]._HOST")
print_debug "[create_schedulers] Found '${backup_instances}' DB_HOST instances" print_debug "[create_schedulers] Found '${backup_instances}' DB_HOST instances"
if [ -n "${DB_HOST}" ] && [ "${backup_instances}" ]; then if [ -n "${DB_HOST}" ] && [ "${backup_instances}" ]; then
backup_instances=1; backup_instances=1;
print_debug "[create_schedulers] Detected using old DB_ variables" print_debug "[create_schedulers] Detected using old DB_ variables"
@@ -1154,13 +1177,14 @@ create_schedulers() {
instance=$(printf "%02d" $instance) instance=$(printf "%02d" $instance)
cp -R /assets/dbbackup/template-dbbackup /etc/services.available/dbbackup-"${instance}" cp -R /assets/dbbackup/template-dbbackup /etc/services.available/dbbackup-"${instance}"
sed -i "s|{{BACKUP_NUMBER}}|${instance}|g" /etc/services.available/dbbackup-"${instance}"/run sed -i "s|{{BACKUP_NUMBER}}|${instance}|g" /etc/services.available/dbbackup-"${instance}"/run
if [ "${MODE,,}" = "manual" ] ; then service_stop dbbackup-"${instance}" ; fi
cat <<EOF >> /usr/bin/backup"${instance}"-now cat <<EOF >> /usr/bin/backup"${instance}"-now
#!/bin/bash #!/bin/bash
source /assets/functions/00-container source /assets/functions/00-container
PROCESS_NAME=db-backup${instance} PROCESS_NAME=db-backup${instance}
print_info "Starting Manual Backup for db-backup${instance}" print_info "Starting Manual Backup for db-backup${instance}"
/var/run/s6/legacy-services/dbbackup-${instance}/run now #/var/run/s6/legacy-services/dbbackup-${instance}/run now
/etc/services.available/dbbackup-${instance}/run now
EOF EOF
chmod +x /usr/bin/backup"${instance}"-now chmod +x /usr/bin/backup"${instance}"-now
@@ -1170,11 +1194,11 @@ EOF
cat <<EOF > /usr/bin/backup-now cat <<EOF > /usr/bin/backup-now
#!/bin/bash #!/bin/bash
/usr/bin/backup${instance}-now /usr/bin/backup${instance}-now now
EOF EOF
else else
echo "/usr/bin/backup${instance}-now" >> /usr/bin/backup-now echo "/usr/bin/backup${instance}-now now" >> /usr/bin/backup-now
fi fi
instance=$(echo "${instance} +1" | bc) instance=$(echo "${instance} +1" | bc)
@@ -1331,20 +1355,20 @@ EOF
notify() { notify() {
if var_true "${DEBUG_NOTIFY}" ; then debug on; fi if var_true "${DEBUG_NOTIFY}" ; then debug on; fi
notification_custom() { notification_custom() {
if [ -n "${NOTIFICATION_SCRIPT}" ] ; then if [ -n "${NOTIFICATION_CUSTOM_SCRIPT}" ] ; then
if var_true "${NOTIFICATION_SCRIPT_SKIP_X_VERIFY}" ; then if var_true "${NOTIFICATION_CUSTOM_SCRIPT_SKIP_X_VERIFY}" ; then
eval "${NOTIFICATION_SCRIPT}" "${1}" "${2}" "${3}" "${4}" "${5}" eval "${NOTIFICATION_CUSTOM_SCRIPT}" "${1}" "${2}" "${3}" "${4}" "${5}"
else else
if [ -x "${NOTIFICATION_SCRIPT}" ] ; then if [ -x "${NOTIFICATION_CUSTOM_SCRIPT}" ] ; then
write_log notice "Found NOTIFICATION_SCRIPT environment variable. Executing '${NOTIFICATION_SCRIPT}" write_log notice "Found NOTIFICATION_CUSTOM_SCRIPT environment variable. Executing '${NOTIFICATION_CUSTOM_SCRIPT}"
# script timestamp logfile errorcode subject body # script timestamp logfile errorcode subject body
eval "${NOTIFICATION_SCRIPT}" "${1}" "${2}" "${3}" "${4}" "${5}" eval "${NOTIFICATION_CUSTOM_SCRIPT}" "${1}" "${2}" "${3}" "${4}" "${5}"
else else
write_log error "Can't execute NOTIFICATION_SCRIPT environment variable '${NOTIFICATION_SCRIPT}' as its filesystem bit is not executible!" write_log error "Can't execute NOTIFICATION_CUSTOM_SCRIPT environment variable '${NOTIFICATION_CUSTOM_SCRIPT}' as its filesystem bit is not executible!"
fi fi
fi fi
else else
print_error "[notifications] No NOTIFICATION_SCRIPT variable set - Skipping sending Custom notifications" print_error "[notifications] No NOTIFICATION_CUSTOM_SCRIPT variable set - Skipping sending Custom notifications"
fi fi
} }
@@ -1357,18 +1381,20 @@ notify() {
if [ -z "${SMTP_HOST}" ] ; then write_log error "[notifications] No SMTP_HOST variable set - Skipping sending Email notifications" ; skip_mail=true ; fi if [ -z "${SMTP_HOST}" ] ; then write_log error "[notifications] No SMTP_HOST variable set - Skipping sending Email notifications" ; skip_mail=true ; fi
if [ -z "${SMTP_PORT}" ] ; then write_log error "[notifications] No SMTP_PORT variable set - Skipping sending Email notifications" ; skip_mail=true ; fi if [ -z "${SMTP_PORT}" ] ; then write_log error "[notifications] No SMTP_PORT variable set - Skipping sending Email notifications" ; skip_mail=true ; fi
if var_nottrue "${skip_mail}" ; then if var_nottrue "${skip_mail}" ; then
if ! grep -q ^from /etc/msmptrc ; then
echo "from ${MAIL_FROM}" >> /etc/msmtprc
fi
mail_recipients=$(echo "${MAIL_TO}" | tr "," "\n") mail_recipients=$(echo "${MAIL_TO}" | tr "," "\n")
for mail_recipient in $mail_recipients ; do for mail_recipient in $mail_recipients ; do
cat <<EOF | s-nail -v \ cat <<EOF | msmtp -t "${mail_recipient}" -C /etc/msmtprc
-r "${MAIL_FROM}" \ To: ${mail_recipient}
-s "[db-backup] [${DOMAIN}] ${3}" \ Subject: [db-backup] ${4}
-S smtp="${SMTP_HOST}":"${SMTP_PORT}" \
"${mail_recipient}"
Time: ${1} Time: ${1}
Log File: {2} Log File: {2}
Error Code: ${3} Error Code: ${3}
${4} ${5}
EOF EOF
done done
fi fi
@@ -1384,7 +1410,7 @@ EOF
if [ -z "${MATTERMOST_WEBHOOK_URL}" ] ; then write_log error "[notifications] No MATTERMOST_WEBHOOK_URL variable set - Skipping sending Mattermost notifications" ; skip_mattermost=true ; fi if [ -z "${MATTERMOST_WEBHOOK_URL}" ] ; then write_log error "[notifications] No MATTERMOST_WEBHOOK_URL variable set - Skipping sending Mattermost notifications" ; skip_mattermost=true ; fi
if var_nottrue "${skip_mattermost}" ; then if var_nottrue "${skip_mattermost}" ; then
emoji=":bomb:" emoji=":bomb:"
message="*[db-backup] ${3}*\n${4}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}" message="*[db-backup] ${4}*\n${5}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}"
mattermost_recipients=$(echo "${MATTERMOST_RECIPIENT}" | tr "," "\n") mattermost_recipients=$(echo "${MATTERMOST_RECIPIENT}" | tr "," "\n")
for mattermost_recipient in $mattermost_recipients ; do for mattermost_recipient in $mattermost_recipients ; do
payload="payload={\"channel\": \"${mattermost_recipient//\"/\\\"}\", \"username\": \"${MATTERMOST_USERNAME//\"/\\\"}\", \"text\": \"${message//\"/\\\"}\", \"icon_emoji\": \"${emoji}\"}" payload="payload={\"channel\": \"${mattermost_recipient//\"/\\\"}\", \"username\": \"${MATTERMOST_USERNAME//\"/\\\"}\", \"text\": \"${message//\"/\\\"}\", \"icon_emoji\": \"${emoji}\"}"
@@ -1409,7 +1435,7 @@ EOF
for matrix_room in $matrix_rooms ; do for matrix_room in $matrix_rooms ; do
curl \ curl \
-XPOST \ -XPOST \
-d "{\"msgtype\":\"m.text\", \"body\":\"*[db-backup] ${3}*\n${4}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}*\"}" \ -d "{\"msgtype\":\"m.text\", \"body\":\"*[db-backup] ${4}*\n${5}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}*\"}" \
"${MATRIX_HOST}/_matrix/client/r0/rooms/${matrix_room}/send/m.room.message?access_token=${MATRIX_ACCESS_TOKEN}" "${MATRIX_HOST}/_matrix/client/r0/rooms/${matrix_room}/send/m.room.message?access_token=${MATRIX_ACCESS_TOKEN}"
done done
fi fi
@@ -1425,7 +1451,7 @@ EOF
if [ -z "${ROCKETCHAT_WEBHOOK_URL}" ] ; then write_log error "[notifications] No ROCKETCHAT_WEBHOOK_URL variable set - Skipping sending Rocket.Chat notifications" ; skip_rocketchat=true ; fi if [ -z "${ROCKETCHAT_WEBHOOK_URL}" ] ; then write_log error "[notifications] No ROCKETCHAT_WEBHOOK_URL variable set - Skipping sending Rocket.Chat notifications" ; skip_rocketchat=true ; fi
if var_nottrue "${skip_rocketchat}" ; then if var_nottrue "${skip_rocketchat}" ; then
emoji=":bomb:" emoji=":bomb:"
message="*[db-backup] ${3}*\n${4}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}" message="*[db-backup] ${4}*\n${5}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}"
rocketchat_recipients=$(echo "${ROCKETCHAT_RECIPIENT}" | tr "," "\n") rocketchat_recipients=$(echo "${ROCKETCHAT_RECIPIENT}" | tr "," "\n")
for rocketchat_recipient in $rocketchat_recipients ; do for rocketchat_recipient in $rocketchat_recipients ; do
payload="payload={\"channel\": \"${rocketchat_recipient//\"/\\\"}\", \"username\": \"${ROCKETCHAT_USERNAME//\"/\\\"}\", \"text\": \"${message//\"/\\\"}\", \"icon_emoji\": \"${emoji}\"}" payload="payload={\"channel\": \"${rocketchat_recipient//\"/\\\"}\", \"username\": \"${ROCKETCHAT_USERNAME//\"/\\\"}\", \"text\": \"${message//\"/\\\"}\", \"icon_emoji\": \"${emoji}\"}"
@@ -1444,7 +1470,7 @@ EOF
# $4 body # $4 body
if var_true "${ENABLE_NOTIFICATIONS}" ; then if var_true "${ENABLE_NOTIFICATIONS}" ; then
notification_types=$(echo "${NOTIIFICATION_TYPE}" | tr "," "\n") notification_types=$(echo "${NOTIFICATION_TYPE}" | tr "," "\n")
for notification_type in $notification_types ; do for notification_type in $notification_types ; do
case "${notification_type,,}" in case "${notification_type,,}" in
"custom" ) "custom" )
@@ -1576,20 +1602,24 @@ EOF
run_as_user rm -rf "${TEMP_PATH}"/"${backup_job_filename}" run_as_user rm -rf "${TEMP_PATH}"/"${backup_job_filename}"
;; ;;
"blobxfer" ) "blobxfer" )
write_log info "Synchronize local storage from S3 Bucket with blobxfer" if [ -z "${backup_job_blobxfer_storage_account}" ] || [ -z "${backup_job_blobxfer_storage_account_key}" ]; then
${play_fair} blobxfer download --mode file --remote-path ${backup_job_blobxfer_remote_path} --local-path ${backup_job_filesystem_path} --delete write_log warn "Variable _BLOBXFER_STORAGE_ACCOUNT or _BLOBXFER_STORAGE_ACCOUNT_KEY is not set. Skipping blobxfer functions"
else
write_log info "Synchronize local storage from S3 Bucket with blobxfer"
${play_fair} blobxfer download --mode file --remote-path ${backup_job_blobxfer_remote_path} --storage-account ${backup_job_blobxfer_storage_account} --storage-account-key ${backup_job_blobxfer_storage_account_key} --local-path ${backup_job_filesystem_path} --delete
write_log info "Moving backup to external storage with blobxfer" write_log info "Moving backup to external storage with blobxfer"
mkdir -p "${backup_job_filesystem_path}" mkdir -p "${backup_job_filesystem_path}"
if [ "${backup_job_checksum}" != "none" ] ; then run_as_user mv "${TEMP_PATH}"/*."${checksum_extension}" "${backup_job_filesystem_path}"/; fi if [ "${backup_job_checksum}" != "none" ] ; then run_as_user mv "${TEMP_PATH}"/*."${checksum_extension}" "${backup_job_filesystem_path}"/; fi
run_as_user mv "${TEMP_PATH}"/"${backup_job_filename}" "${backup_job_filesystem_path}"/"${backup_job_filename}" run_as_user mv "${TEMP_PATH}"/"${backup_job_filename}" "${backup_job_filesystem_path}"/"${backup_job_filename}"
silent run_as_user ${play_fair} blobxfer upload --mode file --remote-path ${backup_job_blobxfer_remote_path} --local-path ${backup_job_filesystem_path} silent run_as_user ${play_fair} blobxfer upload --mode file --remote-path ${backup_job_blobxfer_remote_path} --storage-account ${backup_job_blobxfer_storage_account} --storage-account-key ${backup_job_blobxfer_storage_account_key} --local-path ${backup_job_filesystem_path}
move_exit_code=$? move_exit_code=$?
if [ "${backup_job_checksum}" != "none" ] ; then run_as_user rm -rf "${TEMP_PATH}"/"${backup_job_filename}"."${checksum_extension}" ; fi if [ "${backup_job_checksum}" != "none" ] ; then run_as_user rm -rf "${TEMP_PATH}"/"${backup_job_filename}"."${checksum_extension}" ; fi
run_as_user rm -rf "${TEMP_PATH}"/"${backup_job_filename}" run_as_user rm -rf "${TEMP_PATH}"/"${backup_job_filename}"
fi
;; ;;
esac esac
else else
@@ -1625,20 +1655,20 @@ pre_dbbackup() {
### Pre Backup Custom Script Support ### Pre Backup Custom Script Support
if [ -d "/assets/custom-scripts/pre" ] && dir_notempty "/assets/custom-scripts/pre" ; then if [ -d "/assets/custom-scripts/pre" ] && dir_notempty "/assets/custom-scripts/pre" ; then
write_log warning "Found Custom Post Scripts in /assets/custom-scripts/pre - Automatically moving them to '${backup_job_script_location_pre}'" write_log warn "Found Custom Post Scripts in /assets/custom-scripts/pre - Automatically moving them to '${backup_job_script_location_pre}'"
run_as_user mkdir -p "${backup_job_script_location_pre}" mkdir -p "${backup_job_script_location_pre}"
silent run_as_user cp /assets/custom-scripts/pre/* "${backup_job_script_location_pre}" silent cp -aR /assets/custom-scripts/pre/* "${backup_job_script_location_pre}"
fi fi
if [ -d "${backup_job_script_location_pre}" ] && dir_notempty "${backup_job_script_location_pre}" ; then if [ -d "${backup_job_script_location_pre}" ] && dir_notempty "${backup_job_script_location_pre}" ; then
for f in $(find ${backup_job_script_location_pre} -name \*.sh -type f); do for f in $(find ${backup_job_script_location_pre} -name \*.sh -type f); do
if var_true "${backup_job_pre_script_x_verify}" ; then if var_true "${backup_job_pre_script_x_verify}" ; then
run_as_user ${f} "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${backup_job_filename}" ${f} "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${backup_job_filename}"
else else
if [ -x "${f}" ] ; then if [ -x "${f}" ] ; then
write_log notice "Executing pre backup custom script : '${f}'" write_log notice "Executing pre backup custom script : '${f}'"
## script DB_TYPE DB_HOST DB_NAME STARTEPOCH BACKUP_FILENAME ## script DB_TYPE DB_HOST DB_NAME STARTEPOCH BACKUP_FILENAME
run_as_user ${f} "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${backup_job_filename}" ${f} "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${backup_job_filename}"
else else
write_log error "Can't run pre backup custom script: '${f}' as its filesystem bit is not executible!" write_log error "Can't run pre backup custom script: '${f}' as its filesystem bit is not executible!"
fi fi
@@ -1681,11 +1711,11 @@ EOZP
### Post Script Support ### Post Script Support
if [ -n "${backup_job_post_script}" ] ; then if [ -n "${backup_job_post_script}" ] ; then
if var_true "${backup_job_post_script_x_verify}" ; then if var_true "${backup_job_post_script_x_verify}" ; then
run_as_user eval "${backup_job_post_script}" "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}" eval "${backup_job_post_script}" "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}"
else else
if [ -x "${backup_job_post_script}" ] ; then if [ -x "${backup_job_post_script}" ] ; then
write_log notice "Found POST_SCRIPT environment variable. Executing '${backup_job_post_script}" write_log notice "Found POST_SCRIPT environment variable. Executing '${backup_job_post_script}"
run_as_user eval "${backup_job_post_script}" "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}" eval "${backup_job_post_script}" "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}"
else else
write_log error "Can't execute POST_SCRIPT environment variable '${backup_job_post_script}' as its filesystem bit is not executible!" write_log error "Can't execute POST_SCRIPT environment variable '${backup_job_post_script}' as its filesystem bit is not executible!"
fi fi
@@ -1694,20 +1724,20 @@ EOZP
### Post Backup Custom Script Support ### Post Backup Custom Script Support
if [ -d "/assets/custom-scripts/" ] && dir_notempty "/assets/custom-scripts" ; then if [ -d "/assets/custom-scripts/" ] && dir_notempty "/assets/custom-scripts" ; then
write_log warning "Found Custom Post Scripts in /assets/custom-scripts/ - Automatically moving them to '${backup_job_script_location_post}'" write_log warn "Found Custom Post Scripts in /assets/custom-scripts/ - Automatically moving them to '${backup_job_script_location_post}'"
run_as_user mkdir -p "${backup_job_script_location_post}" mkdir -p "${backup_job_script_location_post}"
silent run_as_user cp /assets/custom-scripts/* "${backup_job_script_location_post}" cp -aR /assets/custom-scripts/* "${backup_job_script_location_post}"
fi fi
if [ -d "${backup_job_script_location_post}" ] && dir_notempty "${backup_job_script_location_post}" ; then if [ -d "${backup_job_script_location_post}" ] && dir_notempty "${backup_job_script_location_post}" ; then
for f in $(run_as_user find "${backup_job_script_location_post}" -name \*.sh -type f); do for f in $(run_as_user find "${backup_job_script_location_post}" -name \*.sh -type f); do
if var_true "${backup_job_post_script_x_verify}" ; then if var_true "${backup_job_post_script_x_verify}" ; then
run_as_user ${f} "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}" ${f} "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}"
else else
if [ -x "${f}" ] ; then if [ -x "${f}" ] ; then
write_log notice "Executing post backup custom script : '${f}'" write_log notice "Executing post backup custom script : '${f}'"
## script EXIT_CODE DB_TYPE DB_HOST DB_NAME STARTEPOCH FINISHEPOCH DURATIONEPOCH BACKUP_FILENAME FILESIZE CHECKSUMVALUE ## script EXIT_CODE DB_TYPE DB_HOST DB_NAME STARTEPOCH FINISHEPOCH DURATIONEPOCH BACKUP_FILENAME FILESIZE CHECKSUMVALUE
run_as_user ${f} "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}" ${f} "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}"
else else
write_log error "Can't run post backup custom script: '${f}' as its filesystem bit is not executible!" write_log error "Can't run post backup custom script: '${f}' as its filesystem bit is not executible!"
fi fi
@@ -1743,7 +1773,7 @@ process_limiter() {
} }
run_as_user() { run_as_user() {
sudo -Eu "${DBBACKUP_USER}" $@ sudo -Eu "${DBBACKUP_USER}" "$@"
} }
setup_mode() { setup_mode() {
@@ -1835,7 +1865,7 @@ timer() {
if [ "${expression_step}" != "${expression}" ]; then if [ "${expression_step}" != "${expression}" ]; then
for step in ${validate_temp}; do for step in ${validate_temp}; do
if [ $(( ( step - expression_start ) % expression_step )) -eq 0 ]; then if [ $(( ( step - expression_start ) % expression_step )) -eq 0 ]; then
validate_all="$validate_all ${step}" validate_all="${validate_all} ${step}"
fi fi
done done
else else
@@ -1843,15 +1873,16 @@ timer() {
fi fi
done done
validate_all=$(echo "${validate_all}" | tr ' ' '\n' | sort -n -u | tr '\n' ' ') validate_all=$(echo "${validate_all}" | tr ' ' '\n' | sort -g -u | tr '\n' ' ')
for entry in $validate_all; do for entry in $validate_all; do
if [ "${entry}" -ge "${3}" ]; then if [ ${entry} -ge ${3} ]; then
echo "${entry}" echo "${entry}"
return 0 return 0
fi fi
done done
echo "${validate_all%% *}" echo "${validate_all// /}"
#echo "${validate_all%% *}"
} }
local cron_compare="${3}" local cron_compare="${3}"
@@ -1859,18 +1890,21 @@ timer() {
local cron_compare_difference=$(( cron_compare - ${4} )) local cron_compare_difference=$(( cron_compare - ${4} ))
if [ "${cron_compare_difference}" -lt 60 ]; then if [ "${cron_compare_difference}" -lt 60 ]; then
cron_compare=$((${cron_compare} + $(( 60 - cron_compare_difference )) )) cron_compare=$((cron_compare + $(( 60 - cron_compare_difference )) ))
fi fi
local cron_current_seconds="$(date --date=@"${cron_compare_seconds}" +"%-S")" local cron_current_seconds="$(date --date=@"${cron_compare_seconds}" +"%-S")"
if [ "${cron_current_seconds}" -ne 0 ]; then if [ "${cron_current_seconds}" -ne 0 ]; then
cron_compare_seconds=$(( cron_compare_seconds - cron_current_seconds )) cron_compare=$(( cron_compare_seconds - cron_current_seconds ))
fi fi
local cron_minute="$(echo -n "${2}" | awk '{print $1}')" local cron_minute="$(echo -n "${2}" | awk '{print $1}')"
local cron_hour="$(echo -n "${2}" | awk '{print $2}')" local cron_hour="$(echo -n "${2}" | awk '{print $2}')"
local cron_day_of_month="$(echo -n "${2}" | awk '{print $3}')" local cron_day_of_month="$(echo -n "${2}" | awk '{print $3}')"
local cron_month="$(echo -n "${2}" | awk '{print $4}')"Generating local cron_month="$(echo -n "${2}" | awk '{print $4}')"
local cron_day_of_week="$(echo -n "${2}" | awk '{print $5}')"
local cron_next_minute="$(date --date=@"${cron_compare}" +"%-M")"
local cron_next_hour="$(date --date=@"${cron_compare}" +"%-H")" local cron_next_hour="$(date --date=@"${cron_compare}" +"%-H")"
local cron_next_day_of_month="$(date --date=@"${cron_compare}" +"%-d")" local cron_next_day_of_month="$(date --date=@"${cron_compare}" +"%-d")"
local cron_next_month="$(date --date=@"${cron_compare}" +"%-m")" local cron_next_month="$(date --date=@"${cron_compare}" +"%-m")"
@@ -1879,8 +1913,10 @@ timer() {
local cron_next_year="$(date --date=@"${cron_compare}" +"%-Y")" local cron_next_year="$(date --date=@"${cron_compare}" +"%-Y")"
local cron_next= local cron_next=
local cron_parsed=1
while [ "$cron_parsed" != "0" ]; do while [ "${cron_parsed}" != "0" ]; do
print_debug "[timer] [cron] Parse Minute"
cron_next=$(parse_expression "${cron_minute}" 59 "${cron_next_minute}") cron_next=$(parse_expression "${cron_minute}" 59 "${cron_next_minute}")
if [ "${cron_next}" != "${cron_next_minute}" ]; then if [ "${cron_next}" != "${cron_next_minute}" ]; then
if [ "${cron_next_minute}" -gt "${cron_next}" ]; then if [ "${cron_next_minute}" -gt "${cron_next}" ]; then
@@ -1890,20 +1926,23 @@ timer() {
cron_next_minute="${cron_next}" cron_next_minute="${cron_next}"
fi fi
print_debug "[timer] [cron] Parse Hour"
cron_next=$(parse_expression "${cron_hour}" 23 "${cron_next_hour}") cron_next=$(parse_expression "${cron_hour}" 23 "${cron_next_hour}")
if [ "${cron_next}" != "${cron_next_hour}" ]; then if [ "${cron_next}" != "${cron_next_hour}" ]; then
if [ "${cron_next_hour}" -gt "${cron_next}" ]; then if [ "${cron_next_hour}" -gt "${cron_next}" ]; then
cron_next_day_of_month=$(( cron_next_day_of_month + 1 )) cron_next_day_of_month=$(( cron_next_day_of_month + 1 ))
fi fi
cron_next_hour="${cron_next}" cron_next_hour="${cron_next}"
#cron_next_minute=0 cron_next_minute=0
fi fi
print_debug "[timer] [cron] Parse Day of Week"
cron_next=$(parse_expression "${cron_day_of_week}" 6 "${cron_next_day_of_week}") cron_next=$(parse_expression "${cron_day_of_week}" 6 "${cron_next_day_of_week}")
if [ "${cron_next}" != "${cron_next_day_of_week}" ]; then if [ "${cron_next}" != "${cron_next_day_of_week}" ]; then
day_of_week_difference=$(( ${cron_next} - ${cron_next_day_of_week} )) day_of_week_difference=$(( cron_next - cron_next_day_of_week ))
if [ "${day_of_week_difference}" -lt "0" ]; then if [ "${day_of_week_difference}" -lt 0 ]; then
day_of_week_difference=$(( day_of_week_difference + 7 )) day_of_week_difference=$(( day_of_week_difference + 7 ))
fi fi
@@ -1912,6 +1951,7 @@ timer() {
cron_next_minute=0 cron_next_minute=0
fi fi
print_debug "[timer] [cron] Parse day of month"
case "${cron_next_month}" in case "${cron_next_month}" in
1|3|5|7|8|10|12) 1|3|5|7|8|10|12)
last_day_of_month="31" last_day_of_month="31"
@@ -1951,6 +1991,7 @@ timer() {
cron_next_day_of_month=$cron_next cron_next_day_of_month=$cron_next
fi fi
print_debug "[timer] [cron] Parse Next Month"
cron_next=$(parse_expression "${cron_month}" 12 "${cron_next_month}") cron_next=$(parse_expression "${cron_month}" 12 "${cron_next_month}")
if [ "${cron_next}" != "${cron_next_month}" ]; then if [ "${cron_next}" != "${cron_next_month}" ]; then
if [ "${cron_next}" -gt "12" ]; then if [ "${cron_next}" -gt "12" ]; then
@@ -1967,7 +2008,6 @@ timer() {
fi fi
cron_parsed=0 cron_parsed=0
done done
local cron_future=$(date --date="${cron_next_year}-$(printf "%02d" ${cron_next_month})-$(printf "%02d" ${cron_next_day_of_month})T$(printf "%02d" ${cron_next_hour}):$(printf "%02d" ${cron_next_minute}):00" "+%s") local cron_future=$(date --date="${cron_next_year}-$(printf "%02d" ${cron_next_month})-$(printf "%02d" ${cron_next_day_of_month})T$(printf "%02d" ${cron_next_hour}):$(printf "%02d" ${cron_next_minute}):00" "+%s")
local cron_future_difference=$(( cron_future - cron_compare_seconds )) local cron_future_difference=$(( cron_future - cron_compare_seconds ))
time_cron=true time_cron=true
@@ -2062,4 +2102,4 @@ write_log() {
print_${_arg_log_level} "${_arg_log_message}" print_${_arg_log_level} "${_arg_log_message}"
output_on output_on
if var_true "${DEBUG_WRITE_LOG}" ; then debug off; fi if var_true "${DEBUG_WRITE_LOG}" ; then debug off; fi
} }