Mirror of https://github.com/tiredofit/docker-db-backup.git (synced 2025-12-22 21:53:42 +01:00)

Compare commits (15 commits)
| SHA1 |
|---|
| be619fb707 |
| cccc088b35 |
| 4579f4057c |
| cd683648d0 |
| 11f55f3d82 |
| 674a98fcd8 |
| 77c747e01b |
| 2e30558a27 |
| c746fb641e |
| ca2f04cd59 |
| dfa94ecab7 |
| eaea6dc348 |
| 34abe88159 |
| 5ffbeeb163 |
| c82cee80f8 |
CHANGELOG.md (61 lines changed)

@@ -1,3 +1,64 @@
+## 4.0.24 2023-11-28 <dave at tiredofit dot ca>
+
+### Changed
+- Fix issue with cron parsing and 0 being a value getting clobbered by sort command
+
+
+## 4.0.23 2023-11-28 <dave at tiredofit dot ca>
+
+### Changed
+- Resolve issue with custom notification scripts not executing
+
+
+## 4.0.22 2023-11-25 <dave at tiredofit dot ca>
+
+### Changed
+- Move cleanup_old_data routines to happen within backup_ function to properly accomodate for globals, and ALL DB_NAME use cases
+
+
+## 4.0.21 2023-11-22 <dave at tiredofit dot ca>
+
+### Changed
+- Fix for SQLite backups not being cleaned up properly due to a malformed base
+
+
+## 4.0.20 2023-11-21 <dave at tiredofit dot ca>
+
+### Changed
+- Update base image to support S6 Overlay 3.1.6.2 to solve shutdown issues specifically with MODE=MANUAL and MANUAL_RUN_FOREVER=TRUE
+- Add some safety nets for Manual scheduling
+
+
+## 4.0.19 2023-11-20 <dave at tiredofit dot ca>
+
+### Changed
+- Make adjustments to cron scheduling feature to be able to handle whitespace properly
+
+
+## 4.0.18 2023-11-18 <joergmschulz@github>
+
+### Changed
+- Fix loading msmtp configuration
+
+
+## 4.0.17 2023-11-17 <dave at tiredofit dot ca>
+
+### Changed
+- Provide more details when notifying via instant messages
+
+
+## 4.0.16 2023-11-17 <dave at tiredofit dot ca>
+
+### Changed
+- Switch to using msmtp instead of s-mail for notify()
+
+
+## 4.0.15 2023-11-16 <dave at tiredofit dot ca>
+
+### Changed
+- Fix cleanup of old backups
+
+
 ## 4.0.14 2023-11-13 <dave at tiredofit dot ca>
 
 ### Changed
@@ -10,7 +10,7 @@ ENV INFLUX1_CLIENT_VERSION=1.8.0 \
 MSODBC_VERSION=18.3.2.1-1 \
 MSSQL_VERSION=18.3.1.1-1 \
 AWS_CLI_VERSION=1.29.78 \
-CONTAINER_ENABLE_MESSAGING=FALSE \
+CONTAINER_ENABLE_MESSAGING=TRUE \
 CONTAINER_ENABLE_MONITORING=TRUE \
 IMAGE_NAME="tiredofit/db-backup" \
 IMAGE_REPO_URL="https://github.com/tiredofit/docker-db-backup/"

@@ -76,7 +76,7 @@ RUN source /assets/functions/00-container && \
 *) sleep 0.1 ;; \
 esac; \
 \
-if [ $mssql = "true" ] ; then curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk ; curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; ls -l ; echo y | apk add --allow-untrusted msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; else echo >&2 "Detected non x86_64 or ARM64 build variant, skipping MSSQL installation" ; fi; \
+if [ $mssql = "true" ] ; then curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk ; curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; echo y | apk add --allow-untrusted msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; else echo >&2 "Detected non x86_64 or ARM64 build variant, skipping MSSQL installation" ; fi; \
 if [ $influx2 = "true" ] ; then curl -sSL https://dl.influxdata.com/influxdb/releases/influxdb2-client-${INFLUX2_CLIENT_VERSION}-linux-${influx_arch}.tar.gz | tar xvfz - --strip=1 -C /usr/src/ ; chmod +x /usr/src/influx ; mv /usr/src/influx /usr/sbin/ ; else echo >&2 "Unable to build Influx 2 on this system" ; fi ; \
 clone_git_repo https://github.com/aws/aws-cli "${AWS_CLI_VERSION}" && \
 python3 setup.py install --prefix=/usr && \
README.md (17 lines changed)

@@ -601,13 +601,13 @@ Options that are related to the value of `DB01_BACKUP_LOCATION`
 
 If `DB01_BACKUP_LOCTION` = `FILESYSTEM` then the following options are used:
 
 | Variable                          | Description                                                                                             | Default                           |
-| --------------------------------- | ----------------------------------------------------------------------------------------------------- | --------------------------------- |
+| --------------------------------- | ----------------------------------------------------------------------------------------------------- | ---------------------------------- |
 | `DB01_CREATE_LATEST_SYMLINK`      | Create a symbolic link pointing to last backup in this format: `latest-(DB_TYPE)-(DB_NAME)-(DB_HOST)`  | `TRUE`                            |
 | `DB01_FILESYSTEM_PATH`            | Directory where the database dumps are kept.                                                            | `/backup`                         |
 | `DB01_FILESYSTEM_PATH_PERMISSION` | Permissions to apply to backup directory                                                                | `700`                             |
-| `DB01_FILESYSTEM_ARCHIVE_PATH`    | Optional Directory where the database dumps archives are kept                                           | `${DB01_FILESYSTEM_PATH/archive/` |
+| `DB01_FILESYSTEM_ARCHIVE_PATH`    | Optional Directory where the database dumps archives are kept                                           | `${DB01_FILESYSTEM_PATH}/archive/` |
 | `DB01_FILESYSTEM_PERMISSION`      | Directory and File permissions to apply to files.                                                       | `600`                             |
 
 ###### S3
 

@@ -742,6 +742,9 @@ $5 body/error message
 
 
 ##### Email Notifications
+
+See more details in the base image listed above for more mail environment variables.
+
 | Parameter   | Description                                                                                 | Default | `_FILE` |
 | ----------- | ------------------------------------------------------------------------------------------- | ------- | ------- |
 | `MAIL_FROM` | What email address to send mail from for errors                                              |         |         |
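The `MAIL_FROM` setting documented above works together with `SMTP_HOST`, `SMTP_PORT`, and `MAIL_TO`, which the msmtp change further down in this changeset reads when sending a message, and mail is only attempted when notifications are switched on. A minimal sketch of the related environment, with placeholder values:

```bash
# Illustrative settings only - substitute your own SMTP relay and addresses.
ENABLE_NOTIFICATIONS=TRUE
NOTIFICATION_TYPE=mail        # type name assumed for illustration; see the notification docs
SMTP_HOST=smtp.example.com
SMTP_PORT=25
MAIL_FROM=dbbackup@example.com
MAIL_TO=admin@example.com
```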
@@ -16,7 +16,7 @@ fi
 
 trap ctrl_c INT
 
-if [[ "${MODE,,}" =~ "standalone" ]] || [ "${1,,}" = "manual" ] || [ "${1,,}" = "now" ]; then
+if [[ "${MODE,,}" =~ "standalone" ]] || [ "${MODE,,}" = "manual" ] || [ "${1,,}" = "manual" ] || [ "${1,,}" = "now" ]; then
 print_debug "Detected Manual Mode"
 persist=false
 backup_job_backup_begin=+0

@@ -34,9 +34,12 @@ else
 elif [[ "${backup_job_backup_begin}" =~ ([0-9]{4})-([0-9]{2})-([0-9]{2})[[:space:]]([0-9]{2}):([0-9]{2}):([0-9]{2}) ]]; then
 print_debug "BACKUP_BEGIN is a full date timestamp"
 timer datetime
+#elif echo "${backup_job_backup_begin//\*/#}" | grep -qP "^(.*((\d+,)+\d+|(\d+(\/|-)\d+)|\d+|#) ?){5}$" ; then # Allow slashes, yet not supporting advanced cron yet
 elif echo "${backup_job_backup_begin//\*/#}" | grep -qP "^(((\d+,)+\d+|(\d+(\/|-)\d+)|\d+|#) ?){5}$" ; then
 print_debug "BACKUP_BEGIN is a cron expression"
 time_last_run=$(date +"%s")
+backup_job_backup_begin=${backup_job_backup_begin//\"/}
+backup_job_backup_begin=${backup_job_backup_begin//\'/}
 timer cron "${backup_job_backup_begin}" "${time_current}" "${time_last_run}"
 else
 print_error "_BACKUP_BEGIN is invalid - Unable to perform scheduling"
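The hunk above decides how a job's start time is interpreted: a value matching `YYYY-MM-DD HH:MM:SS` is treated as a one-off timestamp, while a five-field pattern of digits, commas, ranges, steps, and `*` is treated as a cron expression, with surrounding single or double quotes now stripped before parsing. A minimal sketch of both forms, assuming the per-job `DB01_BACKUP_BEGIN` variable documented elsewhere in the README:

```bash
# Illustrative values only; adjust to your own schedule.

# Cron form: every day at 03:00 (quotes are tolerated since this change strips them)
DB01_BACKUP_BEGIN="0 3 * * *"

# Absolute form: start the first run at an exact date and time
DB01_BACKUP_BEGIN="2023-12-01 03:00:00"
```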
@@ -482,6 +482,7 @@ backup_couch() {
 move_dbbackup
 check_exit_code move "${backup_job_filename}"
 post_dbbackup ${backup_job_db_name}
+cleanup_old_data
 }
 
 backup_influx() {

@@ -522,6 +523,7 @@ backup_influx() {
 move_dbbackup
 check_exit_code move "${backup_job_filename_dir}"
 post_dbbackup "${db}"
+cleanup_old_data
 done
 ;;
 2 )

@@ -550,6 +552,7 @@ backup_influx() {
 move_dbbackup
 check_exit_code move "${backup_job_filename_dir}"
 post_dbbackup "${db}"
+cleanup_old_data
 done
 ;;
 esac

@@ -586,6 +589,7 @@ backup_mongo() {
 move_dbbackup
 check_exit_code move "${backup_job_filename}"
 post_dbbackup "${backup_job_db_name}"
+cleanup_old_data
 if var_true "${DEBUG_BACKUP_MONGO}" ; then debug off; fi
 }
 

@@ -614,6 +618,7 @@ backup_mssql() {
 move_dbbackup
 check_exit_code move "${backup_job_filename}"
 post_dbbackup "${backup_job_db_name}"
+cleanup_old_data
 ;;
 trn|transaction )
 prepare_dbbackup

@@ -636,6 +641,7 @@ backup_mssql() {
 move_dbbackup
 check_exit_code move "${backup_job_filename}"
 post_dbbackup "${backup_job_db_name}"
+cleanup_old_data
 ;;
 esac
 }

@@ -687,6 +693,7 @@ backup_mysql() {
 move_dbbackup
 check_exit_code move "${backup_job_filename}"
 post_dbbackup "${db}"
+cleanup_old_data
 done
 else
 write_log debug "Not splitting database dumps into their own files"

@@ -707,6 +714,7 @@ backup_mysql() {
 move_dbbackup
 check_exit_code move "${backup_job_filename}"
 post_dbbackup all
+cleanup_old_data
 fi
 }
 

@@ -728,6 +736,7 @@ backup_pgsql() {
 move_dbbackup
 check_exit_code move "${backup_job_filename}"
 post_dbbackup "globals"
+cleanup_old_data
 }
 
 if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug on; fi

@@ -776,6 +785,7 @@ backup_pgsql() {
 move_dbbackup
 check_exit_code move "${backup_job_filename}"
 post_dbbackup "${db}"
+cleanup_old_data
 done
 if var_true "${_postgres_backup_globals}" ; then backup_pgsql_globals; fi
 else

@@ -805,6 +815,7 @@ backup_pgsql() {
 move_dbbackup
 check_exit_code move "${backup_job_filename}"
 post_dbbackup all
+cleanup_old_data
 if var_true "${_postgres_backup_globals}" ; then backup_pgsql_globals; fi
 fi
 }

@@ -844,6 +855,7 @@ backup_redis() {
 move_dbbackup
 check_exit_code move "${backup_job_filename}"
 post_dbbackup all
+cleanup_old_data
 }
 
 backup_sqlite3() {

@@ -851,7 +863,7 @@ backup_sqlite3() {
 db=$(basename "${backup_job_db_host}")
 db="${db%.*}"
 backup_job_filename=sqlite3_${db}_${now}.sqlite3
-backup_job_filename_base=sqlite3_${db}.sqlite3
+backup_job_filename_base=sqlite3_${db}
 pre_dbbackup "${db}"
 write_log notice "Dumping sqlite3 database: '${backup_job_db_host}' ${compression_string}"
 if var_true "${DEBUG_BACKUP_SQLITE3}" ; then debug on; fi

@@ -872,6 +884,7 @@ backup_sqlite3() {
 move_dbbackup
 check_exit_code move "${backup_job_filename}"
 post_dbbackup "${db}"
+cleanup_old_data
 }
 
 check_availability() {

@@ -1160,13 +1173,14 @@ create_schedulers() {
 instance=$(printf "%02d" $instance)
 cp -R /assets/dbbackup/template-dbbackup /etc/services.available/dbbackup-"${instance}"
 sed -i "s|{{BACKUP_NUMBER}}|${instance}|g" /etc/services.available/dbbackup-"${instance}"/run
+if [ "${MODE,,}" = "manual" ] ; then service_stop dbbackup-"${instance}" ; fi
 cat <<EOF >> /usr/bin/backup"${instance}"-now
 #!/bin/bash
 source /assets/functions/00-container
 PROCESS_NAME=db-backup${instance}
 print_info "Starting Manual Backup for db-backup${instance}"
-/var/run/s6/legacy-services/dbbackup-${instance}/run now
+#/var/run/s6/legacy-services/dbbackup-${instance}/run now
+/etc/services.available/dbbackup-${instance}/run now
 
 EOF
 chmod +x /usr/bin/backup"${instance}"-now

@@ -1176,11 +1190,11 @@ EOF
 cat <<EOF > /usr/bin/backup-now
 #!/bin/bash
 
-/usr/bin/backup${instance}-now
+/usr/bin/backup${instance}-now now
 
 EOF
 else
-echo "/usr/bin/backup${instance}-now" >> /usr/bin/backup-now
+echo "/usr/bin/backup${instance}-now now" >> /usr/bin/backup-now
 fi
 
 instance=$(echo "${instance} +1" | bc)
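The two hunks above generate the per-job `/usr/bin/backupNN-now` helpers and the aggregate `/usr/bin/backup-now` wrapper, which now pass the `now` argument through to the job's run script. A quick way to trigger them, assuming a container named `db-backup` (the container name is illustrative):

```bash
# Run every configured backup job immediately
docker exec db-backup backup-now

# Or run only the first job
docker exec db-backup backup01-now
```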
@@ -1337,20 +1351,20 @@ EOF
 notify() {
 if var_true "${DEBUG_NOTIFY}" ; then debug on; fi
 notification_custom() {
-if [ -n "${NOTIFICATION_SCRIPT}" ] ; then
+if [ -n "${NOTIFICATION_CUSTOM_SCRIPT}" ] ; then
-if var_true "${NOTIFICATION_SCRIPT_SKIP_X_VERIFY}" ; then
+if var_true "${NOTIFICATION_CUSTOM_SCRIPT_SKIP_X_VERIFY}" ; then
-eval "${NOTIFICATION_SCRIPT}" "${1}" "${2}" "${3}" "${4}" "${5}"
+eval "${NOTIFICATION_CUSTOM_SCRIPT}" "${1}" "${2}" "${3}" "${4}" "${5}"
 else
-if [ -x "${NOTIFICATION_SCRIPT}" ] ; then
+if [ -x "${NOTIFICATION_CUSTOM_SCRIPT}" ] ; then
-write_log notice "Found NOTIFICATION_SCRIPT environment variable. Executing '${NOTIFICATION_SCRIPT}"
+write_log notice "Found NOTIFICATION_CUSTOM_SCRIPT environment variable. Executing '${NOTIFICATION_CUSTOM_SCRIPT}"
 # script timestamp logfile errorcode subject body
-eval "${NOTIFICATION_SCRIPT}" "${1}" "${2}" "${3}" "${4}" "${5}"
+eval "${NOTIFICATION_CUSTOM_SCRIPT}" "${1}" "${2}" "${3}" "${4}" "${5}"
 else
-write_log error "Can't execute NOTIFICATION_SCRIPT environment variable '${NOTIFICATION_SCRIPT}' as its filesystem bit is not executible!"
+write_log error "Can't execute NOTIFICATION_CUSTOM_SCRIPT environment variable '${NOTIFICATION_CUSTOM_SCRIPT}' as its filesystem bit is not executible!"
 fi
 fi
 else
-print_error "[notifications] No NOTIFICATION_SCRIPT variable set - Skipping sending Custom notifications"
+print_error "[notifications] No NOTIFICATION_CUSTOM_SCRIPT variable set - Skipping sending Custom notifications"
 fi
 }
 
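For reference, the renamed `NOTIFICATION_CUSTOM_SCRIPT` hook is invoked with five positional arguments (timestamp, logfile, error code, subject, body, per the comment in the hunk above) and must carry the executable bit unless `NOTIFICATION_CUSTOM_SCRIPT_SKIP_X_VERIFY` is set. A minimal sketch of such a script; the path and log destination are illustrative:

```bash
#!/bin/bash
# /assets/scripts/notify.sh - illustrative custom notification handler
# Arguments passed by notify(): $1 timestamp, $2 logfile, $3 error code, $4 subject, $5 body
timestamp="${1}"
logfile="${2}"
errorcode="${3}"
subject="${4}"
body="${5}"

# Append a one-line summary somewhere useful; replace with your own delivery mechanism.
echo "[${timestamp}] (${errorcode}) ${subject}: ${body} (log: ${logfile})" >> /backup/notify.log
```

Mount the script into the container, mark it executable (`chmod +x`), and point `NOTIFICATION_CUSTOM_SCRIPT` at its path.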
@@ -1363,18 +1377,20 @@ notify() {
 if [ -z "${SMTP_HOST}" ] ; then write_log error "[notifications] No SMTP_HOST variable set - Skipping sending Email notifications" ; skip_mail=true ; fi
 if [ -z "${SMTP_PORT}" ] ; then write_log error "[notifications] No SMTP_PORT variable set - Skipping sending Email notifications" ; skip_mail=true ; fi
 if var_nottrue "${skip_mail}" ; then
+if ! grep -q ^from /etc/msmptrc ; then
+echo "from ${MAIL_FROM}" >> /etc/msmtprc
+fi
 mail_recipients=$(echo "${MAIL_TO}" | tr "," "\n")
 for mail_recipient in $mail_recipients ; do
-cat <<EOF | s-nail -v \
-    -r "${MAIL_FROM}" \
-    -s "[db-backup] [${DOMAIN}] ${3}" \
-    -S smtp="${SMTP_HOST}":"${SMTP_PORT}" \
-    "${mail_recipient}"
+cat <<EOF | msmtp -t "${mail_recipient}" -C /etc/msmtprc
+To: ${mail_recipient}
+Subject: [db-backup] ${4}
+
 Time: ${1}
 Log File: {2}
 Error Code: ${3}
 
-${4}
+${5}
 EOF
 done
 fi
@@ -1390,7 +1406,7 @@ EOF
 if [ -z "${MATTERMOST_WEBHOOK_URL}" ] ; then write_log error "[notifications] No MATTERMOST_WEBHOOK_URL variable set - Skipping sending Mattermost notifications" ; skip_mattermost=true ; fi
 if var_nottrue "${skip_mattermost}" ; then
 emoji=":bomb:"
-message="*[db-backup] ${3}*\n${4}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}"
+message="*[db-backup] ${4}*\n${5}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}"
 mattermost_recipients=$(echo "${MATTERMOST_RECIPIENT}" | tr "," "\n")
 for mattermost_recipient in $mattermost_recipients ; do
 payload="payload={\"channel\": \"${mattermost_recipient//\"/\\\"}\", \"username\": \"${MATTERMOST_USERNAME//\"/\\\"}\", \"text\": \"${message//\"/\\\"}\", \"icon_emoji\": \"${emoji}\"}"

@@ -1415,7 +1431,7 @@ EOF
 for matrix_room in $matrix_rooms ; do
 curl \
 -XPOST \
--d "{\"msgtype\":\"m.text\", \"body\":\"*[db-backup] ${3}*\n${4}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}*\"}" \
+-d "{\"msgtype\":\"m.text\", \"body\":\"*[db-backup] ${4}*\n${5}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}*\"}" \
 "${MATRIX_HOST}/_matrix/client/r0/rooms/${matrix_room}/send/m.room.message?access_token=${MATRIX_ACCESS_TOKEN}"
 done
 fi

@@ -1431,7 +1447,7 @@ EOF
 if [ -z "${ROCKETCHAT_WEBHOOK_URL}" ] ; then write_log error "[notifications] No ROCKETCHAT_WEBHOOK_URL variable set - Skipping sending Rocket.Chat notifications" ; skip_rocketchat=true ; fi
 if var_nottrue "${skip_rocketchat}" ; then
 emoji=":bomb:"
-message="*[db-backup] ${3}*\n${4}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}"
+message="*[db-backup] ${4}*\n${5}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}"
 rocketchat_recipients=$(echo "${ROCKETCHAT_RECIPIENT}" | tr "," "\n")
 for rocketchat_recipient in $rocketchat_recipients ; do
 payload="payload={\"channel\": \"${rocketchat_recipient//\"/\\\"}\", \"username\": \"${ROCKETCHAT_USERNAME//\"/\\\"}\", \"text\": \"${message//\"/\\\"}\", \"icon_emoji\": \"${emoji}\"}"

@@ -1450,7 +1466,7 @@ EOF
 # $4 body
 
 if var_true "${ENABLE_NOTIFICATIONS}" ; then
-notification_types=$(echo "${NOTIIFICATION_TYPE}" | tr "," "\n")
+notification_types=$(echo "${NOTIFICATION_TYPE}" | tr "," "\n")
 for notification_type in $notification_types ; do
 case "${notification_type,,}" in
 "custom" )

@@ -1631,7 +1647,7 @@ pre_dbbackup() {
 
 ### Pre Backup Custom Script Support
 if [ -d "/assets/custom-scripts/pre" ] && dir_notempty "/assets/custom-scripts/pre" ; then
-write_log warning "Found Custom Post Scripts in /assets/custom-scripts/pre - Automatically moving them to '${backup_job_script_location_pre}'"
+write_log warn "Found Custom Post Scripts in /assets/custom-scripts/pre - Automatically moving them to '${backup_job_script_location_pre}'"
 mkdir -p "${backup_job_script_location_pre}"
 silent cp -aR /assets/custom-scripts/pre/* "${backup_job_script_location_pre}"
 fi

@@ -1700,7 +1716,7 @@ EOZP
 
 ### Post Backup Custom Script Support
 if [ -d "/assets/custom-scripts/" ] && dir_notempty "/assets/custom-scripts" ; then
-write_log warning "Found Custom Post Scripts in /assets/custom-scripts/ - Automatically moving them to '${backup_job_script_location_post}'"
+write_log warn "Found Custom Post Scripts in /assets/custom-scripts/ - Automatically moving them to '${backup_job_script_location_post}'"
 mkdir -p "${backup_job_script_location_post}"
 cp -aR /assets/custom-scripts/* "${backup_job_script_location_post}"
 fi

@@ -1841,7 +1857,7 @@ timer() {
 if [ "${expression_step}" != "${expression}" ]; then
 for step in ${validate_temp}; do
 if [ $(( ( step - expression_start ) % expression_step )) -eq 0 ]; then
-validate_all="$validate_all ${step}"
+validate_all="${validate_all} ${step}"
 fi
 done
 else

@@ -1849,15 +1865,16 @@ timer() {
 fi
 done
 
-validate_all=$(echo "${validate_all}" | tr ' ' '\n' | sort -n -u | tr '\n' ' ')
+validate_all=$(echo "${validate_all}" | tr ' ' '\n' | sort -g -u | tr '\n' ' ')
 for entry in $validate_all; do
-if [ "${entry}" -ge "${3}" ]; then
+if [ ${entry} -ge ${3} ]; then
 echo "${entry}"
 return 0
 fi
 done
 
-echo "${validate_all%% *}"
+echo "${validate_all// /}"
+#echo "${validate_all%% *}"
 }
 
 local cron_compare="${3}"
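The `parse_expression` changes above keep the expansion logic intact: a step field such as `*/15` is expanded by walking the candidate values and keeping those where `(value - start) % step == 0`, and the sorted list is then scanned for the first entry at or after the current value. A standalone sketch of that expansion, using the same modulo test (the variable values are illustrative):

```bash
#!/bin/bash
# Expand a cron-style step field, e.g. start=0, max=59, step=15 -> 0 15 30 45
expression_start=0
expression_max=59
expression_step=15

validate_all=""
for step in $(seq "${expression_start}" "${expression_max}"); do
    if [ $(( ( step - expression_start ) % expression_step )) -eq 0 ]; then
        validate_all="${validate_all} ${step}"
    fi
done

echo "Expanded minutes:${validate_all}"   # -> Expanded minutes: 0 15 30 45
```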
@@ -1876,7 +1893,10 @@ timer() {
 local cron_minute="$(echo -n "${2}" | awk '{print $1}')"
 local cron_hour="$(echo -n "${2}" | awk '{print $2}')"
 local cron_day_of_month="$(echo -n "${2}" | awk '{print $3}')"
 local cron_month="$(echo -n "${2}" | awk '{print $4}')"
+local cron_day_of_week="$(echo -n "${2}" | awk '{print $5}')"
+
+local cron_next_minute="$(date --date=@"${cron_compare}" +"%-M")"
 local cron_next_hour="$(date --date=@"${cron_compare}" +"%-H")"
 local cron_next_day_of_month="$(date --date=@"${cron_compare}" +"%-d")"
 local cron_next_month="$(date --date=@"${cron_compare}" +"%-m")"

@@ -1885,8 +1905,10 @@ timer() {
 local cron_next_year="$(date --date=@"${cron_compare}" +"%-Y")"
 
 local cron_next=
+local cron_parsed=1
 
-while [ "$cron_parsed" != "0" ]; do
+while [ "${cron_parsed}" != "0" ]; do
+print_debug "[timer] [cron] Parse Minute"
 cron_next=$(parse_expression "${cron_minute}" 59 "${cron_next_minute}")
 if [ "${cron_next}" != "${cron_next_minute}" ]; then
 if [ "${cron_next_minute}" -gt "${cron_next}" ]; then

@@ -1896,20 +1918,22 @@ timer() {
 cron_next_minute="${cron_next}"
 fi
 
+print_debug "[timer] [cron] Parse Hour"
 cron_next=$(parse_expression "${cron_hour}" 23 "${cron_next_hour}")
 if [ "${cron_next}" != "${cron_next_hour}" ]; then
 if [ "${cron_next_hour}" -gt "${cron_next}" ]; then
 cron_next_day_of_month=$(( cron_next_day_of_month + 1 ))
 fi
 
 cron_next_hour="${cron_next}"
-#cron_next_minute=0
 fi
 
+print_debug "[timer] [cron] Parse Day of Week"
 cron_next=$(parse_expression "${cron_day_of_week}" 6 "${cron_next_day_of_week}")
 if [ "${cron_next}" != "${cron_next_day_of_week}" ]; then
-day_of_week_difference=$(( ${cron_next} - ${cron_next_day_of_week} ))
+day_of_week_difference=$(( cron_next - cron_next_day_of_week ))
 
-if [ "${day_of_week_difference}" -lt "0" ]; then
+if [ "${day_of_week_difference}" -lt 0 ]; then
 day_of_week_difference=$(( day_of_week_difference + 7 ))
 fi
 

@@ -1918,6 +1942,7 @@ timer() {
 cron_next_minute=0
 fi
 
+print_debug "[timer] [cron] Parse day of month"
 case "${cron_next_month}" in
 1|3|5|7|8|10|12)
 last_day_of_month="31"

@@ -1957,6 +1982,7 @@ timer() {
 cron_next_day_of_month=$cron_next
 fi
 
+print_debug "[timer] [cron] Parse Next Month"
 cron_next=$(parse_expression "${cron_month}" 12 "${cron_next_month}")
 if [ "${cron_next}" != "${cron_next_month}" ]; then
 if [ "${cron_next}" -gt "12" ]; then

@@ -2068,4 +2094,4 @@ write_log() {
 print_${_arg_log_level} "${_arg_log_message}"
 output_on
 if var_true "${DEBUG_WRITE_LOG}" ; then debug off; fi
 }