Mirror of https://github.com/tiredofit/docker-db-backup.git (synced 2025-12-21 13:23:12 +01:00)
Compare commits
21 Commits
| SHA1 |
| --- |
| 11f55f3d82 |
| 674a98fcd8 |
| 77c747e01b |
| 2e30558a27 |
| c746fb641e |
| ca2f04cd59 |
| dfa94ecab7 |
| eaea6dc348 |
| 34abe88159 |
| 5ffbeeb163 |
| c82cee80f8 |
| ab059ccdf1 |
| 1e8ccf4d56 |
| 65c40cac0a |
| a9f2d51ff9 |
| 7f455abc1a |
| c16add4525 |
| d5769b1588 |
| 0b2c7836cf |
| 535e011740 |
| 5a391b908a |
82 CHANGELOG.md
@@ -1,3 +1,85 @@
## 4.0.20 2023-11-21 <dave at tiredofit dot ca>

### Changed

- Update base image to support S6 Overlay 3.1.6.2 to solve shutdown issues specifically with MODE=MANUAL and MANUAL_RUN_FOREVER=TRUE
- Add some safety nets for Manual scheduling


## 4.0.19 2023-11-20 <dave at tiredofit dot ca>

### Changed

- Make adjustments to cron scheduling feature to be able to handle whitespace properly


## 4.0.18 2023-11-18 <joergmschulz@github>

### Changed

- Fix loading msmtp configuration


## 4.0.17 2023-11-17 <dave at tiredofit dot ca>

### Changed

- Provide more details when notifying via instant messages


## 4.0.16 2023-11-17 <dave at tiredofit dot ca>

### Changed

- Switch to using msmtp instead of s-nail for notify()


## 4.0.15 2023-11-16 <dave at tiredofit dot ca>

### Changed

- Fix cleanup of old backups


## 4.0.14 2023-11-13 <dave at tiredofit dot ca>

### Changed

- Bugfix when PRE/POST scripts found not giving legacy warning
- Run pre / post scripts as root


## 4.0.13 2023-11-12 <dave at tiredofit dot ca>

### Changed

- Check for any quotes if using MONGO_CUSTOM_URI and remove


## 4.0.12 2023-11-12 <dave at tiredofit dot ca>

### Changed

- Allow creating schedulers if _MONGO_CUSTOM_URI is set and _DB_HOST blank


## 4.0.11 2023-11-11 <dave at tiredofit dot ca>

### Changed

- Resolve issue with backing up ALL databases with PGSQL and MySQL


## 4.0.10 2023-11-11 <dave at tiredofit dot ca>

### Changed

- Change environment variable parsing routines to properly accommodate passwords containing '=='


## 4.0.9 2023-11-11 <dave at tiredofit dot ca>

### Changed

- Fix issue with quotes being wrapped around _PASS variables


## 4.0.8 2023-11-11 <dave at tiredofit dot ca>

### Changed

- Tidy up file_encryption() routines
- Change environment variable _ENCRYPT_PUBKEY to _ENCRYPT_PUBLIC_KEY
- Add new environment variable _ENCRYPT_PRIVATE_KEY


## 4.0.7 2023-11-11 <dave at tiredofit dot ca>

### Added
@@ -10,7 +10,7 @@ ENV INFLUX1_CLIENT_VERSION=1.8.0 \
MSODBC_VERSION=18.3.2.1-1 \
MSSQL_VERSION=18.3.1.1-1 \
AWS_CLI_VERSION=1.29.78 \
CONTAINER_ENABLE_MESSAGING=FALSE \
CONTAINER_ENABLE_MESSAGING=TRUE \
CONTAINER_ENABLE_MONITORING=TRUE \
IMAGE_NAME="tiredofit/db-backup" \
IMAGE_REPO_URL="https://github.com/tiredofit/docker-db-backup/"

@@ -76,7 +76,7 @@ RUN source /assets/functions/00-container && \
*) sleep 0.1 ;; \
esac; \
\
if [ $mssql = "true" ] ; then curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk ; curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; ls -l ; echo y | apk add --allow-untrusted msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; else echo >&2 "Detected non x86_64 or ARM64 build variant, skipping MSSQL installation" ; fi; \
if [ $mssql = "true" ] ; then curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk ; curl -O https://download.microsoft.com/download/3/5/5/355d7943-a338-41a7-858d-53b259ea33f5/mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; echo y | apk add --allow-untrusted msodbcsql18_${MSODBC_VERSION}_${mssql_arch}.apk mssql-tools18_${MSSQL_VERSION}_${mssql_arch}.apk ; else echo >&2 "Detected non x86_64 or ARM64 build variant, skipping MSSQL installation" ; fi; \
if [ $influx2 = "true" ] ; then curl -sSL https://dl.influxdata.com/influxdb/releases/influxdb2-client-${INFLUX2_CLIENT_VERSION}-linux-${influx_arch}.tar.gz | tar xvfz - --strip=1 -C /usr/src/ ; chmod +x /usr/src/influx ; mv /usr/src/influx /usr/sbin/ ; else echo >&2 "Unable to build Influx 2 on this system" ; fi ; \
clone_git_repo https://github.com/aws/aws-cli "${AWS_CLI_VERSION}" && \
python3 setup.py install --prefix=/usr && \
44 README.md
@@ -214,12 +214,13 @@ If these are set and no other defaults or variables are set explicitly, they wil
Encryption occurs after compression and the encrypted filename will have a `.gpg` suffix

| Variable                     | Description                                 | Default |
| ---------------------------- | ------------------------------------------- | ------- |
| `DEFAULT_ENCRYPT`            | Encrypt file after backing up with GPG      | `FALSE` |
| `DEFAULT_ENCRYPT_PASSPHRASE` | Passphrase to encrypt file with GPG         |         |
| *or*                         |                                             |         |
| `DEFAULT_ENCRYPT_PUBKEY`     | Path of public key to encrypt file with GPG |         |

| Variable                      | Description                                   | Default | `_FILE` |
| ----------------------------- | --------------------------------------------- | ------- | ------- |
| `DEFAULT_ENCRYPT`             | Encrypt file after backing up with GPG        | `FALSE` |         |
| `DEFAULT_ENCRYPT_PASSPHRASE`  | Passphrase to encrypt file with GPG           |         | x       |
| *or*                          |                                               |         |         |
| `DEFAULT_ENCRYPT_PUBLIC_KEY`  | Path of public key to encrypt file with GPG   |         | x       |
| `DEFAULT_ENCRYPT_PRIVATE_KEY` | Path of private key to encrypt file with GPG  |         | x       |
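As a usage sketch (not part of the README diff itself), the renamed key-pair variables could be passed to the container like this; the container name, image tag and key paths are illustrative assumptions:

```bash
# Hypothetical invocation: encrypt every backup to a GPG key pair mounted read-only.
docker run -d --name db-backup \
  -e DEFAULT_ENCRYPT=TRUE \
  -e DEFAULT_ENCRYPT_PUBLIC_KEY=/keys/backup_public.asc \
  -e DEFAULT_ENCRYPT_PRIVATE_KEY=/keys/backup_private.asc \
  -v ./keys:/keys:ro \
  -v ./backups:/backup \
  tiredofit/db-backup:4.0.20
```

Alternatively, `DEFAULT_ENCRYPT_PASSPHRASE` can be set instead of the key pair for symmetric encryption; per the file_encryption() changes further down this diff, setting both a passphrase and a public key is rejected.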
##### Scheduling Options
@@ -476,12 +477,14 @@ Otherwise, override them per backup job. Additional backup jobs can be scheduled
Encryption will occur after compression and the resulting filename will have a `.gpg` suffix

| Variable                  | Description                                 | Default |
| ------------------------- | ------------------------------------------- | ------- |
| `DB01_ENCRYPT`            | Encrypt file after backing up with GPG      | `FALSE` |
| `DB01_ENCRYPT_PASSPHRASE` | Passphrase to encrypt file with GPG         |         |
| *or*                      |                                             |         |
| `DB01_ENCRYPT_PUBKEY`     | Path of public key to encrypt file with GPG |         |

| Variable                   | Description                                   | Default | `_FILE` |
| -------------------------- | --------------------------------------------- | ------- | ------- |
| `DB01_ENCRYPT`             | Encrypt file after backing up with GPG        | `FALSE` |         |
| `DB01_ENCRYPT_PASSPHRASE`  | Passphrase to encrypt file with GPG           |         | x       |
| *or*                       |                                               |         |         |
| `DB01_ENCRYPT_PUBLIC_KEY`  | Path of public key to encrypt file with GPG   |         | x       |
| `DB01_ENCRYPT_PRIVATE_KEY` | Path of private key to encrypt file with GPG  |         | x       |
##### Scheduling Options
@@ -598,13 +601,13 @@ Options that are related to the value of `DB01_BACKUP_LOCATION`
If `DB01_BACKUP_LOCATION` = `FILESYSTEM` then the following options are used:

| Variable                          | Description                                                                                            | Default                           |
| --------------------------------- | ------------------------------------------------------------------------------------------------------ | --------------------------------- |
| `DB01_CREATE_LATEST_SYMLINK`      | Create a symbolic link pointing to last backup in this format: `latest-(DB_TYPE)-(DB_NAME)-(DB_HOST)`   | `TRUE`                            |
| `DB01_FILESYSTEM_PATH`            | Directory where the database dumps are kept.                                                            | `/backup`                         |
| `DB01_FILESYSTEM_PATH_PERMISSION` | Permissions to apply to backup directory                                                                 | `700`                             |
| `DB01_FILESYSTEM_ARCHIVE_PATH`    | Optional Directory where the database dumps archives are kept                                            | `${DB01_FILESYSTEM_PATH/archive/` |
| `DB01_FILESYSTEM_PERMISSION`      | Directory and File permissions to apply to files.                                                        | `600`                             |

| Variable                          | Description                                                                                            | Default                            |
| --------------------------------- | ------------------------------------------------------------------------------------------------------ | ---------------------------------- |
| `DB01_CREATE_LATEST_SYMLINK`      | Create a symbolic link pointing to last backup in this format: `latest-(DB_TYPE)-(DB_NAME)-(DB_HOST)`   | `TRUE`                             |
| `DB01_FILESYSTEM_PATH`            | Directory where the database dumps are kept.                                                            | `/backup`                          |
| `DB01_FILESYSTEM_PATH_PERMISSION` | Permissions to apply to backup directory                                                                 | `700`                              |
| `DB01_FILESYSTEM_ARCHIVE_PATH`    | Optional Directory where the database dumps archives are kept                                            | `${DB01_FILESYSTEM_PATH}/archive/` |
| `DB01_FILESYSTEM_PERMISSION`      | Directory and File permissions to apply to files.                                                        | `600`                              |
###### S3
@@ -739,6 +742,9 @@ $5 body/error message
##### Email Notifications

See more details in the base image listed above for more mail environment variables.

| Parameter   | Description                                      | Default | `_FILE` |
| ----------- | ------------------------------------------------ | ------- | ------- |
| `MAIL_FROM` | What email address to send mail from for errors  |         |         |
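For illustration only (values are placeholders; `MAIL_TO`, `SMTP_HOST`, and `SMTP_PORT` appear in the notify() changes further down this diff, and the Dockerfile change above now enables messaging by default):

```bash
# Hypothetical invocation wiring up email notifications via the container's msmtp support.
docker run -d --name db-backup \
  -e MAIL_FROM=backups@example.com \
  -e MAIL_TO=ops@example.com \
  -e SMTP_HOST=smtp.example.com \
  -e SMTP_PORT=25 \
  tiredofit/db-backup
```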
@@ -16,7 +16,7 @@ fi
trap ctrl_c INT

if [[ "${MODE,,}" =~ "standalone" ]] || [ "${1,,}" = "manual" ] || [ "${1,,}" = "now" ]; then
if [[ "${MODE,,}" =~ "standalone" ]] || [ "${MODE,,}" = "manual" ] || [ "${1,,}" = "manual" ] || [ "${1,,}" = "now" ]; then
print_debug "Detected Manual Mode"
persist=false
backup_job_backup_begin=+0

@@ -34,9 +34,12 @@ else
elif [[ "${backup_job_backup_begin}" =~ ([0-9]{4})-([0-9]{2})-([0-9]{2})[[:space:]]([0-9]{2}):([0-9]{2}):([0-9]{2}) ]]; then
print_debug "BACKUP_BEGIN is a full date timestamp"
timer datetime
#elif echo "${backup_job_backup_begin//\*/#}" | grep -qP "^(.*((\d+,)+\d+|(\d+(\/|-)\d+)|\d+|#) ?){5}$" ; then # Allow slashes, yet not supporting advanced cron yet
elif echo "${backup_job_backup_begin//\*/#}" | grep -qP "^(((\d+,)+\d+|(\d+(\/|-)\d+)|\d+|#) ?){5}$" ; then
print_debug "BACKUP_BEGIN is a cron expression"
time_last_run=$(date +"%s")
backup_job_backup_begin=${backup_job_backup_begin//\"/}
backup_job_backup_begin=${backup_job_backup_begin//\'/}
timer cron "${backup_job_backup_begin}" "${time_current}" "${time_last_run}"
else
print_error "_BACKUP_BEGIN is invalid - Unable to perform scheduling"
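Per the branches shown above, a `_BACKUP_BEGIN` value can be a `+MM` offset (the manual-mode default `+0`), a full date timestamp, or a five-field cron expression; the new lines also strip surrounding quotes from the cron string so quoted values in compose files are tolerated. A hedged sketch of values these branches would recognise (the `DB01_` prefix is assumed for illustration):

```bash
# Pick one form per backup job:
DB01_BACKUP_BEGIN="+0"                    # offset in minutes: start immediately
DB01_BACKUP_BEGIN="2023-11-22 03:00:00"   # full date timestamp branch
DB01_BACKUP_BEGIN="30 2 * * *"            # five-field cron expression branch (02:30 daily)
```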
@@ -87,6 +90,7 @@ while true; do
fi

symlink_log
cleanup_old_data

if var_false "${persist}" ; then
print_debug "Exiting due to manual mode"
@@ -48,7 +48,8 @@ bootstrap_variables() {
DEFAULT_USER \
DEFAULT_PASS \
DEFAULT_ENCRYPT_PASSPHRASE \
DEFAULT_ENCRYPT_PUBKEY \
DEFAULT_ENCRYPT_PUBLIC_KEY \
DEFAULT_ENCRYPT_PRIVATE_KEY \
DEFAULT_MONGO_CUSTOM_URI \
DEFAULT_MYSQL_TLS_CA_FILE \
DEFAULT_MYSQL_TLS_CERT_FILE \
@@ -74,7 +75,8 @@ bootstrap_variables() {
DB"${backup_instance_number}"_USER \
DB"${backup_instance_number}"_PASS \
DB"${backup_instance_number}"_ENCRYPT_PASSPHRASE \
DB"${backup_instance_number}"_ENCRYPT_PUBKEY \
DB"${backup_instance_number}"_ENCRYPT_PUBLIC_KEY \
DB"${backup_instance_number}"_ENCRYPT_PRIVATE_KEY \
DB"${backup_instance_number}"_MONGO_CUSTOM_URI \
DB"${backup_instance_number}"_MYSQL_TLS_CA_FILE \
DB"${backup_instance_number}"_MYSQL_TLS_CERT_FILE \
@@ -151,18 +153,28 @@ bootstrap_variables() {
fi
##

if grep -qo ".*_PASS='.*'" "${backup_instance_vars}"; then
print_debug "[bootstrap_variables] [backup_init] Found _PASS variable with quotes"
sed -i "s|_PASS='\(.*\)'|_PASS=\1|g" "${backup_instance_vars}"
fi

if grep -qo "MONGO_CUSTOM_URI='.*'" "${backup_instance_vars}"; then
print_debug "[bootstrap_variables] [backup_init] Found _MONGO_CUSTOM_URI variable with quotes"
sed -i "s|MONGO_CUSTOM_URI='\(.*\)'|MONGO_CUSTOM_URI=\1|g" "${backup_instance_vars}"
fi

transform_backup_instance_variable() {
if grep -q "^DB${1}_${2}=" "${backup_instance_vars}" && [ "$(grep "^DB${1}_${2}=" "${backup_instance_vars}" | cut -d = -f2)" != "unset" ]; then
export "$3"="$(grep "^DB${1}_${2}=" "${backup_instance_vars}" | cut -d = -f2)"
export "$3"="$(grep "^DB${1}_${2}=" "${backup_instance_vars}" | cut -d = -f2-)"
elif grep -q "^DB_${2}=" "${backup_instance_vars}" && [ "$(grep "^DB_${2}=" "${backup_instance_vars}" | cut -d = -f2)" != "unset" ]; then
# Allow old legacy work, perhaps remove old DB_ functionality in future? This should allow for seamless upgrades
#print_warn "Legacy Variable 'DB_${2}'' detected - Please upgrade your variables as they will be removed in version 4.3.0"
export "$3"="$(grep "^DB_${2}=" "${backup_instance_vars}" | cut -d = -f2)"
export "$3"="$(grep "^DB_${2}=" "${backup_instance_vars}" | cut -d = -f2-)"
elif grep -q "^${2}=" "${backup_instance_vars}" && [ "$(grep "^${2}=" "${backup_instance_vars}" | cut -d = -f2)" != "unset" ]; then
print_warn "Legacy unsupported variable '${2}' detected - Please upgrade your variables as they will be removed in version 4.3.0"
export "$3"="$(grep "^${2}=" "${backup_instance_vars}" | cut -d = -f2)"
export "$3"="$(grep "^${2}=" "${backup_instance_vars}" | cut -d = -f2-)"
elif grep -q "^DEFAULT_${2}=" "${backup_instance_vars}" && [ "$(grep "^DEFAULT_${2}=" "${backup_instance_vars}" | cut -d = -f2)" != "unset" ]; then
export "$3"="$(grep "^DEFAULT_${2}=" "${backup_instance_vars}" | cut -d = -f2)"
export "$3"="$(grep "^DEFAULT_${2}=" "${backup_instance_vars}" | cut -d = -f2-)"
fi
}
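The `-f2` to `-f2-` change above is what lets values containing `=` (such as passwords with `==`, per the 4.0.10 changelog entry) survive parsing; a quick shell illustration:

```bash
# cut -d = -f2 keeps only the text up to the next '=', truncating such values;
# -f2- keeps everything after the first '=':
echo "DB01_PASS=abc==def" | cut -d = -f2    # prints: abc
echo "DB01_PASS=abc==def" | cut -d = -f2-   # prints: abc==def
```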
@@ -185,7 +197,8 @@ bootstrap_variables() {
transform_backup_instance_variable "${backup_instance_number}" ENABLE_PARALLEL_COMPRESSION backup_job_parallel_compression
transform_backup_instance_variable "${backup_instance_number}" ENCRYPT backup_job_encrypt
transform_backup_instance_variable "${backup_instance_number}" ENCRYPT_PASSPHRASE backup_job_encrypt_passphrase
transform_backup_instance_variable "${backup_instance_number}" ENCRYPT_PUBKEY backup_job_encrypt_pubkey
transform_backup_instance_variable "${backup_instance_number}" ENCRYPT_PRIVATE_KEY backup_job_encrypt_private_key
transform_backup_instance_variable "${backup_instance_number}" ENCRYPT_PUBLIC_KEY backup_job_encrypt_public_key
transform_backup_instance_variable "${backup_instance_number}" EXTRA_DUMP_OPTS backup_job_extra_dump_opts
transform_backup_instance_variable "${backup_instance_number}" EXTRA_ENUMERATION_OPTS backup_job_extra_enumeration_opts
transform_backup_instance_variable "${backup_instance_number}" EXTRA_OPTS backup_job_extra_opts
@@ -726,7 +739,7 @@ backup_pgsql() {
fi
if [ "${backup_job_db_name,,}" = "all" ] ; then
write_log debug "Preparing to back up all databases"
db_names=$(run_as_user psql -h ${backup_job_db_host} -U ${backup_job_db_user} -p ${backup_job_db_port} -d ${authdb} -c 'COPY (SELECT datname FROM pg_database WHERE datistemplate = false) TO STDOUT;' )
db_names=$(psql -h ${backup_job_db_host} -U ${backup_job_db_user} -p ${backup_job_db_port} -d ${authdb} -c 'COPY (SELECT datname FROM pg_database WHERE datistemplate = false) TO STDOUT;' )
if [ -n "${backup_job_db_name_exclude}" ] ; then
db_names_exclusions=$(echo "${backup_job_db_name_exclude}" | tr ',' '\n')
for db_exclude in ${db_names_exclusions} ; do
@@ -774,7 +787,7 @@ backup_pgsql() {
pre_dbbackup all
write_log notice "Dumping all PostgreSQL databases: '$(echo ${db_names} | xargs | tr ' ' ',')' ${compression_string}"
if var_true "${DEBUG_BACKUP_PGSQL}" ; then debug on; fi
tmp_db_names=$(run_as_user psql -h ${backup_job_db_host} -p ${backup_job_db_port} -U ${backup_job_db_user} -d ${authdb} -c 'COPY (SELECT datname FROM pg_database WHERE datistemplate = false) TO STDOUT;' )
tmp_db_names=$(psql -h ${backup_job_db_host} -p ${backup_job_db_port} -U ${backup_job_db_user} -d ${authdb} -c 'COPY (SELECT datname FROM pg_database WHERE datistemplate = false) TO STDOUT;' )
for r_db_name in $(echo $db_names | xargs); do
tmp_db_names=$(echo "$tmp_db_names" | xargs | sed "s|${r_db_name}||g" )
done
@@ -1133,10 +1146,11 @@ create_archive() {
create_schedulers() {
if var_true "${DEBUG_CREATE_SCHEDULERS}" ; then debug on; fi

backup() {
bootstrap_variables upgrade BACKUP
local backup_instances=$(printenv | sort | grep -c "^DB[0-9]._HOST")
local backup_instances=$(set -o posix ; set | grep -Pc "^(DB[0-9]._HOST=|.*MONGO_CUSTOM_URI=)")
print_debug "[create_schedulers] Found '${backup_instances}' DB_HOST instances"

if [ -n "${DB_HOST}" ] && [ "${backup_instances}" ]; then
backup_instances=1;
print_debug "[create_schedulers] Detected using old DB_ variables"
@@ -1146,13 +1160,14 @@ create_schedulers() {
instance=$(printf "%02d" $instance)
cp -R /assets/dbbackup/template-dbbackup /etc/services.available/dbbackup-"${instance}"
sed -i "s|{{BACKUP_NUMBER}}|${instance}|g" /etc/services.available/dbbackup-"${instance}"/run

if [ "${MODE,,}" = "manual" ] ; then service_stop dbbackup-"${instance}" ; fi
cat <<EOF >> /usr/bin/backup"${instance}"-now
#!/bin/bash
source /assets/functions/00-container
PROCESS_NAME=db-backup${instance}
print_info "Starting Manual Backup for db-backup${instance}"
/var/run/s6/legacy-services/dbbackup-${instance}/run now
#/var/run/s6/legacy-services/dbbackup-${instance}/run now
/etc/services.available/dbbackup-${instance}/run now

EOF
chmod +x /usr/bin/backup"${instance}"-now
@@ -1162,11 +1177,11 @@ EOF
cat <<EOF > /usr/bin/backup-now
#!/bin/bash

/usr/bin/backup${instance}-now
/usr/bin/backup${instance}-now now

EOF
else
echo "/usr/bin/backup${instance}-now" >> /usr/bin/backup-now
echo "/usr/bin/backup${instance}-now now" >> /usr/bin/backup-now
fi

instance=$(echo "${instance} +1" | bc)
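These generated helpers allow a manual run to be triggered from the host; a hedged usage sketch (the container name is a placeholder):

```bash
# Run every configured backup job once, immediately:
docker exec db-backup backup-now

# Or run a single job, e.g. the first one (numbering follows the %02d format above):
docker exec db-backup backup01-now
```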
@@ -1180,7 +1195,7 @@ EOF
}

ctrl_c() {
sed -i "/^{{BACKUP_NUMBER}}/d" /tmp/.container/db-backup-backups
sed -i "/^${backup_instance_number}/d" /tmp/.container/db-backup-backups
symlink_log
print_warn "User aborted"
exit
@@ -1195,7 +1210,7 @@ db_backup_container_init() {
debug() {
case "${1}" in
off)
backup_job_log_level=$_original_job_log_log_level}
backup_job_log_level=$_original_job_log_level}
CONTAINER_LOG_LEVEL=${_original_container_log_level}
DEBUG_MODE=${_original_debug_mode}
SHOW_OUTPUT=${_original_show_output}
@@ -1219,6 +1234,9 @@ debug() {
fi
if [ -z "${_original_show_output}" ]; then
_original_show_output="${SHOW_OUTPUT}"
if ! [[ "${_original_show_output,,}" =~ true|false ]]; then
__original_show_output="FALSE"
fi
fi
backup_job_log_level=DEBUG
CONTAINER_LOG_LEVEL=DEBUG
@@ -1234,21 +1252,26 @@ file_encryption() {
if [ "${exit_code}" = "0" ] ; then
print_debug "[file_encryption] Encrypting"
output_off
if [ -n "${backup_job_encrypt_passphrase}" ] && [ -n "${backup_job_encrypt_pubkey}" ]; then
if [ -n "${backup_job_encrypt_passphrase}" ] && [ -n "${backup_job_encrypt_public_key}" ]; then
print_error "Can't encrypt as both ENCRYPT_PASSPHRASE and ENCRYPT_PUBKEY exist!"
return
elif [ -n "${backup_job_encrypt_passphrase}" ] && [ -z "${backup_job_encrypt_pubkey}" ]; then
elif [ -n "${backup_job_encrypt_passphrase}" ] && [ -z "${backup_job_encrypt_public_key}" ]; then
print_notice "Encrypting with GPG Passphrase"
encrypt_routines_start_time=$(date +'%s')
encrypt_tmp_dir=$(run_as_user mktemp -d)
echo "${backup_job_encrypt_passphrase}" | silent run_as_user ${play_fair} gpg --batch --home ${encrypt_tmp_dir} --yes --passphrase-fd 0 -c "${TEMP_PATH}"/"${backup_job_filename}"
rm -rf "${encrypt_tmp_dir}"
elif [ -z "${backup_job_encrypt_passphrase}" ] && [ -n "${backup_job_encrypt_pubkey}" ]; then
if [ -f "${backup_job_encrypt_pubkey}" ]; then
elif [ -z "${backup_job_encrypt_passphrase}" ] && [ -n "${backup_job_encrypt_public_key}" ] && [ -n "${backup_job_encrypt_private_key}" ]; then
if [ -f "${backup_job_encrypt_private_key}" ]; then
encrypt_routines_start_time=$(date +'%s')
print_notice "Encrypting with GPG Public Key"
print_notice "Encrypting with GPG Private Key"
encrypt_tmp_dir=$(run_as_user mktemp -d)
silent run_as_user ${play_fair} gpg --batch --yes --home "${encrypt_tmp_dir}" --recipient-file "${backup_job_encrypt_pubkey}" -c "${TEMP_PATH}"/"${backup_job_filename}"
cat "${backup_job_encrypt_private_key}" | run_as_user tee "${encrypt_tmp_dir}"/private_key.asc > /dev/null
print_debug "[file_encryption] [key] Importing Private Key"
silent run_as_user gpg --home ${encrypt_tmp_dir} --batch --import "${encrypt_tmp_dir}"/private_key.asc
print_debug "[file_encryption] [key] Encrypting to Public Key"
cat "${backup_job_encrypt_public_key}" | run_as_user tee "${encrypt_tmp_dir}"/public_key.asc > /dev/null
silent run_as_user ${play_fair} gpg --batch --yes --home "${encrypt_tmp_dir}" --encrypt --recipient-file "${encrypt_tmp_dir}"/public_key.asc "${TEMP_PATH}"/"${backup_job_filename}"
rm -rf "${encrypt_tmp_dir}"
fi
fi
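For reference, a backup produced by the key-pair branch above can be decrypted outside the container with stock GnuPG; the file names below are placeholders:

```bash
# Import the private key that matches the configured public key, then decrypt:
gpg --import backup_private.asc
gpg --output db_mysql_example.sql.gz --decrypt db_mysql_example.sql.gz.gpg
```

Backups made with the passphrase branch are symmetrically encrypted (`gpg -c`) and decrypt with the same `gpg --decrypt` call, prompting for the passphrase.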
@@ -1263,6 +1286,9 @@ file_encryption() {
- dbbackup.backup.encrypt.duration.[${backup_job_db_host}.${backup_job_db_name}] ${encrypt_routines_total_time}
EOF
)
else
print_error "Encryption failed! Could not detect encrypted file"
return 99
fi
else
write_log error "Skipping encryption because backup did not complete successfully"
@@ -1338,18 +1364,20 @@ notify() {
if [ -z "${SMTP_HOST}" ] ; then write_log error "[notifications] No SMTP_HOST variable set - Skipping sending Email notifications" ; skip_mail=true ; fi
if [ -z "${SMTP_PORT}" ] ; then write_log error "[notifications] No SMTP_PORT variable set - Skipping sending Email notifications" ; skip_mail=true ; fi
if var_nottrue "${skip_mail}" ; then
if ! grep -q ^from /etc/msmptrc ; then
echo "from ${MAIL_FROM}" >> /etc/msmtprc
fi
mail_recipients=$(echo "${MAIL_TO}" | tr "," "\n")
for mail_recipient in $mail_recipients ; do
cat <<EOF | s-nail -v \
-r "${MAIL_FROM}" \
-s "[db-backup] [${DOMAIN}] ${3}" \
-S smtp="${SMTP_HOST}":"${SMTP_PORT}" \
"${mail_recipient}"
cat <<EOF | msmtp -t "${mail_recipient}" -C /etc/msmtprc
To: ${mail_recipient}
Subject: [db-backup] ${4}

Time: ${1}
Log File: ${2}
Error Code: ${3}

${4}
${5}
EOF
done
fi
@@ -1365,7 +1393,7 @@ EOF
if [ -z "${MATTERMOST_WEBHOOK_URL}" ] ; then write_log error "[notifications] No MATTERMOST_WEBHOOK_URL variable set - Skipping sending Mattermost notifications" ; skip_mattermost=true ; fi
if var_nottrue "${skip_mattermost}" ; then
emoji=":bomb:"
message="*[db-backup] ${3}*\n${4}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}"
message="*[db-backup] ${4}*\n${5}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}"
mattermost_recipients=$(echo "${MATTERMOST_RECIPIENT}" | tr "," "\n")
for mattermost_recipient in $mattermost_recipients ; do
payload="payload={\"channel\": \"${mattermost_recipient//\"/\\\"}\", \"username\": \"${MATTERMOST_USERNAME//\"/\\\"}\", \"text\": \"${message//\"/\\\"}\", \"icon_emoji\": \"${emoji}\"}"
@@ -1390,7 +1418,7 @@ EOF
for matrix_room in $matrix_rooms ; do
curl \
-XPOST \
-d "{\"msgtype\":\"m.text\", \"body\":\"*[db-backup] ${3}*\n${4}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}*\"}" \
-d "{\"msgtype\":\"m.text\", \"body\":\"*[db-backup] ${4}*\n${5}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}*\"}" \
"${MATRIX_HOST}/_matrix/client/r0/rooms/${matrix_room}/send/m.room.message?access_token=${MATRIX_ACCESS_TOKEN}"
done
fi
@@ -1406,7 +1434,7 @@ EOF
if [ -z "${ROCKETCHAT_WEBHOOK_URL}" ] ; then write_log error "[notifications] No ROCKETCHAT_WEBHOOK_URL variable set - Skipping sending Rocket.Chat notifications" ; skip_rocketchat=true ; fi
if var_nottrue "${skip_rocketchat}" ; then
emoji=":bomb:"
message="*[db-backup] ${3}*\n${4}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}"
message="*[db-backup] ${4}*\n${5}\n*Timestamp:* ${1}\n*Logfile:* ${2}\n*Error Code: ${3}"
rocketchat_recipients=$(echo "${ROCKETCHAT_RECIPIENT}" | tr "," "\n")
for rocketchat_recipient in $rocketchat_recipients ; do
payload="payload={\"channel\": \"${rocketchat_recipient//\"/\\\"}\", \"username\": \"${ROCKETCHAT_USERNAME//\"/\\\"}\", \"text\": \"${message//\"/\\\"}\", \"icon_emoji\": \"${emoji}\"}"
@@ -1606,20 +1634,20 @@ pre_dbbackup() {
### Pre Backup Custom Script Support
if [ -d "/assets/custom-scripts/pre" ] && dir_notempty "/assets/custom-scripts/pre" ; then
write_log warning "Found Custom Post Scripts in /assets/custom-scripts/pre - Automatically moving them to '${backup_job_script_location_pre}'"
run_as_user mkdir -p "${backup_job_script_location_pre}"
silent run_as_user cp /assets/custom-scripts/pre/* "${backup_job_script_location_pre}"
write_log warn "Found Custom Post Scripts in /assets/custom-scripts/pre - Automatically moving them to '${backup_job_script_location_pre}'"
mkdir -p "${backup_job_script_location_pre}"
silent cp -aR /assets/custom-scripts/pre/* "${backup_job_script_location_pre}"
fi

if [ -d "${backup_job_script_location_pre}" ] && dir_notempty "${backup_job_script_location_pre}" ; then
for f in $(find ${backup_job_script_location_pre} -name \*.sh -type f); do
if var_true "${backup_job_pre_script_x_verify}" ; then
run_as_user ${f} "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${backup_job_filename}"
${f} "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${backup_job_filename}"
else
if [ -x "${f}" ] ; then
write_log notice "Executing pre backup custom script : '${f}'"
## script DB_TYPE DB_HOST DB_NAME STARTEPOCH BACKUP_FILENAME
run_as_user ${f} "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${backup_job_filename}"
${f} "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${backup_job_filename}"
else
write_log error "Can't run pre backup custom script: '${f}' as its filesystem bit is not executible!"
fi
@@ -1662,11 +1690,11 @@ EOZP
### Post Script Support
if [ -n "${backup_job_post_script}" ] ; then
if var_true "${backup_job_post_script_x_verify}" ; then
run_as_user eval "${backup_job_post_script}" "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}"
eval "${backup_job_post_script}" "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}"
else
if [ -x "${backup_job_post_script}" ] ; then
write_log notice "Found POST_SCRIPT environment variable. Executing '${backup_job_post_script}"
run_as_user eval "${backup_job_post_script}" "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}"
eval "${backup_job_post_script}" "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}"
else
write_log error "Can't execute POST_SCRIPT environment variable '${backup_job_post_script}' as its filesystem bit is not executible!"
fi
@@ -1675,20 +1703,20 @@ EOZP
### Post Backup Custom Script Support
if [ -d "/assets/custom-scripts/" ] && dir_notempty "/assets/custom-scripts" ; then
write_log warning "Found Custom Post Scripts in /assets/custom-scripts/ - Automatically moving them to '${backup_job_script_location_post}'"
run_as_user mkdir -p "${backup_job_script_location_post}"
silent run_as_user cp /assets/custom-scripts/* "${backup_job_script_location_post}"
write_log warn "Found Custom Post Scripts in /assets/custom-scripts/ - Automatically moving them to '${backup_job_script_location_post}'"
mkdir -p "${backup_job_script_location_post}"
cp -aR /assets/custom-scripts/* "${backup_job_script_location_post}"
fi

if [ -d "${backup_job_script_location_post}" ] && dir_notempty "${backup_job_script_location_post}" ; then
for f in $(run_as_user find "${backup_job_script_location_post}" -name \*.sh -type f); do
if var_true "${backup_job_post_script_x_verify}" ; then
run_as_user ${f} "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}"
${f} "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}"
else
if [ -x "${f}" ] ; then
write_log notice "Executing post backup custom script : '${f}'"
## script EXIT_CODE DB_TYPE DB_HOST DB_NAME STARTEPOCH FINISHEPOCH DURATIONEPOCH BACKUP_FILENAME FILESIZE CHECKSUMVALUE
run_as_user ${f} "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}"
${f} "${exit_code}" "${dbtype}" "${backup_job_db_host}" "${1}" "${dbbackup_start_time}" "${dbbackup_finish_time}" "${dbbackup_total_time}" "${backup_job_filename}" "${filesize}" "${checksum_value}" "${move_exit_code}"
else
write_log error "Can't run post backup custom script: '${f}' as its filesystem bit is not executible!"
fi
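Custom scripts receive the positional arguments listed in the comment above and, per the 4.0.14 changelog entry, now run as root rather than via run_as_user. A minimal sketch with a hypothetical path and only a subset of the arguments:

```bash
#!/bin/bash
# Hypothetical post-backup script (e.g. 99-report.sh) — illustration only.
# Arguments per the comment above: EXIT_CODE DB_TYPE DB_HOST DB_NAME
# STARTEPOCH FINISHEPOCH DURATIONEPOCH BACKUP_FILENAME FILESIZE CHECKSUMVALUE
exit_code="${1}"; db_type="${2}"; db_host="${3}"; db_name="${4}"
duration="${7}"; filename="${8}"; filesize="${9}"
echo "Backup of ${db_name}@${db_host} (${db_type}) exited ${exit_code}: ${filename}, ${filesize} bytes, ${duration}s"
```

The file needs its executable bit set unless the `_x_verify` branch above applies.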
@@ -1724,7 +1752,7 @@ process_limiter() {
}

run_as_user() {
sudo -u "${DBBACKUP_USER}" $@
sudo -Eu "${DBBACKUP_USER}" "$@"
}
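The switch from unquoted `$@` to `"$@"` (plus `-E`, which asks sudo to preserve the current environment) matters for arguments containing spaces; a small illustration of the word-splitting difference, independent of this image:

```bash
# Unquoted $@ re-splits each argument on whitespace; quoted "$@" preserves arguments intact.
demo_unquoted() { printf '<%s> ' $@;   echo; }
demo_quoted()   { printf '<%s> ' "$@"; echo; }
demo_unquoted "file with spaces"   # -> <file> <with> <spaces>
demo_quoted   "file with spaces"   # -> <file with spaces>
```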
setup_mode() {
@@ -1816,7 +1844,7 @@ timer() {
if [ "${expression_step}" != "${expression}" ]; then
for step in ${validate_temp}; do
if [ $(( ( step - expression_start ) % expression_step )) -eq 0 ]; then
validate_all="$validate_all ${step}"
validate_all="${validate_all} ${step}"
fi
done
else
@@ -1826,13 +1854,14 @@ timer() {
validate_all=$(echo "${validate_all}" | tr ' ' '\n' | sort -n -u | tr '\n' ' ')
for entry in $validate_all; do
if [ "${entry}" -ge "${3}" ]; then
if [ ${entry} -ge ${3} ]; then
echo "${entry}"
return 0
fi
done

echo "${validate_all%% *}"
echo "${validate_all// /}"
#echo "${validate_all%% *}"
}
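A standalone sketch of the step filter used above, assuming a `*/15` minute field (start 0, step 15, field maximum 59); it produces the candidate minutes that the loop then compares against the current value:

```bash
#!/bin/bash
# Reproduces the modulo filter from the cron-field parser above (assumed values).
expression_start=0
expression_step=15
for step in $(seq 0 59); do
  if [ $(( ( step - expression_start ) % expression_step )) -eq 0 ]; then
    validate_all="${validate_all} ${step}"
  fi
done
echo "${validate_all}"   # -> 0 15 30 45
```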
local cron_compare="${3}"
@@ -1851,7 +1880,10 @@ timer() {
local cron_minute="$(echo -n "${2}" | awk '{print $1}')"
local cron_hour="$(echo -n "${2}" | awk '{print $2}')"
local cron_day_of_month="$(echo -n "${2}" | awk '{print $3}')"
local cron_month="$(echo -n "${2}" | awk '{print $4}')"Generating
local cron_month="$(echo -n "${2}" | awk '{print $4}')"
local cron_day_of_week="$(echo -n "${2}" | awk '{print $5}')"

local cron_next_minute="$(date --date=@"${cron_compare}" +"%-M")"
local cron_next_hour="$(date --date=@"${cron_compare}" +"%-H")"
local cron_next_day_of_month="$(date --date=@"${cron_compare}" +"%-d")"
local cron_next_month="$(date --date=@"${cron_compare}" +"%-m")"
@@ -1860,8 +1892,10 @@ timer() {
local cron_next_year="$(date --date=@"${cron_compare}" +"%-Y")"

local cron_next=
local cron_parsed=1

while [ "$cron_parsed" != "0" ]; do
while [ "${cron_parsed}" != "0" ]; do
print_debug "[timer] [cron] Parse Minute"
cron_next=$(parse_expression "${cron_minute}" 59 "${cron_next_minute}")
if [ "${cron_next}" != "${cron_next_minute}" ]; then
if [ "${cron_next_minute}" -gt "${cron_next}" ]; then
@@ -1871,20 +1905,22 @@ timer() {
cron_next_minute="${cron_next}"
fi

print_debug "[timer] [cron] Parse Hour"
cron_next=$(parse_expression "${cron_hour}" 23 "${cron_next_hour}")
if [ "${cron_next}" != "${cron_next_hour}" ]; then
if [ "${cron_next_hour}" -gt "${cron_next}" ]; then
cron_next_day_of_month=$(( cron_next_day_of_month + 1 ))
fi

cron_next_hour="${cron_next}"
#cron_next_minute=0
fi

print_debug "[timer] [cron] Parse Day of Week"
cron_next=$(parse_expression "${cron_day_of_week}" 6 "${cron_next_day_of_week}")
if [ "${cron_next}" != "${cron_next_day_of_week}" ]; then
day_of_week_difference=$(( ${cron_next} - ${cron_next_day_of_week} ))
day_of_week_difference=$(( cron_next - cron_next_day_of_week ))

if [ "${day_of_week_difference}" -lt "0" ]; then
if [ "${day_of_week_difference}" -lt 0 ]; then
day_of_week_difference=$(( day_of_week_difference + 7 ))
fi
@@ -1893,6 +1929,7 @@ timer() {
cron_next_minute=0
fi

print_debug "[timer] [cron] Parse day of month"
case "${cron_next_month}" in
1|3|5|7|8|10|12)
last_day_of_month="31"
@@ -1932,6 +1969,7 @@ timer() {
cron_next_day_of_month=$cron_next
fi

print_debug "[timer] [cron] Parse Next Month"
cron_next=$(parse_expression "${cron_month}" 12 "${cron_next_month}")
if [ "${cron_next}" != "${cron_next_month}" ]; then
if [ "${cron_next}" -gt "12" ]; then
@@ -2043,4 +2081,4 @@ write_log() {
print_${_arg_log_level} "${_arg_log_message}"
output_on
if var_true "${DEBUG_WRITE_LOG}" ; then debug off; fi
}
}