Mirror of https://github.com/tiredofit/docker-db-backup.git (synced 2025-12-21 13:23:12 +01:00)
Sanitize DB_HOST for URLs
@@ -79,10 +79,10 @@ bootstrap_variables() {
 backup_couch() {
     pre_dbbackup
-    target=couch_${dbname}_${dbhost}_${now}.txt
+    target=couch_${dbname}_${dbhost#*//}_${now}.txt
     compression
     print_notice "Dumping CouchDB database: '${dbname}' ${compression_string}"
-    curl -sSL -X GET ${dbhost}:${dbport}/${dbname}/_all_docs?include_docs=true ${compress_cmd} | $compress_cmd > ${TEMP_LOCATION}/${target}
+    curl -sSL -X GET ${dbhost}:${dbport}/${dbname}/_all_docs?include_docs=true ${compress_cmd} | $compress_cmd > "${TEMP_LOCATION}"/"${target}"
     exit_code=$?
     check_exit_code $target
     generate_checksum
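The two edits in this hunk do different jobs: ${dbhost#*//} strips everything up to and including "//", so a scheme-prefixed DB_HOST no longer leaks "http://" (and its slashes) into the generated filename, while quoting "${TEMP_LOCATION}"/"${target}" keeps the curl redirect target intact if either value contains spaces. A minimal sketch of the parameter expansion, using hypothetical host values:

    # Hypothetical values for illustration only; not taken from the script.
    dbhost="http://couchdb.example.com"
    echo "${dbhost#*//}"    # -> couchdb.example.com  (shortest prefix matching '*//' removed)

    dbhost="couchdb.example.com"
    echo "${dbhost#*//}"    # -> couchdb.example.com  (no '//' present, value unchanged)

    # Without the expansion the slashes from "http://" would land inside the
    # target filename (e.g. couch_mydb_http://couchdb.example.com_<now>.txt),
    # which is not a valid single path component.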
@@ -103,7 +103,7 @@ backup_influx() {
     for db in ${db_names}; do
         pre_dbbackup
         if [ "${db}" != "justbackupeverything" ] ; then bucket="-bucket $db" ; else db=all ; fi
-        target=influx_${db}_${dbhost}_${now}
+        target=influx_${db}_${dbhost#*//}_${now}
         compression
         print_notice "Dumping Influx database: '${db}'"
         influxd backup ${influx_compression} ${bucket} -host ${dbhost}:${dbport} ${EXTRA_OPTS} "${TEMP_LOCATION}"/"${target_dir}"
@@ -111,7 +111,7 @@ backup_influx() {
         check_exit_code $target_dir
         print_notice "Creating archive file of '${target_dir}' with tar ${compresion_string}"
         tar cf - "${TEMP_LOCATION}"/"${target_dir}" | $dir_compress_cmd > "${TEMP_LOCATION}"/"${target_dir}".tar"${extension}"
-        target=influx_${db}_${dbhost}_${now}.tar${extension}
+        target=influx_${db}_${dbhost#*//}_${now}.tar${extension}
         generate_checksum
         move_dbbackup
         post_dbbackup $db
@@ -121,14 +121,14 @@ backup_influx() {
     for db in ${db_names}; do
         pre_dbbackup
         if [ "${db}" != "justbackupeverything" ] ; then bucket="--bucket $db" ; else db=all ; fi
-        target=influx2_${db}_${dbhost}_${now}
+        target=influx2_${db}_${dbhost#*//}_${now}
         compression
         print_notice "Dumping Influx2 database: '${db}'"
         influx backup --org ${dbuser} ${bucket} --host ${dbhost}:${dbport} --token ${dbpass} ${EXTRA_OPTS} --compression none "${TEMP_LOCATION}"/"${target_dir}"
         exit_code=$?
         check_exit_code $target_dir
         create_archive
-        target=influx2_${db}_${dbhost}_${now}.tar${extension}
+        target=influx2_${db}_${dbhost#*//}_${now}.tar${extension}
         generate_checksum
         move_dbbackup
         post_dbbackup $db
@@ -137,15 +137,6 @@ backup_influx() {
     esac
 }
 
-create_archive() {
-    if [ "${exit_code}" = "0" ] ; then
-        print_notice "Creating archive file of '${target_dir}' with tar ${compresion_string}"
-        tar cf - "${TEMP_LOCATION}"/"${target_dir}" | $dir_compress_cmd > "${TEMP_LOCATION}"/"${target_dir}".tar"${extension}"
-    else
-        print_warn "Skipping creating archive file because backup did not complete successfully"
-    fi
-}
-
 backup_mongo() {
     pre_dbbackup
     if [ "${ENABLE_COMPRESSION,,}" = "none" ] || [ "${ENABLE_COMPRESSION,,}" = "false" ] ; then
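Note that this hunk only deletes the create_archive() definition from its old position between backup_influx() and backup_mongo(); the same body is re-added after compression() in the final hunk below, so the helper is relocated rather than dropped.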
@@ -437,7 +428,6 @@ cleanup_old_data() {
     fi
 }
 
-
 compression() {
     if var_false "${ENABLE_PARALLEL_COMPRESSION}" ; then
         PARALLEL_COMPRESSION_THREADS=1
@@ -499,6 +489,15 @@ compression() {
     esac
 }
 
+create_archive() {
+    if [ "${exit_code}" = "0" ] ; then
+        print_notice "Creating archive file of '${target_dir}' with tar ${compresion_string}"
+        tar cf - "${TEMP_LOCATION}"/"${target_dir}" | $dir_compress_cmd > "${TEMP_LOCATION}"/"${target_dir}".tar"${extension}"
+    else
+        print_warn "Skipping creating archive file because backup did not complete successfully"
+    fi
+}
+
 generate_checksum() {
     if var_true "${ENABLE_CHECKSUM}" ;then
         if [ "${exit_code}" = "0" ] ; then
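For context, create_archive() is driven entirely by globals set by its callers, as the Influx2 hunk above shows: exit_code from the dump command, plus target_dir, dir_compress_cmd, TEMP_LOCATION and extension prepared beforehand. A minimal sketch of that calling pattern, with hypothetical values and assuming the functions from the script are already defined:

    # Hypothetical values; the real script derives these in compression() and
    # the per-engine backup functions, and defines print_notice/print_warn.
    TEMP_LOCATION="/tmp/backups"
    target_dir="influx2_all_db.example.com_20250101-000000"
    dir_compress_cmd="gzip"
    extension=".gz"

    some_dump_command "${TEMP_LOCATION}/${target_dir}"   # placeholder for the engine-specific dump
    exit_code=$?
    create_archive    # tars and compresses ${target_dir} only when exit_code is 0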