diff --git a/README.md b/README.md
index 6def421..30395bc 100644
--- a/README.md
+++ b/README.md
@@ -189,19 +189,19 @@ Your Organization will be mapped to `DB_USER` and your root token will need to b
 
 If `BACKUP_LOCATION` = `S3` then the following options are used.
 
-| Parameter             | Description                                                                                  | Default |
-|-----------------------|----------------------------------------------------------------------------------------------|---------|
-| `S3_BUCKET`           | S3 Bucket name e.g. `mybucket`                                                               |         |
-| `S3_KEY_ID`           | S3 Key ID                                                                                    |         |
-| `S3_KEY_SECRET`       | S3 Key Secret                                                                                |         |
-| `S3_PATH`             | S3 Pathname to save to (must end in a trailing slash e.g. '`backup/`')                      |         |
-| `S3_REGION`           | Define region in which bucket is defined. Example: `ap-northeast-2`                         |         |
-| `S3_HOST`             | Hostname (and port) of S3-compatible service, e.g. `minio:8080`. Defaults to AWS.           |         |
-| `S3_PROTOCOL`         | Protocol to connect to `S3_HOST`. Either `http` or `https`. Defaults to `https`.            | `https` |
-| `S3_EXTRA_OPTS`       | Add any extra options to the end of the `aws-cli` process execution                         |         |
+| Parameter             | Description                                                                                 | Default |
+|-----------------------|---------------------------------------------------------------------------------------------|---------|
+| `S3_BUCKET`           | S3 Bucket name e.g. `mybucket`                                                              |         |
+| `S3_KEY_ID`           | S3 Key ID                                                                                   |         |
+| `S3_KEY_SECRET`       | S3 Key Secret                                                                               |         |
+| `S3_PATH`             | S3 Pathname to save to (must NOT end in a trailing slash e.g. '`backup`')                   |         |
+| `S3_REGION`           | Define region in which bucket is defined. Example: `ap-northeast-2`                         |         |
+| `S3_HOST`             | Hostname (and port) of S3-compatible service, e.g. `minio:8080`. Defaults to AWS.           |         |
+| `S3_PROTOCOL`         | Protocol to connect to `S3_HOST`. Either `http` or `https`. Defaults to `https`.            | `https` |
+| `S3_EXTRA_OPTS`       | Add any extra options to the end of the `aws-cli` process execution                         |         |
 | `S3_CERT_CA_FILE`     | Map a volume and point to your custom CA Bundle for verification e.g. `/certs/bundle.pem`   |         |
-| _*OR*_                |                                                                                              |         |
-| `S3_CERT_SKIP_VERIFY` | Skip verifying self signed certificates when connecting                                     | `TRUE`  |
+| _*OR*_                |                                                                                             |         |
+| `S3_CERT_SKIP_VERIFY` | Skip verifying self signed certificates when connecting                                     | `TRUE`  |
 
 #### Upload to a Azure storage account by `blobxfer`
 
diff --git a/install/assets/functions/10-db-backup b/install/assets/functions/10-db-backup
index 26111c0..ee95db2 100644
--- a/install/assets/functions/10-db-backup
+++ b/install/assets/functions/10-db-backup
@@ -471,7 +471,7 @@ cleanup_old_data() {
         ;;
         "s3" | "minio" )
             print_info "Cleaning up old backups on S3 storage"
-            aws ${PARAM_AWS_ENDPOINT_URL} s3 ls s3://${S3_BUCKET}/${S3_PATH} ${s3_ssl} ${s3_ca_cert} ${S3_EXTRA_OPTS} | grep " DIR " -v | grep " PRE " -v | while read -r s3_file; do
+            aws ${PARAM_AWS_ENDPOINT_URL} s3 ls s3://${S3_BUCKET}/${S3_PATH}/ ${s3_ssl} ${s3_ca_cert} ${S3_EXTRA_OPTS} | grep " DIR " -v | grep " PRE " -v | while read -r s3_file; do
                 s3_createdate=$(echo $s3_file | awk {'print $1" "$2'})
                 s3_createdate=$(date -d "$s3_createdate" "+%s")
                 s3_olderthan=$(echo $(( $(date +%s)-${DB_CLEANUP_TIME}*60 )))
@@ -479,7 +479,7 @@ cleanup_old_data() {
                     s3_filename=$(echo $s3_file | awk {'print $4'})
                     if [ "$s3_filename" != "" ] ; then
                         print_debug "Deleting $s3_filename"
-                        silent aws ${PARAM_AWS_ENDPOINT_URL} s3 rm s3://${S3_BUCKET}/${S3_PATH}${s3_filename} ${s3_ssl} ${s3_ca_cert} ${S3_EXTRA_OPTS}
+                        silent aws ${PARAM_AWS_ENDPOINT_URL} s3 rm s3://${S3_BUCKET}/${S3_PATH}/${s3_filename} ${s3_ssl} ${s3_ca_cert} ${S3_EXTRA_OPTS}
                     fi
                 fi
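
A rough sketch of the resulting behaviour (illustration only, not part of the patch): assuming hypothetical values `S3_BUCKET=mybucket` and `S3_PATH=backup`, and a hypothetical backup file name `mysql_all_20240101-000000.sql.gz`, the two changed commands expand roughly as follows (endpoint URL, TLS and extra options omitted):

    # Listing candidates for cleanup: the script now appends the "/" itself,
    # so S3_PATH is supplied without a trailing slash.
    aws s3 ls s3://mybucket/backup/

    # Removing an expired backup: the "/" between path and filename is also
    # added by the script instead of being expected inside S3_PATH.
    aws s3 rm s3://mybucket/backup/mysql_all_20240101-000000.sql.gz

Previously the filename was concatenated directly onto `S3_PATH`, so a user who set `S3_PATH=backup` without the then-required trailing slash would get malformed keys such as `s3://mybucket/backupmysql_all_20240101-000000.sql.gz`. Moving the slash into the script and documenting `S3_PATH` without a trailing slash keeps the README and the cleanup code consistent.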