33 changes: 11 additions & 22 deletions Dockerfile
@@ -1,28 +1,17 @@
# With Python 3.12.4 on Alpine 3.20, s3cmd 2.4.0 fails with an AttributeError.
# See ITSE-1440 for details.
FROM python:3.12.4-alpine

# Current version of s3cmd is in edge/testing repo
RUN echo https://dl-cdn.alpinelinux.org/alpine/edge/testing >> /etc/apk/repositories
RUN <<EOF
apk update
apk add --no-cache \
bash \
curl \
postgresql14-client \
py3-pip

# Install everything via repo because repo & pip installs can break things
RUN apk update \
&& apk add --no-cache \
bash \
postgresql14-client \
py3-magic \
py3-dateutil \
curl \
jq

RUN wget https://github.com/s3tools/s3cmd/archive/refs/tags/v2.4.0.tar.gz \
&& tar xzf v2.4.0.tar.gz \
&& cd s3cmd-2.4.0 \
&& python setup.py install \
&& cd .. \
&& rm -rf s3cmd-2.4.0 v2.4.0.tar.gz
# Install sentry-cli
RUN curl -sL https://sentry.io/get-cli/ | bash
curl -sL https://sentry.io/get-cli/ | bash

pip3 install awscli
EOF

COPY application/ /data/
WORKDIR /data
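
The Dockerfile change collapses the separate apk, s3cmd and sentry-cli install layers into a single heredoc-style RUN block, with awscli installed via pip in place of s3cmd; the heredoc form needs BuildKit, which is the default builder in current Docker releases. A quick way to sanity-check the resulting image is to run the version commands of the tools the scripts depend on; the image tag below is hypothetical:

```sh
# Hypothetical tag; confirms awscli, sentry-cli and the postgres client are present.
docker build -t pg-backup:awscli-test .
docker run --rm pg-backup:awscli-test sh -c 'aws --version && sentry-cli --version && pg_dump --version'
```
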
14 changes: 6 additions & 8 deletions application/backup.sh
@@ -121,7 +121,7 @@ fi

# Upload compressed backup file to S3
start=$(date +%s);
s3cmd put /tmp/${DB_NAME}.sql.gz ${S3_BUCKET} || STATUS=$?;
aws s3 cp "/tmp/${DB_NAME}.sql.gz" "s3://${S3_BUCKET}/${DB_NAME}.sql.gz" || STATUS=$?
if [ $STATUS -ne 0 ]; then
error_message="${MYNAME}: FATAL: Copy backup to ${S3_BUCKET} of ${DB_NAME} returned non-zero status ($STATUS) in $(expr ${end} - ${start}) seconds.";
log "ERROR" "${error_message}";
@@ -130,7 +130,7 @@ if [ $STATUS -ne 0 ]; then
fi

# Upload checksum file
s3cmd put /tmp/${DB_NAME}.sql.sha256.gz ${S3_BUCKET} || STATUS=$?;
aws s3 cp "/tmp/${DB_NAME}.sql.sha256.gz" "s3://${S3_BUCKET}/${DB_NAME}.sql.sha256.gz" || STATUS=$?;
end=$(date +%s);
if [ $STATUS -ne 0 ]; then
error_message="${MYNAME}: FATAL: Copy checksum to ${S3_BUCKET} of ${DB_NAME} returned non-zero status ($STATUS).";
@@ -144,12 +144,10 @@ fi
# Backblaze B2 Upload
if [ "${B2_BUCKET}" != "" ]; then
start=$(date +%s);
s3cmd \
--access_key=${B2_APPLICATION_KEY_ID} \
--secret_key=${B2_APPLICATION_KEY} \
--host=${B2_HOST} \
--host-bucket='%(bucket)s.'"${B2_HOST}" \
put /tmp/${DB_NAME}.sql.gz s3://${B2_BUCKET}/${DB_NAME}.sql.gz;
AWS_ACCESS_KEY_ID="${B2_APPLICATION_KEY_ID}" \
AWS_SECRET_ACCESS_KEY="${B2_APPLICATION_KEY}" \
aws s3 cp "/tmp/${DB_NAME}.sql.gz" "s3://${B2_BUCKET}/${DB_NAME}.sql.gz" \
--endpoint-url "https://${B2_HOST}"
STATUS=$?;
end=$(date +%s);
if [ $STATUS -ne 0 ]; then
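
For the Backblaze upload, awscli talks to B2's S3-compatible API: the credentials are scoped to the single `aws` invocation via environment variables, and `--endpoint-url` points the client at the B2 host instead of AWS. The same combination can be exercised on its own to confirm the credentials and endpoint; the variables below are placeholders:

```sh
# Placeholders; B2_HOST is typically of the form s3.<region>.backblazeb2.com.
AWS_ACCESS_KEY_ID="${B2_APPLICATION_KEY_ID}" \
AWS_SECRET_ACCESS_KEY="${B2_APPLICATION_KEY}" \
aws s3 ls "s3://${B2_BUCKET}/" --endpoint-url "https://${B2_HOST}"
```
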
4 changes: 2 additions & 2 deletions application/restore.sh
Expand Up @@ -95,7 +95,7 @@ log "INFO" "${MYNAME}: copying database ${DB_NAME} backup and checksum from ${S3
start=$(date +%s)

# Download database backup
s3cmd get -f ${S3_BUCKET}/${DB_NAME}.sql.gz /tmp/${DB_NAME}.sql.gz || STATUS=$?
aws s3 cp s3://${S3_BUCKET}/${DB_NAME}.sql.gz /tmp/${DB_NAME}.sql.gz || STATUS=$?
if [ $STATUS -ne 0 ]; then
error_message="${MYNAME}: FATAL: Copy backup of ${DB_NAME} from ${S3_BUCKET} returned non-zero status ($STATUS) in $(expr $(date +%s) - ${start}) seconds."
log "ERROR" "${error_message}"
@@ -104,7 +104,7 @@ if [ $STATUS -ne 0 ]; then
fi

# Download checksum file
s3cmd get -f ${S3_BUCKET}/${DB_NAME}.sql.sha256.gz /tmp/${DB_NAME}.sql.sha256.gz || STATUS=$?
aws s3 cp s3://${S3_BUCKET}/${DB_NAME}.sql.sha256.gz /tmp/${DB_NAME}.sql.sha256.gz || STATUS=$?
end=$(date +%s)
if [ $STATUS -ne 0 ]; then
error_message="${MYNAME}: FATAL: Copy checksum of ${DB_NAME} from ${S3_BUCKET} returned non-zero status ($STATUS) in $(expr ${end} - ${start}) seconds."