---
# Nightly PostgreSQL backup workflow.
# Dumps selected schemas/tables with pg_dump 17, gzips the result,
# sanity-checks the size, and uploads it to a Cloudflare R2 bucket
# via the S3-compatible API.

name: Database Backup

on:
  schedule:
    # Runs every day at 02:00 UTC.
    - cron: "0 2 * * *"
  # Allow manual runs from the Actions tab.
  workflow_dispatch:

jobs:
  backup:
    runs-on: ubuntu-latest
    steps:
      # The runner's default postgresql-client is older than the server;
      # install the matching major version (17) from the PGDG apt repo so
      # pg_dump does not refuse to dump a newer server.
      - name: Install pg_dump (v17)
        run: |
          sudo apt-get update -qq
          sudo apt-get install -y curl ca-certificates
          sudo install -d /usr/share/postgresql-common/pgdg
          sudo curl -o /usr/share/postgresql-common/pgdg/apt.postgresql.org.asc --fail https://www.postgresql.org/media/keys/ACCC4CF8.asc
          echo "deb [signed-by=/usr/share/postgresql-common/pgdg/apt.postgresql.org.asc] https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" | sudo tee /etc/apt/sources.list.d/pgdg.list
          sudo apt-get update -qq
          sudo apt-get install -y postgresql-client-17

      - name: Dump and compress database
        env:
          # Pass the connection string through the environment rather than
          # interpolating the secret into the script text — avoids shell
          # injection via special characters in the URL and keeps the
          # expanded value out of the rendered script.
          DATABASE_URL: ${{ secrets.DATABASE_URL }}
        run: |
          set -euo pipefail
          TIMESTAMP=$(date -u +"%Y-%m-%dT%H-%M-%SZ")
          FILENAME="backup-${TIMESTAMP}.sql.gz"
          # Export the filename for the upload/cleanup steps below.
          echo "FILENAME=${FILENAME}" >> "$GITHUB_ENV"
          /usr/lib/postgresql/17/bin/pg_dump "$DATABASE_URL" \
            --no-password \
            --format=plain \
            --no-owner \
            --no-acl \
            --schema=public \
            --schema=bluesky_bot \
            --table=auth.users \
            | gzip > "${FILENAME}"
          # Guard against silently uploading an empty or truncated dump.
          FILESIZE=$(stat -c%s "${FILENAME}")
          if [ "$FILESIZE" -lt 1024 ]; then
            echo "::error::Backup file is suspiciously small (${FILESIZE} bytes)"
            exit 1
          fi
          echo "Backup size: $(du -sh "${FILENAME}" | cut -f1)"

      - name: Upload to Cloudflare R2
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
          # R2 ignores the region but the AWS CLI requires one; "auto" is
          # Cloudflare's documented value.
          AWS_DEFAULT_REGION: auto
        run: |
          aws s3 cp "${FILENAME}" \
            "s3://${{ secrets.R2_BUCKET_NAME }}/scroll-reader/${FILENAME}" \
            --endpoint-url "https://${{ secrets.R2_ACCOUNT_ID }}.r2.cloudflarestorage.com"

      # Remove the dump from the runner even if the upload failed, so the
      # plaintext backup never lingers on a (possibly reused) runner.
      - name: Delete local backup file
        if: always()
        run: rm -f "${FILENAME}"