# Scheduled PostgreSQL backup: dumps selected schemas/tables with pg_dump 17,
# gzips the result, sanity-checks its size, and uploads it to Cloudflare R2
# via the S3-compatible API. Runs daily at 02:00 UTC or on manual dispatch.
name: Database Backup

on:
  schedule:
    # Runs every day at 02:00 UTC
    - cron: "0 2 * * *"
  workflow_dispatch:

jobs:
  backup:
    runs-on: ubuntu-latest
    # Dump + upload is network-bound; bound it so a hung connection
    # cannot burn runner minutes indefinitely.
    timeout-minutes: 30

    steps:
      # Ubuntu's default postgresql-client may lag the server version;
      # install client 17 from the official PGDG apt repository so pg_dump
      # matches the server major version.
      - name: Install pg_dump (v17)
        run: |
          sudo apt-get update -qq
          sudo apt-get install -y curl ca-certificates
          sudo install -d /usr/share/postgresql-common/pgdg
          sudo curl -o /usr/share/postgresql-common/pgdg/apt.postgresql.org.asc --fail https://www.postgresql.org/media/keys/ACCC4CF8.asc
          echo "deb [signed-by=/usr/share/postgresql-common/pgdg/apt.postgresql.org.asc] https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" | sudo tee /etc/apt/sources.list.d/pgdg.list
          sudo apt-get update -qq
          sudo apt-get install -y postgresql-client-17

      - name: Dump and compress database
        env:
          # Pass the connection string through the environment instead of
          # interpolating ${{ secrets.* }} directly into the script body:
          # avoids shell-injection via the secret's content and keeps it
          # out of any echoed command text.
          DATABASE_URL: ${{ secrets.DATABASE_URL }}
        run: |
          # -u catches typo'd/unset variables; pipefail makes the
          # pg_dump | gzip pipeline fail if pg_dump itself fails.
          set -euo pipefail

          TIMESTAMP=$(date -u +"%Y-%m-%dT%H-%M-%SZ")
          FILENAME="backup-${TIMESTAMP}.sql.gz"
          # Export for the later upload/cleanup steps.
          echo "FILENAME=${FILENAME}" >> "$GITHUB_ENV"

          /usr/lib/postgresql/17/bin/pg_dump "$DATABASE_URL" \
            --no-password \
            --format=plain \
            --no-owner \
            --no-acl \
            --schema=public \
            --schema=bluesky_bot \
            --table=auth.users \
            | gzip > "${FILENAME}"

          # Guard against a silently-empty dump (e.g. connection/auth
          # failure producing a near-empty archive): fail under 1 KiB.
          FILESIZE=$(stat -c%s "${FILENAME}")
          if [ "$FILESIZE" -lt 1024 ]; then
            echo "::error::Backup file is suspiciously small (${FILESIZE} bytes)"
            exit 1
          fi

          echo "Backup size: $(du -sh "${FILENAME}" | cut -f1)"

      - name: Upload to Cloudflare R2
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: auto
          # AWS CLI >= 2.23 sends CRC-based integrity checksums by default,
          # which Cloudflare R2 rejects; only compute/validate when required
          # (per Cloudflare's S3-compatibility guidance).
          AWS_REQUEST_CHECKSUM_CALCULATION: when_required
          AWS_RESPONSE_CHECKSUM_VALIDATION: when_required
        run: |
          aws s3 cp "${FILENAME}" \
            "s3://${{ secrets.R2_BUCKET_NAME }}/${FILENAME}" \
            --endpoint-url "https://${{ secrets.R2_ACCOUNT_ID }}.r2.cloudflarestorage.com"

      # Runner is ephemeral, but remove the plaintext dump promptly anyway.
      - name: Delete local backup file
        if: always()
        run: rm -f "${FILENAME}"