Docker backup commands
stuartmaxwell committed Oct 7, 2024
1 parent d13d983 commit 0c4bf36
Showing 5 changed files with 84 additions and 6 deletions.
Dockerfile: 17 changes (13 additions & 4 deletions)
@@ -1,8 +1,9 @@
FROM python:3.12-slim-bookworm

# Install system dependencies
RUN apt-get update \
    && apt-get upgrade -y \
    && apt-get install -y build-essential libpq-dev netcat-traditional --no-install-recommends \
    && apt-get install -y build-essential sqlite3 awscli cron --no-install-recommends \
    && rm -rf /var/lib/apt/lists/* /usr/share/doc /usr/share/man \
    && apt-get clean

@@ -13,24 +14,32 @@ COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv
# - tell uv to byte-compile packages for faster application startups,
# - prevent uv from accidentally downloading isolated Python builds,
# - pick a Python,
# - and finally declare `/venv` as the target for `uv sync`.
ENV UV_LINK_MODE=copy \
    UV_COMPILE_BYTECODE=1 \
    UV_PYTHON_DOWNLOADS=never \
    UV_PYTHON=python3.12
# UV_PROJECT_ENVIRONMENT=/venv
# UV_CACHE_DIR=/cache

# Add the backup script
COPY backup.sh /app/
RUN chmod +x /app/backup.sh

# Add the entrypoint script
COPY entrypoint.sh /app/
RUN chmod +x /app/entrypoint.sh

# Set up the environment
COPY pyproject.toml /app/
COPY uv.lock /app/
WORKDIR /app
RUN uv sync --locked --no-dev

# Copy the rest of the application
COPY . /app
RUN mkdir /app/staticfiles

# Switching to a non-root user
RUN useradd appuser && chown -R appuser /app
USER appuser

# Expose the port
EXPOSE 8000
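The new apt layer pulls in sqlite3 for the snapshot itself, awscli for the S3 upload, and cron to drive the hourly schedule. A minimal smoke test of the built image, assuming a hypothetical tag of myapp (the tag is not from this repo):

docker build -t myapp .
docker run --rm myapp sh -c 'sqlite3 --version && aws --version && command -v cron'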
backup.sh: 22 changes (22 additions & 0 deletions)
@@ -0,0 +1,22 @@
#!/usr/bin/env bash
#
# Run using cron to backup db every hour
#
set -euf -o pipefail

DB_PATH="/app/$DB_NAME.sqlite3"
BACKUP_PATH="/app/backup"
BACKUP_FILE="${BACKUP_PATH}/backup-$(date +%H).sqlite3"
TAR_FILE="${BACKUP_PATH}/backup-$(date +%H).tar.zst"

# Ensure the backup directory exists
mkdir -p "${BACKUP_PATH}"

# Backup the SQLite database
sqlite3 "${DB_PATH}" "VACUUM INTO '${BACKUP_FILE}'"

# Compress the backup
tar --zstd -cf "${TAR_FILE}" "${BACKUP_FILE}"

# Upload to S3
aws s3 cp "${TAR_FILE}" "s3://${S3_BUCKET}/backup-$(date +%H).tar.zst"
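VACUUM INTO writes a transactionally consistent, compacted copy of the live database to a new file without taking the application offline, which is what makes an hourly snapshot of a WAL-mode SQLite database safe. Because the key ends in $(date +%H), the bucket keeps a rolling 24 hours of backups, each hour overwriting the same key from the previous day. A sketch of a one-off manual run, assuming a compose service named web and placeholder values for the variables (none of these names come from this repo):

docker compose exec web sh -c 'DB_NAME=mydb S3_BUCKET=my-bucket /app/backup.sh'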
config/settings.py: 16 changes (15 additions & 1 deletion)
@@ -126,7 +126,19 @@

# Database
DB_NAME = BASE_DIR / f"{env('DB_NAME')}.sqlite3" if "sqlite" in env("DB_ENGINE") else env("DB_NAME")

SQLITE_OPTIONS = {
    "init_command": (
        "PRAGMA foreign_keys = ON;"
        "PRAGMA journal_mode = WAL;"
        "PRAGMA synchronous = NORMAL;"
        "PRAGMA busy_timeout = 5000;"
        "PRAGMA temp_store = MEMORY;"
        "PRAGMA mmap_size = 134217728;"
        "PRAGMA journal_size_limit = 67108864;"
        "PRAGMA cache_size = 2000;"
    ),
    "transaction_mode": "IMMEDIATE",
}
DATABASES = {
    "default": {
        "ENGINE": env("DB_ENGINE"),
@@ -137,6 +149,8 @@
        "PORT": env("DB_PORT"),
    },
}
# Django reads init_command and transaction_mode from the OPTIONS dict
if "sqlite" in env("DB_ENGINE"):
    DATABASES["default"]["OPTIONS"] = SQLITE_OPTIONS

DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"

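WAL journal mode plus synchronous = NORMAL is the combination that lets the hourly VACUUM INTO read a consistent snapshot while the application keeps writing. Of these pragmas, journal_mode is the one that persists in the database file itself, so it can be verified after the fact; a hedged check, assuming a placeholder DB_NAME of mydb:

# Prints "wal" once Django has opened the database at least once
sqlite3 /app/mydb.sqlite3 'PRAGMA journal_mode;'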
docker-compose.yml: 3 changes (3 additions & 0 deletions)
@@ -5,6 +5,9 @@ services:
command: uv run --no-cache gunicorn --worker-tmp-dir /dev/shm --workers=2 --worker-class=gthread --bind 0.0.0.0:8000 config.wsgi
user: "1000"
environment:
  - "S3_BUCKET=${S3_BUCKET}"
  - "AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}"
  - "AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}"
  - "SECRET_KEY=${SECRET_KEY}"
  - "DEBUG=${DEBUG}"
  - "ALLOWED_HOSTS=${ALLOWED_HOSTS}"
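Compose interpolates these from the invoking shell or from an .env file next to docker-compose.yml. A placeholder .env for illustration, with every value invented:

# .env (placeholders only; keep real credentials out of version control)
S3_BUCKET=my-backup-bucket
AWS_ACCESS_KEY_ID=AKIAEXAMPLE
AWS_SECRET_ACCESS_KEY=example-secret-key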
entrypoint.sh: 32 changes (31 additions & 1 deletion)
@@ -1,7 +1,22 @@
#!/bin/bash

# Exit immediately if any command fails
set -e
# set -e

# Function to download the latest SQLite database backup from S3
download_latest_backup() {
    echo "Downloading latest SQLite database backup from S3..."
    latest_backup=$(aws s3 ls "s3://${S3_BUCKET}/" --recursive | sort | tail -n 1 | awk '{print $4}')
    if [ -z "$latest_backup" ]; then
        echo "No backup found in S3 bucket."
    else
        echo "Latest backup found: $latest_backup"
        aws s3 cp "s3://${S3_BUCKET}/${latest_backup}" /app/latest-backup.tar.zst
        tar --zstd -xf /app/latest-backup.tar.zst -O > "/app/${DB_NAME}.sqlite3"
        echo "SQLite database backup downloaded and extracted."
    fi
}


if [ "$DB_ENGINE" = "django.db.backends.postgresql" ]
then
@@ -14,6 +29,17 @@ then
echo "PostgreSQL ready!"
fi

# Check if the database engine is SQLite and the database file doesn't exist
echo "Checking if SQLite database file exists"
# Print out environment variables
echo "DB_ENGINE: $DB_ENGINE"
echo "DB_NAME: $DB_NAME"
echo "S3_BUCKET: $S3_BUCKET"
echo "AWS_ENDPOINT_URL: $AWS_ENDPOINT_URL"
if [ "$DB_ENGINE" = "django.db.backends.sqlite3" ] && [ ! -f "/app/$DB_NAME.sqlite3" ]; then
    download_latest_backup
fi

# Apply database migrations
echo "Applying database migrations"
uv run --no-cache manage.py migrate --noinput
@@ -22,6 +48,10 @@ uv run --no-cache manage.py migrate --noinput
echo "Collecting static files"
uv run --no-cache manage.py collectstatic --noinput

# Set up cronjob
echo "Setting up cronjob"
echo "0 * * * * /app/backup.sh" | crontab -

# Start the Django server with Gunicorn
echo "Starting server"
exec "$@"
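One caveat with the crontab installed above: cron starts jobs with a near-empty environment, so the DB_NAME and S3_BUCKET lookups in backup.sh may come up blank when the schedule fires. A hedged variant that writes the variables into the crontab itself, still assuming the cron daemon is actually started somewhere in the container:

# Debian's cron accepts VAR=value lines at the top of a user crontab
{
    echo "DB_NAME=${DB_NAME}"
    echo "S3_BUCKET=${S3_BUCKET}"
    echo "AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}"
    echo "AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}"
    echo "0 * * * * /app/backup.sh"
} | crontab -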
