Use docker compose for debugging production #158


Merged
31 changes: 29 additions & 2 deletions .env/local.sample
@@ -1,8 +1,35 @@
# This is a sample of environment variables which are used only to run Docker locally.
# These are never used in production.

# Django
# ------------------------------------------------------------------------------
# Run Django in production mode (DEBUG=False)
DJANGO_SETTINGS_MODULE=config.settings.prod

# Use a strong secret in production
SECRET_KEY="this-is-a-bad-secret"

# In production, we use postgres but for testing a deployment, using SQLite is fine
DATABASE_URL="sqlite:///db.sqlite3"

# PostgreSQL
# ------------------------------------------------------------------------------
# This must match .env/postgres
DATABASE_URL=pgsql://localuser:localpass@postgres:5432/sandiegopython


# Redis
# ------------------------------------------------------------------------------
REDIS_URL=redis://redis:6379/0


# S3/R2 Media Storage
# ------------------------------------------------------------------------------
# If not empty, S3/R2 will be used for media storage
AWS_S3_ACCESS_KEY_ID=
AWS_S3_SECRET_ACCESS_KEY=
AWS_STORAGE_BUCKET_NAME=
# If using a custom domain for media storage, set the MEDIA_URL
# and AWS_S3_CUSTOM_DOMAIN
AWS_S3_CUSTOM_DOMAIN=
MEDIA_URL=/media/
# The endpoint URL is necessary for Cloudflare R2
AWS_S3_ENDPOINT_URL=
7 changes: 7 additions & 0 deletions .env/postgres
@@ -0,0 +1,7 @@
# PostgreSQL
# ------------------------------------------------------------------------------
POSTGRES_HOST=postgres
POSTGRES_PORT=5432
POSTGRES_DB=sandiegopython
POSTGRES_USER=localuser
POSTGRES_PASSWORD=localpass
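As a sanity check, these credentials can be exercised from inside the django container — a sketch, assuming the `postgresql-client` package that the Dockerfile below installs:

```
# From a shell inside the django container (see `make dockershell`):
PGPASSWORD=localpass psql -h postgres -p 5432 -U localuser sandiegopython
```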
18 changes: 10 additions & 8 deletions Dockerfile
@@ -20,22 +20,24 @@ RUN apt-get install -y --no-install-recommends \
    make \
    build-essential \
    g++ \
    postgresql-client \
    git

RUN mkdir -p /code

WORKDIR /code

COPY . /code/
# Requirements are installed here to ensure they will be cached.
# https://docs.docker.com/build/cache/#use-the-dedicated-run-cache
COPY ./requirements /requirements
RUN pip install --upgrade pip
RUN --mount=type=cache,target=/root/.cache/pip pip install -r /requirements/deployment.txt
RUN --mount=type=cache,target=/root/.cache/pip pip install -r /requirements/local.txt

# Cache dependencies when building which should result in faster docker builds
RUN --mount=type=cache,target=/root/.cache/pip set -ex && \
    pip install --upgrade --no-cache-dir pip && \
    pip install -r /code/requirements.txt && \
    pip install -r /code/requirements/local.txt
COPY . /code/

# Build JS/static assets
RUN npm install
RUN --mount=type=cache,target=/root/.npm npm install
RUN npm run build

RUN python manage.py collectstatic --noinput
@@ -52,4 +54,4 @@ RUN date -u +'%Y-%m-%dT%H:%M:%SZ' > BUILD_DATE

EXPOSE 8000

CMD ["gunicorn", "--timeout", "15", "--bind", ":8000", "--workers", "2", "--max-requests", "10000", "--max-requests-jitter", "100", "config.wsgi"]
CMD ["gunicorn", "--timeout", "15", "--bind", ":8000", "--workers", "2", "--max-requests", "10000", "--max-requests-jitter", "100", "--log-file", "-", "config.wsgi"]
24 changes: 23 additions & 1 deletion Makefile
@@ -1,10 +1,17 @@
.PHONY: help test clean deploy
.PHONY: help test clean dockerbuild dockerserve dockershell deploy


DOCKER_CONFIG=compose.yaml


help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  test         Run the full test suite"
	@echo "  clean        Delete assets processed by webpack"
	@echo "  dockerbuild  Build the Docker compose dev environment"
	@echo "  dockerserve  Run the Docker containers for the site"
	@echo "               (starts a webserver on http://localhost:8000)"
	@echo "  dockershell  Connect to a bash shell on the Django Docker container"
	@echo "  deploy       Deploy the app to fly.io"


@@ -14,6 +21,21 @@ test:
clean:
	rm -rf assets/dist/*

# Build the local multi-container application
# This command can take a while the first time
dockerbuild:
	docker compose -f $(DOCKER_CONFIG) build

# You should run "dockerbuild" at least once before running this
# It isn't a dependency because running "dockerbuild" can take some time
dockerserve:
	docker compose -f $(DOCKER_CONFIG) up

# Use this command to inspect the container, run management commands,
# or run anything else on the Django container
dockershell:
	docker compose -f $(DOCKER_CONFIG) run --rm django /bin/bash

# Build and deploy the production container
deploy:
	flyctl deploy
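Because the compose file reads `SANDIEGOPYTHON_DJANGO_PORT` (see compose.yaml below), the published port can be overridden without editing any files, for example:

```
# Serve on port 8001 instead of the default 8000
SANDIEGOPYTHON_DJANGO_PORT=8001 make dockerserve
```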
20 changes: 13 additions & 7 deletions README.md
@@ -42,13 +42,19 @@ you can build the container and run it locally:
cp .env/local.sample .env/local

# Build the docker image for sandiegopython.org
docker buildx build -t sandiegopython.org .

# Start a development server on http://localhost:8000
docker run --env-file=".env/local" --publish=8000:8000 sandiegopython.org

# You can start a shell to the container with the following:
docker run --env-file=".env/local" -it sandiegopython.org /bin/bash
# Use Docker Compose to run Redis and PostgreSQL just like in production
# Note: Docker is used in production but Docker Compose is only for development
make dockerbuild

# Start a development web server on http://localhost:8000
# Use ctrl+C to stop
make dockerserve

# While the server is running, you can start a bash shell
# on the container with the following.
# Once you have a shell, you can run migrations, manually connect
# to the local Postgres database, or run anything else:
make dockershell
```
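Typical first-run steps inside `make dockershell` might look like this — illustrative only; the commands are standard Django management commands:

```
# Inside the container started by `make dockershell`:
python manage.py migrate           # create the database tables
python manage.py createsuperuser   # optional: create an admin account
```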


40 changes: 40 additions & 0 deletions compose.yaml
@@ -0,0 +1,40 @@
# Docker Compose Local Development Setup
#
# This starts a local multi-container development environment
# with Postgres, Redis, and Django.
# The configuration comes from .env/local and .env/postgres
#
# To run:
# $ make dockerbuild
# $ make dockerserve

volumes:
  local_postgres_data: {}

services:
  django:
    build:
      context: .
      dockerfile: ./Dockerfile
    image: sandiegopython_local_django
    depends_on:
      - postgres
    env_file:
      - ./.env/local
      - ./.env/postgres
    ports:
      - "${SANDIEGOPYTHON_DJANGO_PORT:-8000}:8000"
    # Allow us to run `docker attach` and take control of STDIN
    # so we can debug our code with interactive pdb
    stdin_open: true
    tty: true

  postgres:
    image: postgres:15.2
    volumes:
      - local_postgres_data:/var/lib/postgresql/data
    env_file:
      - ./.env/postgres

  redis:
    image: redis:5.0
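With `stdin_open` and `tty` enabled, a breakpoint such as `pdb.set_trace()` can be driven interactively by attaching to the running container — a sketch using standard Docker commands:

```
# Attach to the django service to interact with pdb
docker attach $(docker compose -f compose.yaml ps -q django)
# Detach without stopping the container: ctrl+p then ctrl+q
```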
2 changes: 1 addition & 1 deletion config/settings/base.py
@@ -116,7 +116,7 @@
    os.path.join(BASE_DIR, "pythonsd", "static"),
]

MEDIA_URL = "/media/"
MEDIA_URL = os.environ.get("MEDIA_URL", default="/media/")
MEDIA_ROOT = os.path.join(BASE_DIR, "media")


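With `MEDIA_URL` now read from the environment, a custom media domain can be configured purely through env vars, paired with the `AWS_S3_CUSTOM_DOMAIN` setting added in prod.py below — hypothetical values:

```
# Hypothetical .env entries for a custom media domain
MEDIA_URL=https://media.example.com/
AWS_S3_CUSTOM_DOMAIN=media.example.com
```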
16 changes: 16 additions & 0 deletions config/settings/prod.py
@@ -34,6 +34,22 @@
ADMIN_URL = os.environ.get("ADMIN_URL", "admin")


# Django-storages
# https://django-storages.readthedocs.io
# --------------------------------------------------------------------------
# Optionally store media files in S3/R2/etc.
AWS_S3_ACCESS_KEY_ID = os.environ.get("AWS_S3_ACCESS_KEY_ID")
AWS_S3_SECRET_ACCESS_KEY = os.environ.get("AWS_S3_SECRET_ACCESS_KEY")
AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_STORAGE_BUCKET_NAME")
# When using media storage with a custom domain
# set this and set MEDIA_URL
AWS_S3_CUSTOM_DOMAIN = os.environ.get("AWS_S3_CUSTOM_DOMAIN")
# The endpoint URL is necessary for Cloudflare R2
AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", default=None)
if AWS_S3_ACCESS_KEY_ID and AWS_S3_SECRET_ACCESS_KEY and AWS_STORAGE_BUCKET_NAME:
    DEFAULT_FILE_STORAGE = "storages.backends.s3.S3Storage"


# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
# --------------------------------------------------------------------------
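For Cloudflare R2 specifically, the storage backend activates once the three credentials above are set, and the endpoint follows R2's account-scoped URL format. A sketch with hypothetical values, e.g. set as deployment secrets:

```
AWS_S3_ACCESS_KEY_ID=<r2-access-key-id>
AWS_S3_SECRET_ACCESS_KEY=<r2-secret-access-key>
AWS_STORAGE_BUCKET_NAME=sandiegopython-media
AWS_S3_ENDPOINT_URL=https://<account-id>.r2.cloudflarestorage.com
```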
2 changes: 1 addition & 1 deletion pythonsd/models.py
@@ -12,7 +12,7 @@ class Organizer(models.Model):
        help_text="Set to False to hide this organizer from the organizers page",
    )

    # For production, store the image in Cloud Storage (S3, Appwrite, etc.)
    # For production, store the image in Cloud Storage (S3, R2, Appwrite, etc.)
    photo = models.ImageField(
        upload_to="organizers/",
        help_text="Recommended size of 400*400px or larger square",
3 changes: 3 additions & 0 deletions requirements/deployment.txt
@@ -8,3 +8,6 @@ django-redis==5.4.0

# Email
django-anymail==10.3

# Cloud storage for media storage (S3, R2, etc.)
django-storages[s3]==1.14.3