| author | Matthew Lemon <y@yulqen.org> | 2024-05-13 17:26:25 +0100 |
| --- | --- | --- |
| committer | Matthew Lemon <y@yulqen.org> | 2024-05-13 17:26:25 +0100 |
| commit | efbbd480ddc62e695123d31c31d233b0df5155bd (patch) | |
| tree | bc2fb465edd5050d83c97f280b1aac8e023fe3e5 /compose | |
After first pre-commit processing
Diffstat (limited to 'compose')
29 files changed, 755 insertions, 0 deletions
diff --git a/compose/local/django/Dockerfile b/compose/local/django/Dockerfile
new file mode 100644
index 0000000..5f5f4e5
--- /dev/null
+++ b/compose/local/django/Dockerfile
@@ -0,0 +1,91 @@
+# define an alias for the specific python version used in this file.
+FROM docker.io/python:3.12.3-slim-bookworm as python
+
+# Python build stage
+FROM python as python-build-stage
+
+ARG BUILD_ENVIRONMENT=local
+
+# Install apt packages
+RUN apt-get update && apt-get install --no-install-recommends -y \
+  # dependencies for building Python packages
+  build-essential \
+  # psycopg dependencies
+  libpq-dev
+
+# Requirements are installed here to ensure they will be cached.
+COPY ./requirements .
+
+# Create Python Dependency and Sub-Dependency Wheels.
+RUN pip wheel --wheel-dir /usr/src/app/wheels \
+  -r ${BUILD_ENVIRONMENT}.txt
+
+
+# Python 'run' stage
+FROM python as python-run-stage
+
+ARG BUILD_ENVIRONMENT=local
+ARG APP_HOME=/app
+
+ENV PYTHONUNBUFFERED 1
+ENV PYTHONDONTWRITEBYTECODE 1
+ENV BUILD_ENV ${BUILD_ENVIRONMENT}
+
+WORKDIR ${APP_HOME}
+
+
+# devcontainer dependencies and utils
+RUN apt-get update && apt-get install --no-install-recommends -y \
+  sudo git bash-completion nano ssh
+
+# Create devcontainer user and add it to sudoers
+RUN groupadd --gid 1000 dev-user \
+  && useradd --uid 1000 --gid dev-user --shell /bin/bash --create-home dev-user \
+  && echo dev-user ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/dev-user \
+  && chmod 0440 /etc/sudoers.d/dev-user
+
+
+# Install required system dependencies
+RUN apt-get update && apt-get install --no-install-recommends -y \
+  # psycopg dependencies
+  libpq-dev \
+  # Translations dependencies
+  gettext \
+  # cleaning up unused files
+  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
+  && rm -rf /var/lib/apt/lists/*
+
+# All absolute dir copies ignore workdir instruction. All relative dir copies are wrt to the workdir instruction
+# copy python dependency wheels from python-build-stage
+COPY --from=python-build-stage /usr/src/app/wheels /wheels/
+
+# use wheels to install python dependencies
+RUN pip install --no-cache-dir --no-index --find-links=/wheels/ /wheels/* \
+  && rm -rf /wheels/
+
+COPY ./compose/production/django/entrypoint /entrypoint
+RUN sed -i 's/\r$//g' /entrypoint
+RUN chmod +x /entrypoint
+
+COPY ./compose/local/django/start /start
+RUN sed -i 's/\r$//g' /start
+RUN chmod +x /start
+
+
+COPY ./compose/local/django/celery/worker/start /start-celeryworker
+RUN sed -i 's/\r$//g' /start-celeryworker
+RUN chmod +x /start-celeryworker
+
+COPY ./compose/local/django/celery/beat/start /start-celerybeat
+RUN sed -i 's/\r$//g' /start-celerybeat
+RUN chmod +x /start-celerybeat
+
+COPY ./compose/local/django/celery/flower/start /start-flower
+RUN sed -i 's/\r$//g' /start-flower
+RUN chmod +x /start-flower
+
+
+# copy application code to WORKDIR
+COPY . ${APP_HOME}
+
+ENTRYPOINT ["/entrypoint"]
diff --git a/compose/local/django/celery/beat/start b/compose/local/django/celery/beat/start
new file mode 100644
index 0000000..8adc489
--- /dev/null
+++ b/compose/local/django/celery/beat/start
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+set -o errexit
+set -o nounset
+
+
+rm -f './celerybeat.pid'
+exec watchfiles --filter python celery.__main__.main --args '-A config.celery_app beat -l INFO'
diff --git a/compose/local/django/celery/flower/start b/compose/local/django/celery/flower/start
new file mode 100644
index 0000000..b4783d2
--- /dev/null
+++ b/compose/local/django/celery/flower/start
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+set -o errexit
+set -o nounset
+
+exec watchfiles --filter python celery.__main__.main \
+  --args \
+  "-A config.celery_app -b \"${CELERY_BROKER_URL}\" flower --basic_auth=\"${CELERY_FLOWER_USER}:${CELERY_FLOWER_PASSWORD}\""
diff --git a/compose/local/django/celery/worker/start b/compose/local/django/celery/worker/start
new file mode 100644
index 0000000..183a801
--- /dev/null
+++ b/compose/local/django/celery/worker/start
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+set -o errexit
+set -o nounset
+
+
+exec watchfiles --filter python celery.__main__.main --args '-A config.celery_app worker -l INFO'
diff --git a/compose/local/django/start b/compose/local/django/start
new file mode 100644
index 0000000..ba96db4
--- /dev/null
+++ b/compose/local/django/start
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+
+python manage.py migrate
+exec python manage.py runserver_plus 0.0.0.0:8000
diff --git a/compose/local/docs/Dockerfile b/compose/local/docs/Dockerfile
new file mode 100644
index 0000000..3556504
--- /dev/null
+++ b/compose/local/docs/Dockerfile
@@ -0,0 +1,62 @@
+# define an alias for the specific python version used in this file.
+FROM docker.io/python:3.12.3-slim-bookworm as python
+
+
+# Python build stage
+FROM python as python-build-stage
+
+ENV PYTHONDONTWRITEBYTECODE 1
+
+RUN apt-get update && apt-get install --no-install-recommends -y \
+  # dependencies for building Python packages
+  build-essential \
+  # psycopg dependencies
+  libpq-dev \
+  # cleaning up unused files
+  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
+  && rm -rf /var/lib/apt/lists/*
+
+# Requirements are installed here to ensure they will be cached.
+COPY ./requirements /requirements
+
+# create python dependency wheels
+RUN pip wheel --no-cache-dir --wheel-dir /usr/src/app/wheels \
+  -r /requirements/local.txt -r /requirements/production.txt \
+  && rm -rf /requirements
+
+
+# Python 'run' stage
+FROM python as python-run-stage
+
+ARG BUILD_ENVIRONMENT
+ENV PYTHONUNBUFFERED 1
+ENV PYTHONDONTWRITEBYTECODE 1
+
+RUN apt-get update && apt-get install --no-install-recommends -y \
+  # To run the Makefile
+  make \
+  # psycopg dependencies
+  libpq-dev \
+  # Translations dependencies
+  gettext \
+  # Uncomment below lines to enable Sphinx output to latex and pdf
+  # texlive-latex-recommended \
+  # texlive-fonts-recommended \
+  # texlive-latex-extra \
+  # latexmk \
+  # cleaning up unused files
+  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
+  && rm -rf /var/lib/apt/lists/*
+
+# copy python dependency wheels from python-build-stage
+COPY --from=python-build-stage /usr/src/app/wheels /wheels
+
+# use wheels to install python dependencies
+RUN pip install --no-cache /wheels/* \
+  && rm -rf /wheels
+
+COPY ./compose/local/docs/start /start-docs
+RUN sed -i 's/\r$//g' /start-docs
+RUN chmod +x /start-docs
+
+WORKDIR /docs
diff --git a/compose/local/docs/start b/compose/local/docs/start
new file mode 100644
index 0000000..96a94f5
--- /dev/null
+++ b/compose/local/docs/start
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+exec make livehtml
diff --git a/compose/production/aws/Dockerfile b/compose/production/aws/Dockerfile
new file mode 100644
index 0000000..36eea7f
--- /dev/null
+++ b/compose/production/aws/Dockerfile
@@ -0,0 +1,9 @@
+FROM docker.io/garland/aws-cli-docker:1.16.140
+
+COPY ./compose/production/aws/maintenance /usr/local/bin/maintenance
+COPY ./compose/production/postgres/maintenance/_sourced /usr/local/bin/maintenance/_sourced
+
+RUN chmod +x /usr/local/bin/maintenance/*
+
+RUN mv /usr/local/bin/maintenance/* /usr/local/bin \
+  && rmdir /usr/local/bin/maintenance
diff --git a/compose/production/aws/maintenance/download b/compose/production/aws/maintenance/download
new file mode 100644
index 0000000..9561d91
--- /dev/null
+++ b/compose/production/aws/maintenance/download
@@ -0,0 +1,23 @@
+#!/bin/sh
+
+### Download a file from your Amazon S3 bucket to the postgres /backups folder
+###
+### Usage:
+###     $ docker compose -f production.yml run --rm awscli <1>
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+working_dir="$(dirname ${0})"
+source "${working_dir}/_sourced/constants.sh"
+source "${working_dir}/_sourced/messages.sh"
+
+export AWS_ACCESS_KEY_ID="${DJANGO_AWS_ACCESS_KEY_ID}"
+export AWS_SECRET_ACCESS_KEY="${DJANGO_AWS_SECRET_ACCESS_KEY}"
+export AWS_STORAGE_BUCKET_NAME="${DJANGO_AWS_STORAGE_BUCKET_NAME}"
+
+
+aws s3 cp s3://${AWS_STORAGE_BUCKET_NAME}${BACKUP_DIR_PATH}/${1} ${BACKUP_DIR_PATH}/${1}
+
+message_success "Finished downloading ${1}."
diff --git a/compose/production/aws/maintenance/upload b/compose/production/aws/maintenance/upload
new file mode 100644
index 0000000..73c1b9b
--- /dev/null
+++ b/compose/production/aws/maintenance/upload
@@ -0,0 +1,29 @@
+#!/bin/sh
+
+### Upload the /backups folder to Amazon S3
+###
+### Usage:
+###     $ docker compose -f production.yml run --rm awscli upload
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+working_dir="$(dirname ${0})"
+source "${working_dir}/_sourced/constants.sh"
+source "${working_dir}/_sourced/messages.sh"
+
+export AWS_ACCESS_KEY_ID="${DJANGO_AWS_ACCESS_KEY_ID}"
+export AWS_SECRET_ACCESS_KEY="${DJANGO_AWS_SECRET_ACCESS_KEY}"
+export AWS_STORAGE_BUCKET_NAME="${DJANGO_AWS_STORAGE_BUCKET_NAME}"
+
+
+message_info "Upload the backups directory to S3 bucket {$AWS_STORAGE_BUCKET_NAME}"
+
+aws s3 cp ${BACKUP_DIR_PATH} s3://${AWS_STORAGE_BUCKET_NAME}${BACKUP_DIR_PATH} --recursive
+
+message_info "Cleaning the directory ${BACKUP_DIR_PATH}"
+
+rm -rf ${BACKUP_DIR_PATH}/*
+
+message_success "Finished uploading and cleaning."
diff --git a/compose/production/django/Dockerfile b/compose/production/django/Dockerfile
new file mode 100644
index 0000000..502c16e
--- /dev/null
+++ b/compose/production/django/Dockerfile
@@ -0,0 +1,96 @@
+
+# define an alias for the specific python version used in this file.
+FROM docker.io/python:3.12.3-slim-bookworm as python
+
+# Python build stage
+FROM python as python-build-stage
+
+ARG BUILD_ENVIRONMENT=production
+
+# Install apt packages
+RUN apt-get update && apt-get install --no-install-recommends -y \
+  # dependencies for building Python packages
+  build-essential \
+  # psycopg dependencies
+  libpq-dev
+
+# Requirements are installed here to ensure they will be cached.
+COPY ./requirements .
+
+# Create Python Dependency and Sub-Dependency Wheels.
+RUN pip wheel --wheel-dir /usr/src/app/wheels \
+  -r ${BUILD_ENVIRONMENT}.txt
+
+
+# Python 'run' stage
+FROM python as python-run-stage
+
+ARG BUILD_ENVIRONMENT=production
+ARG APP_HOME=/app
+
+ENV PYTHONUNBUFFERED 1
+ENV PYTHONDONTWRITEBYTECODE 1
+ENV BUILD_ENV ${BUILD_ENVIRONMENT}
+
+WORKDIR ${APP_HOME}
+
+RUN addgroup --system django \
+  && adduser --system --ingroup django django
+
+
+# Install required system dependencies
+RUN apt-get update && apt-get install --no-install-recommends -y \
+  # psycopg dependencies
+  libpq-dev \
+  # Translations dependencies
+  gettext \
+  # cleaning up unused files
+  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
+  && rm -rf /var/lib/apt/lists/*
+
+# All absolute dir copies ignore workdir instruction. All relative dir copies are wrt to the workdir instruction
+# copy python dependency wheels from python-build-stage
+COPY --from=python-build-stage /usr/src/app/wheels /wheels/
+
+# use wheels to install python dependencies
+RUN pip install --no-cache-dir --no-index --find-links=/wheels/ /wheels/* \
+  && rm -rf /wheels/
+
+
+COPY --chown=django:django ./compose/production/django/entrypoint /entrypoint
+RUN sed -i 's/\r$//g' /entrypoint
+RUN chmod +x /entrypoint
+
+
+COPY --chown=django:django ./compose/production/django/start /start
+RUN sed -i 's/\r$//g' /start
+RUN chmod +x /start
+COPY --chown=django:django ./compose/production/django/celery/worker/start /start-celeryworker
+RUN sed -i 's/\r$//g' /start-celeryworker
+RUN chmod +x /start-celeryworker
+
+
+COPY --chown=django:django ./compose/production/django/celery/beat/start /start-celerybeat
+RUN sed -i 's/\r$//g' /start-celerybeat
+RUN chmod +x /start-celerybeat
+
+
+COPY --chown=django:django ./compose/production/django/celery/flower/start /start-flower
+RUN sed -i 's/\r$//g' /start-flower
+RUN chmod +x /start-flower
+
+
+# copy application code to WORKDIR
+COPY --chown=django:django . ${APP_HOME}
+
+# make django owner of the WORKDIR directory as well.
+RUN chown -R django:django ${APP_HOME}
+
+USER django
+
+RUN DATABASE_URL="" \
+  CELERY_BROKER_URL="" \
+  DJANGO_SETTINGS_MODULE="config.settings.test" \
+  python manage.py compilemessages
+
+ENTRYPOINT ["/entrypoint"]
diff --git a/compose/production/django/celery/beat/start b/compose/production/django/celery/beat/start
new file mode 100644
index 0000000..42ddca9
--- /dev/null
+++ b/compose/production/django/celery/beat/start
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+
+exec celery -A config.celery_app beat -l INFO
diff --git a/compose/production/django/celery/flower/start b/compose/production/django/celery/flower/start
new file mode 100644
index 0000000..4180d67
--- /dev/null
+++ b/compose/production/django/celery/flower/start
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+set -o errexit
+set -o nounset
+
+
+exec celery \
+  -A config.celery_app \
+  -b "${CELERY_BROKER_URL}" \
+  flower \
+  --basic_auth="${CELERY_FLOWER_USER}:${CELERY_FLOWER_PASSWORD}"
diff --git a/compose/production/django/celery/worker/start b/compose/production/django/celery/worker/start
new file mode 100644
index 0000000..af0c8f7
--- /dev/null
+++ b/compose/production/django/celery/worker/start
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+
+exec celery -A config.celery_app worker -l INFO
diff --git a/compose/production/django/entrypoint b/compose/production/django/entrypoint
new file mode 100644
index 0000000..249d8d9
--- /dev/null
+++ b/compose/production/django/entrypoint
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+
+
+# N.B. If only .env files supported variable expansion...
+export CELERY_BROKER_URL="${REDIS_URL}"
+
+
+if [ -z "${POSTGRES_USER}" ]; then
+  base_postgres_image_default_user='postgres'
+  export POSTGRES_USER="${base_postgres_image_default_user}"
+fi
+export DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}"
+
+python << END
+import sys
+import time
+
+import psycopg
+
+suggest_unrecoverable_after = 30
+start = time.time()
+
+while True:
+    try:
+        psycopg.connect(
+            dbname="${POSTGRES_DB}",
+            user="${POSTGRES_USER}",
+            password="${POSTGRES_PASSWORD}",
+            host="${POSTGRES_HOST}",
+            port="${POSTGRES_PORT}",
+        )
+        break
+    except psycopg.OperationalError as error:
+        sys.stderr.write("Waiting for PostgreSQL to become available...\n")
+
+        if time.time() - start > suggest_unrecoverable_after:
+            sys.stderr.write("  This is taking longer than expected. The following exception may be indicative of an unrecoverable error: '{}'\n".format(error))
+
+        time.sleep(1)
+END
+
+>&2 echo 'PostgreSQL is available'
+
+exec "$@"
diff --git a/compose/production/django/start b/compose/production/django/start
new file mode 100644
index 0000000..97216fa
--- /dev/null
+++ b/compose/production/django/start
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+
+python /app/manage.py collectstatic --noinput
+
+exec /usr/local/bin/gunicorn config.wsgi --bind 0.0.0.0:5000 --chdir=/app
diff --git a/compose/production/nginx/Dockerfile b/compose/production/nginx/Dockerfile
new file mode 100644
index 0000000..ec2ad35
--- /dev/null
+++ b/compose/production/nginx/Dockerfile
@@ -0,0 +1,2 @@
+FROM docker.io/nginx:1.17.8-alpine
+COPY ./compose/production/nginx/default.conf /etc/nginx/conf.d/default.conf
diff --git a/compose/production/nginx/default.conf b/compose/production/nginx/default.conf
new file mode 100644
index 0000000..562dba8
--- /dev/null
+++ b/compose/production/nginx/default.conf
@@ -0,0 +1,7 @@
+server {
+    listen 80;
+    server_name localhost;
+    location /media/ {
+        alias /usr/share/nginx/media/;
+    }
+}
diff --git a/compose/production/postgres/Dockerfile b/compose/production/postgres/Dockerfile
new file mode 100644
index 0000000..176a5f1
--- /dev/null
+++ b/compose/production/postgres/Dockerfile
@@ -0,0 +1,6 @@
+FROM docker.io/postgres:16
+
+COPY ./compose/production/postgres/maintenance /usr/local/bin/maintenance
+RUN chmod +x /usr/local/bin/maintenance/*
+RUN mv /usr/local/bin/maintenance/* /usr/local/bin \
+  && rmdir /usr/local/bin/maintenance
diff --git a/compose/production/postgres/maintenance/_sourced/constants.sh b/compose/production/postgres/maintenance/_sourced/constants.sh
new file mode 100644
index 0000000..6ca4f0c
--- /dev/null
+++ b/compose/production/postgres/maintenance/_sourced/constants.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+
+BACKUP_DIR_PATH='/backups'
+BACKUP_FILE_PREFIX='backup'
diff --git a/compose/production/postgres/maintenance/_sourced/countdown.sh b/compose/production/postgres/maintenance/_sourced/countdown.sh
new file mode 100644
index 0000000..e6cbfb6
--- /dev/null
+++ b/compose/production/postgres/maintenance/_sourced/countdown.sh
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+
+
+countdown() {
+  declare desc="A simple countdown. Source: https://superuser.com/a/611582"
+  local seconds="${1}"
+  local d=$(($(date +%s) + "${seconds}"))
+  while [ "$d" -ge `date +%s` ]; do
+    echo -ne "$(date -u --date @$(($d - `date +%s`)) +%H:%M:%S)\r";
+    sleep 0.1
+  done
+}
diff --git a/compose/production/postgres/maintenance/_sourced/messages.sh b/compose/production/postgres/maintenance/_sourced/messages.sh
new file mode 100644
index 0000000..f6be756
--- /dev/null
+++ b/compose/production/postgres/maintenance/_sourced/messages.sh
@@ -0,0 +1,41 @@
+#!/usr/bin/env bash
+
+
+message_newline() {
+  echo
+}
+
+message_debug()
+{
+  echo -e "DEBUG: ${@}"
+}
+
+message_welcome()
+{
+  echo -e "\e[1m${@}\e[0m"
+}
+
+message_warning()
+{
+  echo -e "\e[33mWARNING\e[0m: ${@}"
+}
+
+message_error()
+{
+  echo -e "\e[31mERROR\e[0m: ${@}"
+}
+
+message_info()
+{
+  echo -e "\e[37mINFO\e[0m: ${@}"
+}
+
+message_suggestion()
+{
+  echo -e "\e[33mSUGGESTION\e[0m: ${@}"
+}
+
+message_success()
+{
+  echo -e "\e[32mSUCCESS\e[0m: ${@}"
+}
diff --git a/compose/production/postgres/maintenance/_sourced/yes_no.sh b/compose/production/postgres/maintenance/_sourced/yes_no.sh
new file mode 100644
index 0000000..fd9cae1
--- /dev/null
+++ b/compose/production/postgres/maintenance/_sourced/yes_no.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+
+yes_no() {
+  declare desc="Prompt for confirmation. \$\"\{1\}\": confirmation message."
+  local arg1="${1}"
+
+  local response=
+  read -r -p "${arg1} (y/[n])? " response
+  if [[ "${response}" =~ ^[Yy]$ ]]
+  then
+    exit 0
+  else
+    exit 1
+  fi
+}
diff --git a/compose/production/postgres/maintenance/backup b/compose/production/postgres/maintenance/backup
new file mode 100644
index 0000000..f72304c
--- /dev/null
+++ b/compose/production/postgres/maintenance/backup
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+
+
+### Create a database backup.
+###
+### Usage:
+###     $ docker compose -f <environment>.yml (exec |run --rm) postgres backup
+
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+
+working_dir="$(dirname ${0})"
+source "${working_dir}/_sourced/constants.sh"
+source "${working_dir}/_sourced/messages.sh"
+
+
+message_welcome "Backing up the '${POSTGRES_DB}' database..."
+
+
+if [[ "${POSTGRES_USER}" == "postgres" ]]; then
+  message_error "Backing up as 'postgres' user is not supported. Assign 'POSTGRES_USER' env with another one and try again."
+  exit 1
+fi
+
+export PGHOST="${POSTGRES_HOST}"
+export PGPORT="${POSTGRES_PORT}"
+export PGUSER="${POSTGRES_USER}"
+export PGPASSWORD="${POSTGRES_PASSWORD}"
+export PGDATABASE="${POSTGRES_DB}"
+
+backup_filename="${BACKUP_FILE_PREFIX}_$(date +'%Y_%m_%dT%H_%M_%S').sql.gz"
+pg_dump | gzip > "${BACKUP_DIR_PATH}/${backup_filename}"
+
+
+message_success "'${POSTGRES_DB}' database backup '${backup_filename}' has been created and placed in '${BACKUP_DIR_PATH}'."
diff --git a/compose/production/postgres/maintenance/backups b/compose/production/postgres/maintenance/backups
new file mode 100644
index 0000000..a18937d
--- /dev/null
+++ b/compose/production/postgres/maintenance/backups
@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+
+
+### View backups.
+###
+### Usage:
+###     $ docker compose -f <environment>.yml (exec |run --rm) postgres backups
+
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+
+working_dir="$(dirname ${0})"
+source "${working_dir}/_sourced/constants.sh"
+source "${working_dir}/_sourced/messages.sh"
+
+
+message_welcome "These are the backups you have got:"
+
+ls -lht "${BACKUP_DIR_PATH}"
diff --git a/compose/production/postgres/maintenance/restore b/compose/production/postgres/maintenance/restore
new file mode 100644
index 0000000..c68f17d
--- /dev/null
+++ b/compose/production/postgres/maintenance/restore
@@ -0,0 +1,55 @@
+#!/usr/bin/env bash
+
+
+### Restore database from a backup.
+###
+### Parameters:
+###     <1> filename of an existing backup.
+###
+### Usage:
+###     $ docker compose -f <environment>.yml (exec |run --rm) postgres restore <1>
+
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+
+working_dir="$(dirname ${0})"
+source "${working_dir}/_sourced/constants.sh"
+source "${working_dir}/_sourced/messages.sh"
+
+
+if [[ -z ${1+x} ]]; then
+  message_error "Backup filename is not specified yet it is a required parameter. Make sure you provide one and try again."
+  exit 1
+fi
+backup_filename="${BACKUP_DIR_PATH}/${1}"
+if [[ ! -f "${backup_filename}" ]]; then
+  message_error "No backup with the specified filename found. Check out the 'backups' maintenance script output to see if there is one and try again."
+  exit 1
+fi
+
+message_welcome "Restoring the '${POSTGRES_DB}' database from the '${backup_filename}' backup..."
+
+if [[ "${POSTGRES_USER}" == "postgres" ]]; then
+  message_error "Restoring as 'postgres' user is not supported. Assign 'POSTGRES_USER' env with another one and try again."
+  exit 1
+fi
+
+export PGHOST="${POSTGRES_HOST}"
+export PGPORT="${POSTGRES_PORT}"
+export PGUSER="${POSTGRES_USER}"
+export PGPASSWORD="${POSTGRES_PASSWORD}"
+export PGDATABASE="${POSTGRES_DB}"
+
+message_info "Dropping the database..."
+dropdb "${PGDATABASE}"
+
+message_info "Creating a new database..."
+createdb --owner="${POSTGRES_USER}"
+
+message_info "Applying the backup to the new database..."
+gunzip -c "${backup_filename}" | psql "${POSTGRES_DB}"
+
+message_success "The '${POSTGRES_DB}' database has been restored from the '${backup_filename}' backup."
diff --git a/compose/production/postgres/maintenance/rmbackup b/compose/production/postgres/maintenance/rmbackup
new file mode 100644
index 0000000..fdfd20e
--- /dev/null
+++ b/compose/production/postgres/maintenance/rmbackup
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+### Remove a database backup.
+###
+### Parameters:
+###     <1> filename of a backup to remove.
+###
+### Usage:
+###     $ docker-compose -f <environment>.yml (exec |run --rm) postgres rmbackup <1>
+
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+
+working_dir="$(dirname ${0})"
+source "${working_dir}/_sourced/constants.sh"
+source "${working_dir}/_sourced/messages.sh"
+
+
+if [[ -z ${1+x} ]]; then
+  message_error "Backup filename is not specified yet it is a required parameter. Make sure you provide one and try again."
+  exit 1
+fi
+backup_filename="${BACKUP_DIR_PATH}/${1}"
+if [[ ! -f "${backup_filename}" ]]; then
+  message_error "No backup with the specified filename found. Check out the 'backups' maintenance script output to see if there is one and try again."
+  exit 1
+fi
+
+message_welcome "Removing the '${backup_filename}' backup file..."
+
+rm -r "${backup_filename}"
+
+message_success "The '${backup_filename}' database backup has been removed."
diff --git a/compose/production/traefik/Dockerfile b/compose/production/traefik/Dockerfile
new file mode 100644
index 0000000..d54bf27
--- /dev/null
+++ b/compose/production/traefik/Dockerfile
@@ -0,0 +1,5 @@
+FROM docker.io/traefik:2.11.2
+RUN mkdir -p /etc/traefik/acme \
+  && touch /etc/traefik/acme/acme.json \
+  && chmod 600 /etc/traefik/acme/acme.json
+COPY ./compose/production/traefik/traefik.yml /etc/traefik
diff --git a/compose/production/traefik/traefik.yml b/compose/production/traefik/traefik.yml
new file mode 100644
index 0000000..4c274d1
--- /dev/null
+++ b/compose/production/traefik/traefik.yml
@@ -0,0 +1,75 @@
+log:
+  level: INFO
+
+entryPoints:
+  web:
+    # http
+    address: ':80'
+    http:
+      # https://doc.traefik.io/traefik/routing/entrypoints/#entrypoint
+      redirections:
+        entryPoint:
+          to: web-secure
+
+  web-secure:
+    # https
+    address: ':443'
+
+  flower:
+    address: ':5555'
+
+certificatesResolvers:
+  letsencrypt:
+    # https://doc.traefik.io/traefik/https/acme/#lets-encrypt
+    acme:
+      email: 'y@yulqen.org'
+      storage: /etc/traefik/acme/acme.json
+      # https://doc.traefik.io/traefik/https/acme/#httpchallenge
+      httpChallenge:
+        entryPoint: web
+
+http:
+  routers:
+    web-secure-router:
+      rule: 'Host(`resources.joannalemon.com`)'
+      entryPoints:
+        - web-secure
+      middlewares:
+        - csrf
+      service: django
+      tls:
+        # https://doc.traefik.io/traefik/routing/routers/#certresolver
+        certResolver: letsencrypt
+
+    flower-secure-router:
+      rule: 'Host(`resources.joannalemon.com`)'
+      entryPoints:
+        - flower
+      service: flower
+      tls:
+        # https://doc.traefik.io/traefik/master/routing/routers/#certresolver
+        certResolver: letsencrypt
+
+  middlewares:
+    csrf:
+      # https://doc.traefik.io/traefik/master/middlewares/http/headers/#hostsproxyheaders
+      # https://docs.djangoproject.com/en/dev/ref/csrf/#ajax
+      headers:
+        hostsProxyHeaders: ['X-CSRFToken']
+
+  services:
+    django:
+      loadBalancer:
+        servers:
+          - url: http://django:5000
+
+    flower:
+      loadBalancer:
+        servers:
+          - url: http://flower:5555
+
+providers:
+  # https://doc.traefik.io/traefik/master/providers/file/
+  file:
+    filename: /etc/traefik/traefik.yml
+    watch: true
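For reference, these images are driven by compose files at the repository root, which are not part of this diff. A minimal usage sketch follows, assuming the usual cookiecutter-django layout with `local.yml` and `production.yml`; only `production.yml` is actually named in the scripts above, so the `local.yml` commands are an assumption.

```sh
# Local development (assumed local.yml): build the images under compose/local/ and start the stack.
docker compose -f local.yml build
docker compose -f local.yml up -d

# Production: build and start, then run the maintenance scripts installed by
# compose/production/postgres/Dockerfile and compose/production/aws/Dockerfile
# (invocations taken from the usage headers in each script).
docker compose -f production.yml build
docker compose -f production.yml up -d
docker compose -f production.yml exec postgres backup
docker compose -f production.yml run --rm awscli upload
```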