✨ Add celery for notifications
Celery is now required by notifications-api-common to send notifications.
stevenbal committed Feb 2, 2024
1 parent 9ad58e1 commit 199cac9
Showing 8 changed files with 115 additions and 2 deletions.
3 changes: 3 additions & 0 deletions Dockerfile
@@ -47,10 +47,13 @@ RUN apt-get update && apt-get install -y --no-install-recommends \

COPY --from=backend-build /usr/local/lib/python3.10 /usr/local/lib/python3.10
COPY --from=backend-build /usr/local/bin/uwsgi /usr/local/bin/uwsgi
COPY --from=backend-build /usr/local/bin/celery /usr/local/bin/celery

# Stage 3.2 - Copy source code
WORKDIR /app
COPY ./bin/docker_start.sh /start.sh
COPY ./bin/wait_for_db.sh /wait_for_db.sh
COPY ./bin/celery_worker.sh /celery_worker.sh
RUN mkdir /app/log /app/config

# copy frontend build statics
34 changes: 34 additions & 0 deletions bin/celery_worker.sh
@@ -0,0 +1,34 @@
#!/bin/bash

set -euo pipefail

LOGLEVEL=${CELERY_LOGLEVEL:-INFO}

QUEUE=${1:-${CELERY_WORKER_QUEUE:=celery}}
WORKER_NAME=${2:-${CELERY_WORKER_NAME:="${QUEUE}"@%n}}

# Figure out abspath of this script
SCRIPT=$(readlink -f "$0")
SCRIPTPATH=$(dirname "$SCRIPT")

# wait for required services
${SCRIPTPATH}/wait_for_db.sh

# build up worker options array
worker_options=(
    "-Q$QUEUE"
    "-n$WORKER_NAME"
    "-l$LOGLEVEL"
    "-Ofair"
)

if [[ -v CELERY_WORKER_CONCURRENCY ]]; then
    echo "Using concurrency ${CELERY_WORKER_CONCURRENCY}"
    worker_options+=( "-c${CELERY_WORKER_CONCURRENCY}" )
fi

echo "Starting celery worker $WORKER_NAME with queue $QUEUE"
exec celery \
    --app objects \
    --workdir src \
    worker "${worker_options[@]}"
15 changes: 15 additions & 0 deletions bin/wait_for_db.sh
@@ -0,0 +1,15 @@
#!/bin/sh

set -e

# Wait for the database container
# See: https://docs.docker.com/compose/startup-order/
export PGHOST=${DB_HOST:-db}
export PGPORT=${DB_PORT:-5432}

until pg_isready; do
  >&2 echo "Waiting for database connection..."
  sleep 1
done

>&2 echo "Database is up."
19 changes: 18 additions & 1 deletion docker-compose-quickstart.yml
@@ -7,13 +7,30 @@ services:
      - POSTGRES_USER=${DB_USER:-objects}
      - POSTGRES_PASSWORD=${DB_PASSWORD:-objects}

  redis:
    image: redis

  web:
    image: maykinmedia/objects-api:latest
    environment:
    environment: &app-env
      - DJANGO_SETTINGS_MODULE=objects.conf.docker
      - SECRET_KEY=${SECRET_KEY:-1(@f(-6s_u(5fd&1sg^uvu2s(c-9sapw)1era8q&)g)h@cwxxg}
      - ALLOWED_HOSTS=*
      - CACHE_DEFAULT=redis:6379/0
      - CACHE_AXES=redis:6379/0
      - CELERY_BROKER_URL=redis://redis:6379/1
      - CELERY_RESULT_BACKEND=redis://redis:6379/1
      - CELERY_LOGLEVEL=DEBUG
      - CELERY_WORKER_CONCURRENCY=${CELERY_WORKER_CONCURRENCY:-4}
    ports:
      - 8000:8000
    depends_on:
      - db

  celery:
    image: maykinmedia/objects-api:latest
    environment: *app-env
    command: /celery_worker.sh
    depends_on:
      - db
      - redis
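With the quickstart file, something like the following (assuming Docker Compose v2 is available) brings up the stack and tails the new worker to confirm it connects to the redis broker:

docker compose -f docker-compose-quickstart.yml up -d
docker compose -f docker-compose-quickstart.yml logs -f celery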
20 changes: 19 additions & 1 deletion docker-compose.yml
@@ -9,16 +9,34 @@ services:
      - POSTGRES_USER=${DB_USER:-objects}
      - POSTGRES_PASSWORD=${DB_PASSWORD:-objects}

  redis:
    image: redis

  web:
    build: .
    environment:
    environment: &app-env
      - DJANGO_SETTINGS_MODULE=objects.conf.docker
      - SECRET_KEY=${SECRET_KEY:-1(@f(-6s_u(5fd&1sg^uvu2s(c-9sapw)1era8q&)g)h@cwxxg}
      - OBJECTS_SUPERUSER_USERNAME=admin
      - OBJECTS_SUPERUSER_PASSWORD=admin
      - OBJECTS_SUPERUSER_EMAIL=admin@localhost
      - ALLOWED_HOSTS=*
      - CACHE_DEFAULT=redis:6379/0
      - CACHE_AXES=redis:6379/0
      - CELERY_BROKER_URL=redis://redis:6379/1
      - CELERY_RESULT_BACKEND=redis://redis:6379/1
      - CELERY_LOGLEVEL=DEBUG
      - CELERY_WORKER_CONCURRENCY=${CELERY_WORKER_CONCURRENCY:-4}
    ports:
      - 8000:8000
    depends_on:
      - db

  celery:
    build: .
    image: maykinmedia/objects-api
    environment: *app-env
    command: /celery_worker.sh
    depends_on:
      - db
      - redis
8 changes: 8 additions & 0 deletions docs/installation/config.rst
@@ -90,6 +90,14 @@ Other settings
* ``TWO_FACTOR_PATCH_ADMIN``: Whether to use the 2 Factor Authentication login flow for
the admin or not. Default ``True``. You'll probably want to disable this when using OIDC.

Celery
-------

* ``CELERY_BROKER_URL``: the URL of the broker that will be used to send the notifications (default: ``redis://localhost:6379/1``).

* ``CELERY_RESULT_BACKEND``: the backend where the results of tasks will be stored (default: ``redis://localhost:6379/1``).


Initial superuser creation
--------------------------

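For deployments that do not use the bundled compose files, the two Celery settings above are read from the environment; an illustrative sketch with a non-default Redis host (the hostname is hypothetical):

CELERY_BROKER_URL=redis://redis.internal:6379/1
CELERY_RESULT_BACKEND=redis://redis.internal:6379/1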
12 changes: 12 additions & 0 deletions src/objects/celery.py
@@ -0,0 +1,12 @@
# SPDX-License-Identifier: EUPL-1.2
# Copyright (C) 2022 Dimpact
from celery import Celery

from objects.setup import setup_env

setup_env()

app = Celery("objects")

app.config_from_object("django.conf:settings", namespace="CELERY")
app.autodiscover_tasks()
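Since the worker script starts Celery with --app objects --workdir src, the same flags can be used to check that this module is importable and that a running worker responds (a quick sanity check, assuming the stack from the compose files is up):

celery --app objects --workdir src inspect ping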
6 changes: 6 additions & 0 deletions src/objects/conf/base.py
@@ -325,6 +325,12 @@
# Library settings
#

#
# CELERY
#
CELERY_BROKER_URL = config("CELERY_BROKER_URL", "redis://localhost:6379/1")
CELERY_RESULT_BACKEND = config("CELERY_RESULT_BACKEND", "redis://localhost:6379/1")


# Django-Admin-Index
ADMIN_INDEX_SHOW_REMAINING_APPS_TO_SUPERUSERS = False