Skip to content
Snippets Groups Projects
Commit c7f81221 authored by Maxim Scheremetjew's avatar Maxim Scheremetjew
Browse files

feat: Integrated Celery.

parent 1d98eee4
No related branches found
No related tags found
1 merge request!7feat: Add distance app
......@@ -21,7 +21,7 @@ RUN apt-get update && \
libpq-dev \
python-dev
COPY requirements.txt $DIRPATH/
RUN mkdir -pm 0600 ~/.ssh && ssh-keyscan github.com >> ~/.ssh/known_hosts
RUN --mount=type=ssh \
pip install --no-cache-dir --upgrade pip && \
......
"""Settings for the Levenshtein distance service."""
import logging.config

from environs import Env

from common.logging_config import get_logging_config

# Single Env reader used for every environment-variable lookup in this module.
env = Env()

# Logging must be the first thing configured so that any log records emitted
# while the rest of the settings load are captured with the right handlers.
LOGGING = get_logging_config(
    env,
    loggers_to_silent=[
        # The following loggers emit a lot of DEBUG entries and we only care
        # about errors. To see their DEBUG output, use the CUSTOM_LOGGING
        # environment variable instead.
        "urllib3.util.retry",
        "urllib3.connectionpool",
    ],
)
logging.config.dictConfig(LOGGING)

# Base URL of the UniProt REST API; overridable for testing/staging.
UNIPROT_RESTAPI_ENDPOINT = env.str("UNIPROT_RESTAPI_ENDPOINT", default="https://rest.uniprot.org")
......@@ -164,7 +164,7 @@ pidfile /var/run/redis_6379.pid
# verbose (many rarely useful info, but not a mess like the debug level)
# notice (moderately verbose, what you want in production probably)
# warning (only very important / critical messages are logged)
loglevel debug
# Specify the log file name. Also the empty string can be used to force
# Redis to log on the standard output. Note that if you use standard
......@@ -670,7 +670,7 @@ slave-lazy-flush no
#
# Please check http://redis.io/topics/persistence for more information.
appendonly yes
# The name of the append only file (default: "appendonly.aof")
......
......@@ -12,14 +12,18 @@ services:
- .:/var/levenshtein-distance-service
depends_on:
- postgres
- celery_worker
- redis_cache
networks:
- microservices
environment:
- DEBUG=True
- SECRET_KEY=blahe9w5c92)o5x%!awwfhduln=06xl%u_*(x%)_7s9y3v3f971tek
# Logging
- CONSOLE_LOGGING_HANDLER_MIN_LEVEL=DEBUG
- CUSTOM_LOGGING
# Celery configuration
- CELERY_BROKER_URL=amqp://admin:mypass@rabbitmq
postgres:
image: postgres:13.4
restart: always
......@@ -31,16 +35,50 @@ services:
- POSTGRES_DB=distance_service
- POSTGRES_USER=maxim
- POSTGRES_PASSWORD=maxim
# For dev purposes only
rabbitmq:
image: rabbitmq:3.6-management
hostname: rabbitmq
environment:
- RABBITMQ_DEFAULT_USER=admin
- RABBITMQ_DEFAULT_PASS=mypass
ports:
- "5672:5672"
- "15672:15672"
networks:
- microservices
celery_worker:
image: mscheremetjew/levenshtein-distance-service
command: celery -A webapp worker --loglevel DEBUG --concurrency 4 -E
healthcheck:
test: [ "CMD", "celery", "status" ]
interval: 1m
timeout: 5s
retries: 3
volumes:
- .:/var/levenshtein-distance-service
depends_on:
- postgres
- rabbitmq
networks:
- microservices
environment:
- DEBUG=True
- SECRET_KEY=blahe9w5c92)o5x%!awwfhduln=06xl%u_*(x%)_7s9y3v3f971tek
# Logging
- CONSOLE_LOGGING_HANDLER_MIN_LEVEL=DEBUG
- CUSTOM_LOGGING
# Celery configuration
- CELERY_BROKER_URL=amqp://admin:mypass@rabbitmq
redis_cache:
image: "redis:6.2.12-alpine"
networks:
- microservices
ports:
- "6379:6379"
volumes:
- ./config/redis/redis.conf:/redis.conf
command: [ "redis-server", "/redis.conf" ]
networks:
- microservices
networks:
microservices:
\ No newline at end of file
......@@ -2,5 +2,5 @@ from django.apps import AppConfig
class RegistrationConfig(AppConfig):
    """Django application configuration for the ``registration`` app.

    The diff residue had both the old single-quoted and new double-quoted
    assignments for each attribute; only the post-image (double-quoted)
    values are kept here, so each attribute is assigned exactly once.
    """

    # Use 64-bit auto-incrementing primary keys for models in this app.
    default_auto_field = "django.db.models.BigAutoField"
    name = "registration"
"""webapp module"""
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
__all__ = ("celery_app",)
import logging
import os

from celery import Celery
from django.conf import settings

logger = logging.getLogger(__name__)

# Set the default Django settings module for the 'celery' program.
# NOTE: this must happen BEFORE the Celery app reads django.conf.settings
# below, otherwise Django settings would not be configured yet.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "webapp.settings")

# Instantiate the Celery application for this project.
app = Celery("webapp")
# Pull the broker/result-backend/worker options from the CELERY_CONF dict
# defined in webapp.settings (lower-case Celery 4+ setting names).
app.config_from_object(settings.CELERY_CONF)

# Load task modules (tasks.py) from all registered Django apps.
app.autodiscover_tasks()
......@@ -10,6 +10,7 @@ For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import logging.config
from multiprocessing import cpu_count
from pathlib import Path
from environs import Env
......@@ -30,6 +31,7 @@ SECRET_KEY = env.str("SECRET_KEY", default="fake-secret")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
TESTING = env.bool("TESTING", default=False)
# The logging must to be the first thing to configure to get the logs raised here
LOGGING = get_logging_config(
......@@ -57,6 +59,8 @@ INSTALLED_APPS = [
"django.contrib.staticfiles",
"authentication",
"registration",
"distance_app",
"django_celery_results",
]
MIDDLEWARE = [
......@@ -162,3 +166,34 @@ DJANGO_SUPERUSER_USERNAME = "admin"
DJANGO_SUPERUSER_EMAIL = "admin@group.com"

# Use the project's custom user model instead of django.contrib.auth.User.
AUTH_USER_MODEL = "authentication.User"

# Celery settings
# Note revised lower-case settings, changed at Celery 4.0:
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#new-lowercase-settings
BROKER_URL = env.str("CELERY_BROKER_URL", default="amqp://")
# If testing, send Celery tasks to an in-memory queue (which should not be read by any workers).
if TESTING:
    BROKER_URL = "memory://localhost/"

# Passed verbatim to app.config_from_object() in webapp/celery.py.
CELERY_CONF = {
    "broker_url": BROKER_URL,
    # Store task results in the database via django_celery_results
    # (listed in INSTALLED_APPS).
    "result_backend": "django-db",
    "accept_content": ["json"],
    "task_serializer": "json",
    # Worker pool size scales with the host CPU count; the per-CPU factor
    # defaults to 4 and is tunable via CELERY_WORKER_CONCURRENCY_PER_CPU.
    "worker_concurrency": cpu_count() * env.int("CELERY_WORKER_CONCURRENCY_PER_CPU", 4),
    # Keep our own dictConfig-based logging instead of Celery's root logger.
    "worker_hijack_root_logger": False,
}

# Base URL of the UniProt REST API; overridable for testing/staging.
UNIPROT_RESTAPI_ENDPOINT = env.str("UNIPROT_RESTAPI_ENDPOINT", default="https://rest.uniprot.org")

# Django cache backed by the redis_cache service (see docker-compose);
# NOTE(review): host name "redis_cache" is hard-coded — presumably only the
# docker network resolves it; confirm before deploying outside compose.
CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": "redis://redis_cache:6379/1",
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis.client.DefaultClient",
        },
    }
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment