robust background task queue for Django


Keywords
celery, django, postgresql, queue
Install
pip install django-robust==0.5.0

Documentation

django-robust

install

$ pip install django-robust

# settings.py
INSTALLED_APPS = [
    'robust.apps.RobustConfig',
]

# robust requires PostgreSQL; the worker listens to postgres NOTIFY
# (see ROBUST_NOTIFY_TIMEOUT below)
DB = {
    'ENGINE': 'django.db.backends.postgresql_psycopg2',
}

DATABASES = {
    'default': DB,
    'robust_ratelimit': DB,  # <- must point at the same database
}
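robust stores tasks in the database, so apply migrations after installing (standard Django step):

$ ./manage.py migrate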

define tasks

from robust import task

@task()
def heavy_stuff(foo):
    pass

@task(tags=['service1'])
def talk_to_external_service():
    # the 'service1' tag ties into ROBUST_RATE_LIMIT (see settings below)
    pass

@task(bind=True, retries=3)
def retry_me(self):
    # bind=True passes the task instance as self, enabling self.retry()
    self.retry()

schedule tasks

from .tasks import heavy_stuff

heavy_stuff.delay(foo='bar')
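In tests you can skip the queue entirely; a minimal sketch, assuming the ROBUST_ALWAYS_EAGER setting (see settings below) is picked up via override_settings:

from django.test import TestCase, override_settings

from .tasks import heavy_stuff

@override_settings(ROBUST_ALWAYS_EAGER=True)
class HeavyStuffTests(TestCase):
    def test_runs_inline(self):
        # with ROBUST_ALWAYS_EAGER=True, .delay() executes the task
        # locally instead of sending it to the queue
        heavy_stuff.delay(foo='bar')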

execute tasks

$ ./manage.py robust_worker

run scheduler

standalone

$ ./manage.py robust_beat

embedded

$ ./manage.py robust_worker --beat

cleanup

To clean up completed tasks, add robust.utils.cleanup to the robust schedule, as in the sample entry below.
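A minimal sketch (the hourly interval is an arbitrary choice):

from datetime import timedelta

ROBUST_SCHEDULE = [
    (timedelta(hours=1), 'robust.utils.cleanup'),
]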

settings

from datetime import timedelta

ROBUST_RATE_LIMIT = {
    'service1': (1, timedelta(seconds=10)),  # at most 1 run per 10 seconds
    'bar':      (20, timedelta(minutes=1)),  # at most 20 runs per minute
}

ROBUST_SCHEDULE = [
    (timedelta(seconds=1), 'foo.tasks.every_second'),
    (timedelta(minutes=5), 'foo.tasks.every_5_minutes'),
]

ROBUST_LOG_EVENTS = True  # log all task state changes

ROBUST_WORKER_FAILURE_TIMEOUT = 5  # wait 5 seconds when the worker hits an unexpected error

ROBUST_NOTIFY_TIMEOUT = 10  # listen to postgres notify for 10 seconds, then poll database

ROBUST_ALWAYS_EAGER = False  # if this is True, tasks will be executed locally instead of being sent to the queue

ROBUST_PAYLOAD_PROCESSOR = 'robust.utils.PayloadProcessor'

ROBUST_SUCCEED_TASK_EXPIRE = timedelta(hours=1)  # succeeded tasks older than this are removed by cleanup. Default: 1 hour

ROBUST_FAILED_TASK_EXPIRE = timedelta(weeks=1)  # failed tasks older than this are removed by cleanup. Default: 1 week
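The dotted paths in ROBUST_SCHEDULE are assumed here to resolve to @task-decorated functions; a minimal sketch of the foo/tasks.py module the sample schedule references:

# foo/tasks.py -- hypothetical module backing the sample ROBUST_SCHEDULE above
from robust import task

@task()
def every_second():
    pass

@task()
def every_5_minutes():
    pass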