Using Celery

Using Celery standalone

Directory structure

  • Celery
    • tasks.py
    • run.py

tasks.py

from celery import Celery

# Both the message broker and the result backend point at a local Redis server.
app = Celery('tasks', backend='redis://127.0.0.1', broker='redis://127.0.0.1')
@app.task
def add(x, y):
    return x + y
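
A worker must be running (with Redis reachable at 127.0.0.1) before any task can execute; from the Celery directory it is typically started with: celery -A tasks worker --loglevel=info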

run.py

from tasks import add

# delay() enqueues the task and returns an AsyncResult immediately.
result = add.delay(4, 4)
print('Is task ready: %s' % result.ready())

# get() blocks until the result is available (or raises after the timeout).
run_result = result.get(timeout=1)
print('task result: %s' % run_result)
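
If only the task id is stored (for example in a database), the result can be looked up again later. A minimal sketch, assuming the same tasks module and a running worker:

from tasks import add, app

# Submit the task and keep only its id.
result = add.delay(4, 4)
task_id = result.id

# Later, possibly in a different process, rebuild a handle from the id.
later = app.AsyncResult(task_id)
if later.ready():
    print('task result: %s' % later.get())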

Using Celery with Django

Directory structure

  • QuickApp (the project package)
    • __init__.py
    • celery.py
    • settings.py
  • AppName (a reusable Django app)
    • tasks.py

__init__.py

from __future__ import absolute_import, unicode_literals

# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app

__all__ = ('celery_app',)

celery.py

from __future__ import absolute_import, unicode_literals

import os

from celery import Celery

# Set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'QuickApp.settings')
app = Celery('QuickApp')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()

@app.task
def add(a, b):
    return a + b

# bind=True makes this a "bound task": the first argument is always the task
# instance itself (self), just like a Python bound method, so the task body can
# access attributes and methods on the task instance (e.g. self.request).
@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
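
# A hedged illustration, not part of the original file: bound tasks can also call
# self.retry() to re-run themselves; "divide" here is a hypothetical example task.
@app.task(bind=True, max_retries=3)
def divide(self, x, y):
    try:
        return x / y
    except ZeroDivisionError as exc:
        # self.request carries metadata such as the task id and the retry count.
        print('retrying %s (attempt %s)' % (self.request.id, self.request.retries))
        raise self.retry(exc=exc, countdown=5)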

# Register periodic tasks when the app starts up (requires a running celery beat).
from celery.schedules import crontab

@app.task
def test(arg):
    # Small task referenced by the schedules below.
    print(arg)

@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    # Calls test('hello') every 10 seconds.
    sender.add_periodic_task(10.0, test.s('hello'), name='hello every 10')

    # Calls test('world') every 30 seconds
    sender.add_periodic_task(30.0, test.s('world'), expires=10)

    # Executes every Monday morning at 7:30 a.m.
    sender.add_periodic_task(
        crontab(hour=7, minute=30, day_of_week=1),
        test.s('Happy Mondays!'),
    )
app.conf.timezone = "Asia/Shanghai"
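
For the schedules above to fire, both a worker and the beat scheduler must be running, e.g. celery -A QuickApp worker -l info and celery -A QuickApp beat -l info (assuming the project package is named QuickApp, as in the code above).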

tasks.py

from __future__ import absolute_import, unicode_literals
from celery import shared_task

# Tasks are often written inside reusable Django apps, and a reusable app cannot
# depend on a concrete Django project, so it cannot import the project's Celery
# instance directly.
# The @shared_task decorator lets you define tasks without a concrete Celery
# instance; they are registered with the active app when they are used.
@shared_task
def add(a, b):
    return a + b
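
A minimal sketch (this view is hypothetical, not part of the original project) of how Django code would enqueue the shared task and hand the task id back to the client:

from django.http import JsonResponse

from .tasks import add

def start_add(request):
    # delay() pushes the task onto the broker and returns immediately.
    result = add.delay(2, 3)
    return JsonResponse({'task_id': result.id})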

settings.py

import os
import platform
from pathlib import Path

# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
# BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
LOG_DIR = os.path.join(BASE_DIR, "logs")

if platform.system() in ("Linux", "Windows"):
    # Linux or Windows: make sure the log directory exists.
    Path(LOG_DIR).mkdir(parents=True, exist_ok=True)
elif platform.system() == "Darwin":
    # macOS reports "Darwin"; platform.system() never returns "Mac".
    ...


CELERY_BROKER_URL = 'redis://localhost:6379/0'
CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
# CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
# CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TASK_SERIALIZER = 'json'
CELERY_TIMEZONE = 'Asia/Shanghai'
# With namespace='CELERY', only settings whose names start with CELERY_ are read,
# so the legacy CELERYD_MAX_TASKS_PER_CHILD name would be ignored; the new-style
# name is used instead.
CELERY_WORKER_MAX_TASKS_PER_CHILD = 10
# Worker/beat log file paths are usually passed on the command line (--logfile);
# the paths below are kept for reference.
CELERYD_LOG_FILE = os.path.join(LOG_DIR, "celery_work.log")
CELERYBEAT_LOG_FILE = os.path.join(LOG_DIR, "celery_beat.log")
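
Periodic tasks can also be declared from settings instead of the on_after_configure handler. A sketch, assuming the shared task lives in AppName/tasks.py (adjust the dotted task name to the real app):

from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    'add-every-morning': {
        'task': 'AppName.tasks.add',
        'schedule': crontab(hour=7, minute=30),
        'args': (4, 4),
    },
}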