Directory structure
/data/celery/
|-- __init__.py
|-- __pycache__
|-- run_test.py
|-- start.sh
|-- sxj
| |-- celery.py
| |-- __init__.py
| |-- __pycache__
| |-- run_py.py
| `-- task.py
|-- task1.py
|-- task2.py
|-- task3
| |-- __init__.py
| |-- __pycache__
| `-- task.py
`-- test_monit.py
Configuration
# /data/celery/sxj/celery.py
from celery import Celery, platforms
from kombu import Exchange, Queue

platforms.C_FORCE_ROOT = True  # allow the worker to run as root

broker = 'redis://127.0.0.1:6380/1'
backend = 'redis://127.0.0.1:6380/2'

app = Celery(
    'sxj',
    broker=broker,
    backend=backend,
    include=[  # important: the modules that contain the tasks
        'sxj.task',
        'task1',
        'task2',
        'task3.task'
    ]
)

set_queue = (  # queue definitions
    Queue('default', Exchange('default', type='direct'), routing_key='default'),
    Queue('Q1', Exchange('Q1', type='direct'), routing_key='Q1'),
)

app.conf.update(  # worker and serialization settings
    CELERY_TASK_SERIALIZER='json',
    CELERY_ACCEPT_CONTENT=['json'],
    CELERY_RESULT_SERIALIZER='json',
    CELERY_TIMEZONE='Asia/Shanghai',
    CELERY_ENABLE_UTC=True,
    CELERYD_CONCURRENCY=2,           # number of concurrent worker processes
    CELERYD_MAX_TASKS_PER_CHILD=5,   # recycle a child process after 5 tasks
    CELERYD_PREFETCH_MULTIPLIER=2,   # tasks each worker prefetches from redis at a time
    CELERYD_TASK_TIME_LIMIT=3600,    # hard time limit (seconds) per task
    CELERY_QUEUES=set_queue,
    CELERY_DEFAULT_QUEUE='default',
    CELERY_DEFAULT_EXCHANGE='default',
    CELERY_DEFAULT_ROUTING_KEY='default',
)
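The config declares a Q1 queue, but nothing above routes any task to it, so everything falls through to default. A minimal sketch of routing by task name with the old-style CELERY_ROUTES setting (mapping task3.task.t3 to Q1 is an illustrative assumption, not something the original config does):

# Hypothetical addition to sxj/celery.py: send task3's tasks to the Q1 queue.
app.conf.update(
    CELERY_ROUTES={
        'task3.task.t3': {        # full task name: module path + function name
            'queue': 'Q1',
            'routing_key': 'Q1',
        },
    },
)

A worker started with -Q Q1 is still required for anything routed this way to be consumed.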
Starting the workers
# managed with supervisor
[program:test_celery]
directory = /data/celery/
command = /usr/bin/celery worker -A sxj -E -Q default -n default --loglevel=info
user = root
autostart = false
autorestart = true
startsecs = 3
stopsignal=INT
loglevel = info
redirect_stderr=true
stdout_logfile=/data/logs/test_celery.log
[program:test_queue]
directory = /data/django/snake/
command = /usr/bin/celery worker -A snake -Q test_queue -n test_queue -l info --logfile /data/logs/celery_test_queue.log
user = root
autostart = false
autorestart = true
startsecs = 3
loglevel = info
redirect_stderr=true
stdout_logfile = /data/logs/celery_test_queue.log
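Both programs have autostart = false, so they are started by hand: supervisorctl reread, supervisorctl update, then supervisorctl start test_celery. The tree above also lists test_monit.py; its contents are not shown, but a minimal monitoring sketch using Celery's inspect API (purely illustrative, assuming that is what the file does) could look like:

# Hypothetical contents for /data/celery/test_monit.py; the real file is not shown.
from sxj.celery import app

def check_workers():
    insp = app.control.inspect()    # broadcast to all running workers
    replies = insp.ping() or {}     # empty dict when no worker answers
    for name in replies:
        print('worker alive:', name)
    active = insp.active() or {}    # tasks currently executing, keyed by worker
    for name, tasks in active.items():
        print(name, 'running', len(tasks), 'tasks')

if __name__ == '__main__':
    check_workers()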
Calling the tasks
# /data/celery/task1.py
from sxj.celery import app

@app.task
def t1():
    return "task1"

# /data/celery/task3/task.py
from sxj.celery import app

@app.task
def t3():
    return "task3"