官方文檔
- https://docs.celeryproject.org/en/latest/django/first-steps-with-django.html#using-celery-with-django(配置文檔)
- https://github.com/celery/celery/tree/master/examples/django(django 例子)
其他文檔
- https://www.jianshu.com/p/fb3de1d9508c(celery 相關介紹)
開發環境
- python 3.6.8
- django 1.11
- celery 4.3.0
- django-celery-results 1.1.2
- django-celery-beat 1.5.0
安裝 redis
安裝操作 redis 庫
pip install redis
(這裡說明一下,pip 安裝的 redis 僅僅是一個連接到 redis 緩存的一個工具;redis 服務需要自己去安裝,安裝文檔如上)
安裝 celery
pip install celery
安裝 django-celery-results
pip install django-celery-results
配置 settings.py
# settings.py — Django / Celery configuration for this tutorial project.

# Register the results app so task outcomes can be browsed in the Django admin.
INSTALLED_APPS = [
    # ...
    'django_celery_results',  # stores Celery task results in the database
]

# Django cache backed by Redis (database 1).
CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": "redis://127.0.0.1:6379/1",
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis.client.DefaultClient",
        }
    }
}

# Celery broker: Redis, database 0.
# NOTE(review): celery.py calls config_from_object('django.conf:settings')
# without a namespace, so the old-style name BROKER_URL is the one Celery reads.
BROKER_URL = 'redis://127.0.0.1:6379/0'
# CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'  # beat scheduler (python manage.py celery beat)
CELERYD_MAX_TASKS_PER_CHILD = 3  # recycle each worker after 3 tasks to guard against memory leaks
# CELERY_RESULT_BACKEND = 'redis://127.0.0.1:6379/0'  # alternative: keep results in Redis
CELERY_RESULT_BACKEND = 'django-db'  # persist results via django-celery-results
CELERY_CACHE_BACKEND = 'django-cache'  # Celery uses the Django cache defined above

# Message serialization: JSON on POSIX hosts, pickle on Windows (eventlet pool).
if os.name != "nt":
    # Mac / CentOS — start worker: celery -A sqlmanager worker -l info
    CELERY_ACCEPT_CONTENT = ['application/json', ]
    CELERY_TASK_SERIALIZER = 'json'
    # CELERY_RESULT_SERIALIZER = 'json'
else:
    # Windows — pip install eventlet; start: celery -A sqlmanager worker -l info -P eventlet
    # NOTE(review): pickle deserializes arbitrary objects; only safe when the
    # broker is fully trusted.
    CELERY_ACCEPT_CONTENT = ['pickle', ]
    CELERY_TASK_SERIALIZER = 'pickle'
    # CELERY_RESULT_SERIALIZER = 'pickle'
生成 Django-celery-results 關聯表
python manage.py migrate
python manage.py migrate # 結果 raven.contrib.django.client.DjangoClient: 2019-12-15 21:47:10,426 /XXXXX/lib/python3.6/site-packages/raven/base.py [line:213] INFO Raven is not configured (logging is disabled). Please see the documentation for more information. Operations to perform: Apply all migrations: admin, auth, blog, captcha, contenttypes, django_celery_results, djcelery, logger, photo, sessions, sites, user, users Running migrations: Applying django_celery_results.0001_initial... OK Applying django_celery_results.0002_add_task_name_args_kwargs... OK Applying django_celery_results.0003_auto_20181106_1101... OK Applying django_celery_results.0004_auto_20190516_0412... OK Applying djcelery.0001_initial... OK
項目根目錄添加 celery.py
from __future__ import absolute_import, unicode_literals from celery import Celery from django.conf import settings import os # 獲取當前文件夾名,即為該 Django 的項目名 project_name = os.path.split(os.path.abspath('.'))[-1] project_settings = '%s.settings' % project_name # 設置環境變量 os.environ.setdefault('DJANGO_SETTINGS_MODULE', project_settings) # 實例化 Celery app = Celery(project_name) # 使用 django 的 settings 文件配置 celery app.config_from_object('django.conf:settings') # Celery 加載所有注冊的應用 app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
配置項目根目錄 __init__.py
from __future__ import absolute_import, unicode_literals

import pymysql

# Let pymysql masquerade as MySQLdb so Django's MySQL backend works on Python 3.
pymysql.install_as_MySQLdb()

# Import the Celery app whenever Django starts so that shared_task
# decorators bind to this application instance.
from .celery import app as celery_app

__all__ = ('celery_app',)
app 目錄添加 tasks.py
import json

import requests
from celery import task


@task
def task_send_dd_text(url, msg, atMoblies, atAll="false"):
    """Send a DingTalk text message through a robot webhook, as a Celery task.

    Args:
        url: DingTalk robot webhook URL.
        msg: Text content of the message.
        atMoblies: List of phone numbers to @-mention.  (The name keeps the
            original misspelling for backward compatibility — callers pass it
            as a keyword argument.)
        atAll: "true" to @-mention everyone, "false" otherwise.  Defaults to
            "false" — the original default was the typo "flase", which DingTalk
            would not recognize as the intended flag value.

    Returns:
        The webhook's response body as text (useful when a result backend
        such as django-db stores task results).
    """
    body = {
        "msgtype": "text",
        "text": {"content": msg},
        "at": {"atMobiles": atMoblies, "isAtAll": atAll},
    }
    headers = {
        'content-type': 'application/json',
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:22.0) '
                      'Gecko/20100101 Firefox/22.0',
    }
    # A timeout keeps a dead/slow webhook from blocking the worker forever.
    r = requests.post(url, headers=headers, data=json.dumps(body), timeout=10)
    return r.text
views.py 調用
# Suppose the URL is routed as /test
def test(request):
    """Enqueue the DingTalk notification task and return a plain response."""
    from .tasks import task_send_dd_text

    # .delay() pushes the task onto the broker instead of running it inline.
    task_send_dd_text.delay(
        settings.DD_NOTICE_URL,
        "異步任務調用成功",
        atMoblies=["18612345678"],
        atAll="false",
    )
    return HttpResponse("test")
啟動 celery worker
# 項目根目錄終端執行(項目名稱) centos or mac os:celery -A sqlmanager(項目名稱) worker -l info (centos) windows: celery -A sqlmanager(項目名稱) worker -l info -P eventlet (可能還需要 pip install eventlet) # 守護進程 /root/.virtualenvs/blog/bin/celery multi start w1 -A sqlmanager(項目名稱) -l info --logfile=./celerylog.log
centos7 守護 celery worker
訪問調用 異步任務 的視圖
http://127.0.0.1/test
Django 后台查看 celery 異步任務結果