Celery执行异步任务

消费者
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
# celery_task1.py
import celery
import time

# Redis connection info: DB 1 stores results, DB 2 is the broker queue.
# NOTE(review): a password containing '@' must be percent-encoded (%40) in a
# redis:// URL, otherwise the host part parses incorrectly — confirm credentials.
backend = 'redis://:xxx@111@43.254.x.x:6379/1'
broker = 'redis://:xxx@111@43.254.x.x:6379/2'

cel = celery.Celery('haha', backend=backend, broker=broker)

# BUGFIX: the decorator must be the app instance's (@cel.task), not the
# module-level @celery.task — otherwise the task is not registered on `cel`
# and the worker started with `-A celery_task1` will not find it.
@cel.task
def test_send_email(name):
    """Simulate sending an e-mail to *name* (3 s delay); return "ok"."""
    print('向%s发送邮件...' % name)
    time.sleep(3)
    print('向%s发送邮件完成在!' % name)
    return "ok"
生产者
1
2
3
4
5
6
# produce_task.py
# BUGFIX: the tasks module defined above is celery_task1.py (and the worker
# is started with `-A celery_task1`), so import from celery_task1, not the
# non-existent `celery_task`.
from celery_task1 import test_send_email

# .delay() pushes the task onto the broker queue and returns an AsyncResult
# whose .id can later be used to fetch the result.
result1 = test_send_email.delay("liyk")
print(result1.id)
result2 = test_send_email.delay("mz")
print(result2.id)
执行
1
2
3
4
5
celery worker -A celery_task1 -l info -P eventlet
# requires eventlet: pip install eventlet
或者:celery worker -A celery_task1 --pool=solo -l info
# `worker` starts the worker process; -A names the module to load; -l sets the log level (info)
# what this command does: connect to the broker (redis), create a queue and listen on it, then start several workers to consume tasks
1
2
先命令行celery worker xxx启动监听
运行produce_task.py


多任务结构

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
# celery.py — the app module of the multi-task package (celery_tasks/)
from celery import Celery

# NOTE(review): passwords containing '@' must be percent-encoded (%40) in a
# redis:// URL — confirm the real credentials before deploying.
cel = Celery('celery_demo',
             backend='redis://:xxx@111@43.254.x.x:6379/1',
             broker='redis://:xxx@111@43.254.x.x:6379/2',
             # task modules to load; splitting tasks across several files
             # keeps them grouped by purpose
             include=['celery_tasks.task01',
                      'celery_tasks.task02'])

# Timezone used for scheduling and timestamps.
cel.conf.timezone = 'Asia/Shanghai'
# Do not convert to UTC internally; use the timezone set above.
cel.conf.enable_utc = False
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
# task01.py
import time
from .celery import cel

@cel.task
def send_email(res):
    """Simulated e-mail task: print, wait 5 s, return a status string."""
    print("完成向%s发送邮件任务" % res)
    time.sleep(5)
    return "邮件发送完成"

# task02.py
import time
from .celery import cel

@cel.task
def send_msg(name):
    """Simulated SMS task: print, wait 5 s, return a status string."""
    print("完成向%s发送短信任务" % name)
    time.sleep(5)
    return "短信发送完成!"
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
# produce_task.py
from celery_tasks.task01 import send_email
from celery_tasks.task02 import send_msg

# .delay() tells celery to run the task immediately with the given argument.
result1 = send_email.delay('liyk')
print(result1.id)
result2 = send_msg.delay('mz')
print(result2.id)

# result.py
from celery.result import AsyncResult
# BUGFIX: in the multi-task layout the app lives in the celery_tasks package
# (celery_tasks/celery.py), so import from there — `celery_task` does not exist.
from celery_tasks.celery import cel

# Look up a previously queued task by the id printed by produce_task.py.
async_result = AsyncResult(id="0ec55a47-e91a-41c1-8428-3da4bb54a669", app=cel)

if async_result.successful():
    result = async_result.get()
    print(result)
    # async_result.forget()  # drop the stored result from the backend
elif async_result.failed():
    print('执行失败')
elif async_result.status == 'PENDING':
    print('任务等待中被执行')
elif async_result.status == 'RETRY':
    print('任务异常后正在重试')
elif async_result.status == 'STARTED':
    print('任务已经开始被执行')
执行
1
2
# run from the directory that CONTAINS the celery_tasks package
celery worker -A celery_tasks -l info -P eventlet

celery执行定时任务

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
# produce_task.py — schedule a task to run at a specific time (eta)
# NOTE(review): import from celery_task1 to match the tasks module defined
# earlier — the original `celery_task` module does not exist.
from celery_task1 import send_email
from datetime import datetime, timedelta

# Option 1: run at an absolute wall-clock time.
# v1 = datetime(2020, 7, 27, 16, 20, 00)
# print(v1)
# v2 = datetime.utcfromtimestamp(v1.timestamp())  # eta must be in UTC
# print(v2)
# result = send_email.apply_async(args=["egon",], eta=v2)
# print(result.id)

# Option 2: run after a relative delay (10 seconds from now).
ctime = datetime.now()
# celery interprets eta as UTC by default, so convert local time -> UTC.
# NOTE(review): datetime.utcfromtimestamp is deprecated since Python 3.12;
# prefer datetime.fromtimestamp(ts, tz=timezone.utc).
utc_ctime = datetime.utcfromtimestamp(ctime.timestamp())
time_delay = timedelta(seconds=10)
task_time = utc_ctime + time_delay

# apply_async with eta= queues the task to execute at task_time.
result = send_email.apply_async(args=["egon"], eta=task_time)
print(result.id)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
# celery.py of the multi-task package, extended with a beat schedule
from datetime import timedelta
from celery import Celery
from celery.schedules import crontab

cel = Celery('tasks',
             broker='redis://127.0.0.1:6379/1',
             backend='redis://127.0.0.1:6379/2',
             include=[
                 'celery_tasks.task01',
                 'celery_tasks.task02',
             ])
cel.conf.timezone = 'Asia/Shanghai'
cel.conf.enable_utc = False

cel.conf.beat_schedule = {
    # the entry name is arbitrary
    'add-every-10-seconds': {
        # run the send_email task from celery_tasks.task01
        'task': 'celery_tasks.task01.send_email',
        # run every 6 seconds (the original comment said "every 2 seconds",
        # which contradicted the actual timedelta); alternatives:
        # 'schedule': 1.0,                      # every second
        # 'schedule': crontab(minute="*/1"),    # every minute
        'schedule': timedelta(seconds=6),
        # positional arguments passed to the task
        'args': ('张三',),
    },
    # 'add-every-12-seconds': {
    #     'task': 'celery_tasks.task01.send_email',
    #     # every year on April 11 at 08:42
    #     'schedule': crontab(minute=42, hour=8, day_of_month=11, month_of_year=4),
    #     'args': ('张三',),
    # },
}

# Start the Beat process: it reads beat_schedule from the config and
# periodically sends due tasks to the task queue.
$ celery beat -A proj
# Then start the worker process:
$ celery -A proj worker -l info
# Or run beat and worker in a single process:
$ celery -B -A proj worker -l info
-------------本文结束感谢您的阅读-------------
原创技术分享,感谢您的支持。