因最近想尝试一下Python来对接微信公众平台,结合相对比较熟悉的tornado+celery+redis来实现一些特定的功能.
微信公众平台的很多接口调用依赖access_token,但是它只有2小时的有效期,因此需要定时去获取有效的token.因此这里使用tornado的异步httpclient获取数据,并将结果写入redis 完成整个过程.
1.目录结构
将任务定义与celery运行配置隔离开,分别放在不同的文件中:
mytask
--celery.py
--celeryconfig.py
--task.py
--__init__.py
2.celery.py
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
# Celery application entry point.
#
# This file is itself named "celery", so absolute imports are needed to
# reach the real celery package rather than this module.
from __future__ import absolute_import

from celery import Celery, platforms

# Celery refuses to start as root by default; force-allow it.
platforms.C_FORCE_ROOT = True

# Register mytask.task so the worker discovers its task definitions,
# then load the rest of the settings from mytask/celeryconfig.py.
app = Celery('mytask', include=['mytask.task'])
app.config_from_object('mytask.celeryconfig')

if __name__ == '__main__':
    app.start()
3.celeryconfig.py #每隔一段时间执行任务
#!/usr/bin/env python
# -*- coding:utf-8 -*-
#http://docs.celeryproject.org/en/latest/userguide/configuration.html#configuration
#celery 4.0开始引入了小写字母的配置项[旧的大写配置依然可以使用]
from __future__ import absolute_import
BROKER_URL = 'redis://localhost:6379/0'
CELERY_USER='work'
CELERY_GROUP='work'
CELERY_CREATE_DIRS=1
CELERY_LOG_FILE='/var/log/celery/%n%I.log'
CELERY_PID_FILE='/var/log/celery/%n.pid'
CELERY_TASK_SERIALIZER = ‘json’ # 4.0开始默认为JSON,此前为pickle
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TIMEZONE = 'Asia/Shanghai'
from datetime import timedelta
CELERYBEAT_SCHEDULE = {
'fetch-every-6000-seconds': {
'task':'mytask.task.fetch_access_token’, #任务路径
'schedule': timedelta(seconds=6000) #每6000秒执行,access_token2小时过期,需要定时获取,这里设置为100分钟执行一次,测试时,可以调小,查看效果
}
}
4.task.py
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import sys,json
from tornado import httpclient, gen, ioloop
import hashlib
import time
from celery import Celery
from tornado.options import define, options
import redis
base_config = {
'AppID': 'wx*****', #自行替换为微信公众号接口AppID
'AppSecret': '**********', #自行替换
'redis': {
'host': '127.0.0.1',
'port': 6379
},
"user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36",
}
reload(sys)
sys.setdefaultencoding('utf8')
global_redis = redis.StrictRedis(host=base_config['redis']['host'], port=base_config['redis']['port'], db=0)
app = Celery()
@gen.coroutine
def main():
base_url = 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=%s&secret=%s';
http_header = {'User-Agent': base_config['user_agent'],
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'zh-CN,zh;q=0.8,en;q=0.6',
}
httpclient.AsyncHTTPClient.configure(None,
defaults=dict(user_agent=base_config["user_agent"]))
url = base_url % (base_config['AppID'],base_config['AppSecret'])
print url
try:
response = yield httpclient.AsyncHTTPClient().fetch(url, headers=http_header, method='GET')
except web.HTTPError as e:
print "tornado.web.HTTPError,", e.message
finally:
pass
print('fetched %s' % url)
html = response.body if isinstance(response.body, str) \
else response.body.decode()
final_rs = json.loads(html)
try:
new_access_token = final_rs['access_token']
global_redis.set('access_token',new_access_token);
print "done,refresh new access token"
except KeyError as e:
print "return error", e.message
finally:
pass
#任务名称
@app.task
def fetch_access_token():
io_loop = ioloop.IOLoop.current()
io_loop.run_sync(main)
5.执行:
$cd ./../ # cd到当前目录上一级,不然找不到mytask
$celery -A mytask worker -B -l info # -B 表示同时启动beat调度器
6.改为crontab执行,修改celeryconfig.py
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Celery configuration: run fetch_access_token on a crontab schedule
# instead of a fixed interval.
from __future__ import absolute_import

from celery.schedules import crontab

BROKER_URL = 'redis://localhost:6379/0'
CELERY_USER = 'work'
CELERY_GROUP = 'work'
CELERY_CREATE_DIRS = 1
CELERY_LOG_FILE = '/var/log/celery/%n%I.log'
CELERY_PID_FILE = '/var/log/celery/%n.pid'
CELERY_TASK_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TIMEZONE = 'Asia/Shanghai'

CELERYBEAT_SCHEDULE = {
    'fetch-every-6000-seconds': {
        # Dotted path to the task. BUG FIX: original line ended with a
        # typographic quote and used a "//" comment, both SyntaxErrors
        # in Python.
        'task': 'mytask.task.fetch_access_token',
        # Every Monday at 07:30. BUG FIX: the keyword was misspelled
        # "mintue", which raises TypeError when crontab() is called.
        'schedule': crontab(hour=7, minute=30, day_of_week=1),
    }
}
7.使用supervisord 管理celery
; supervisord program definition: run the celery worker with the
; embedded beat scheduler (-B) and restart it automatically.
[program:celery]
user=work
numprocs=1
command=/usr/local/bin/celery -A mytask worker -B -l info
; The mytask package is deployed under /usr/local/htdocs.
; BUG FIX: the comment originally sat inline after the value with a
; "#"; supervisord's INI parser does not strip inline "#" comments,
; so the directory value would have included the comment text.
directory=/usr/local/htdocs
stdout_logfile=/tmp/supervisord_celery.log
autostart=true
autorestart=true
redirect_stderr=true
stopsignal=QUIT