celery框架出现maximum recursion depth exceeded的bug
初用celery部署爬虫时出错。
项目如下
其中__init__.py:
# celery_app/__init__.py — create the Celery application instance.
from celery import Celery
# 'demo' is the app's main-module name, used when generating task names.
app = Celery('demo')
# Load all worker settings from celery_app/celeryconfig.py (see below).
app.config_from_object('celery_app.celeryconfig')
# celeryconfig.py — Celery worker configuration.

# Message broker (Redis db 0) and task-result backend (Redis db 1).
BROKER_URL = 'redis://127.0.0.1:6379/0'
CELERY_RESULT_BACKEND = 'redis://127.0.0.1:6379/1'

CELERY_TIMEZONE = 'Asia/Shanghai'

# Modules the worker imports at startup so @app.task registrations run.
# BUG FIX: the original was ('celery_app.ceshi_1') with no trailing comma —
# that is just a parenthesized string, not a one-element tuple.
CELERY_IMPORTS = (
    'celery_app.ceshi_1',
)

# Content types the worker accepts, and the serializers for tasks/results.
CELERY_ACCEPT_CONTENT = ['json', 'pickle']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
ceshi_1.py:
import requests
import json
from celery_app import app
@app.task
def ceshi():
    """Fetch one proxy IP:port from the wandoudl API, then request
    https://s.hc360.com through it as a SOCKS5 proxy.

    Returns the decoded body of the target page so the configured result
    backend actually stores something (the original discarded it).

    NOTE(review): the RecursionError seen only under the celery worker is
    most likely an environment issue, not task logic — socks5:// needs
    pysocks installed (``pip install requests[socks]``), and the prefork
    pool on Windows is known to misbehave; try ``celery worker -P solo``
    or ``-P gevent`` — confirm in your deployment.
    """
    url = 'http://api.wandoudl.com/api/ip?app_key=88a34462e4f758ee750dac2bf22ba6c6&pack=205555&num=1&xy=2&type=2&lb=\r\n&mr=1&'
    # FIX: timeout added — without it an unresponsive proxy API would hang
    # the worker slot forever.
    response = requests.get(url, timeout=10).content.decode()
    s = json.loads(response)
    ip_port = str(s['data'][0]["ip"]) + ":" + str(s['data'][0]["port"])
    print(ip_port)
    proxy = {
        'http': 'socks5://{}'.format(ip_port),
        'https': 'socks5://{}'.format(ip_port),
    }
    url = "https://s.hc360.com"
    # FIX: timeout added here as well; result returned instead of discarded.
    r = requests.get(url, proxies=proxy, timeout=10).content.decode()
    return r
# Producer side: enqueue the task onto the broker; the running worker
# (started with `celery -A celery_app worker`) picks it up and executes it.
from celery_app import ceshi_1
ceshi_1.ceshi.apply_async()
ceshi_1.py单独执行时无误,但celery启动worker时出错,错误如下:
[tasks]
. celery_app.ceshi_1.ceshi
[2019-06-30 20:22:00,774: INFO/MainProcess] Connected to redis://127.0.0.1:6379/0
[2019-06-30 20:22:00,795: INFO/MainProcess] mingle: searching for neighbors
[2019-06-30 20:22:01,817: INFO/MainProcess] mingle: all alone
[2019-06-30 20:22:01,823: INFO/MainProcess] celery@DELL ready.
[2019-06-30 20:22:01,838: INFO/MainProcess] pidbox: Connected to redis://127.0.0.1:6379/0.
[2019-06-30 20:22:19,118: INFO/MainProcess] Received task: celery_app.ceshi_1.ceshi[b649a22d-80c0-46ee-bbfd-5f728f9c8d92]
[2019-06-30 20:22:19,375: WARNING/MainProcess] 119.115.75.138:5412
[2019-06-30 20:22:19,392: ERROR/MainProcess] Task celery_app.ceshi_1.ceshi[b649a22d-80c0-46ee-bbfd-5f728f9c8d92] raised unexpected: RecursionError(
'maximum recursion depth exceeded')
Traceback (most recent call last):
File "c:\users\administrator\anaconda3\lib\site-packages\celery\app\trace.py", line 385, in trace_task
R = retval = fun(*args, **kwargs)
File "c:\users\administrator\anaconda3\lib\site-packages\celery\app\trace.py", line 648, in __protected_call__
return self.run(*args, **kwargs)
File "D:\celery_demo\celery_app\ceshi_1.py", line 22, in ceshi
,proxies=proxy
File "c:\users\administrator\anaconda3\lib\site-packages\requests\api.py", line 75, in get
return request('get', url, params=params, **kwargs)
File "c:\users\administrator\anaconda3\lib\site-packages\requests\api.py", line 60, in request
return session.request(method=method, url=url, **kwargs)
File "c:\users\administrator\anaconda3\lib\site-packages\requests\sessions.py", line 533, in request
resp = self.send(prep, **send_kwargs)
File "c:\users\administrator\anaconda3\lib\site-packages\requests\sessions.py", line 646, in send
r = adapter.send(request, **kwargs)
File "c:\users\administrator\anaconda3\lib\site-packages\requests\adapters.py", line 449, in send
timeout=timeout
File "c:\users\administrator\anaconda3\lib\site-packages\urllib3\connectionpool.py", line 600, in urlopen