Commit 4354e583 by lichenggang

Initial commit

#!/usr/bin/env python
# -*- coding:utf-8 -*-
import hashlib
import os
import time
import traceback
from datetime import datetime, timedelta
import psutil
from elasticsearch import Elasticsearch
from utils.log_manage import get_logger
from utils.mysql import MySqlOperator
from utils.robots import dd_send_msg
from utils.tas_redis import task_redis
logger = get_logger('crawler_process')
dashboard = MySqlOperator('dashboard')
ichunt_elk = Elasticsearch(host="127.0.0.1", port=9200, timeout=20)


def get_process_task(task_code):
    keys = task_redis.keys(task_code + '*')
    keys_len = sum(task_redis.llen(k) for k in keys)
    abnormal_keys = task_redis.keys('abnormal_' + task_code + '*')
    abnormal_keys_len = sum(task_redis.llen(k) for k in abnormal_keys)
    return keys_len, abnormal_keys_len
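
# Note: redis KEYS is O(N) over the whole keyspace and blocks the server while
# it scans; redis-py's scan_iter() is the usual non-blocking alternative
# (a suggestion, not part of the original code):
#   keys = list(task_redis.scan_iter(match=task_code + '*'))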


def get_error_ratio(task_code, time_range=30):
    """Average failRate for task_code over the last time_range minutes, read
    from the day's logstash index in Elasticsearch."""
    utc_since = datetime.utcnow() - timedelta(minutes=time_range)
    index = 'logstash-' + utc_since.strftime('%Y.%m.%d')
    query = {
        "size": 0,
        "query": {
            "bool": {
                "filter": [
                    {"term": {"taskCode": task_code}},
                    {"range": {"@timestamp": {"gte": utc_since.strftime("%Y-%m-%dT%H:%M:%SZ")}}}
                ]
            }
        },
        "aggs": {
            "avgFailRate": {
                "avg": {"field": "failRate"}
            }
        }
    }
    response = ichunt_elk.search(index=index, doc_type='doc', body=query)
    value = response['aggregations']['avgFailRate']['value']
    value = value if value is not None else 0.0
    return round(value, 3)
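
# Note: the avg aggregation returns {"value": null} when no documents match
# the filter, so the None guard above is required; the log attached below
# shows "TypeError: type NoneType doesn't define __round__ method" from an
# earlier revision that rounded the raw value directly.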


def get_crawler_process_info():
    """Collect one info dict per running async_tas_caller.py process, keyed by
    an md5 of (create_time, task_code, concurrency) so a restarted process
    gets a fresh key."""
    flags = task_redis.hgetall('task_running_flag')
    pids = os.popen("ps -ef | grep async_tas_caller.py | grep -v grep | awk '{print $2}'")
    process = dict()
    for pid in pids:
        p = psutil.Process(int(pid))
        info = p.as_dict(attrs=['cmdline', 'create_time', 'cpu_times'])
        # pair flags with their values: ['-r', 'mug', '-c', '4'] -> {'-r': 'mug', '-c': '4'}
        info['cmdline'] = {k: v for k, v in zip(info['cmdline'][2::2], info['cmdline'][3::2])}
        info_dict = dict()
        info_dict['pid'] = pid.strip()
        info_dict['task_code'] = info['cmdline']['-r']
        info_dict['platform'] = info['cmdline']['-mq'].split("_")[0]
        info_dict['concurrency'] = int(info['cmdline']['-c'])
        info_dict['queue'] = info['cmdline']['-mq'] if '-mq' in info['cmdline'] else ''
        unic = ''.join([str(info['create_time']), info_dict['task_code'], str(info_dict['concurrency'])]).encode()
        info_dict['unique_code'] = hashlib.md5(unic).hexdigest()
        info_dict['status'] = 1 if info_dict['task_code'].encode() in flags else 0  # hgetall returns bytes keys
        info_dict['task_type'] = info['cmdline']['-ef'] if '-ef' in info['cmdline'] else ''
        info_dict['start_time'] = int(info['create_time'])
        info_dict['run_time'] = round(sum(info['cpu_times']), 3)
        info_dict['remain_task'], info_dict['remain_wrong_task'] = get_process_task(info_dict['task_code'])
        info_dict['wrong_radio'] = get_error_ratio(info_dict['task_code'])
        process[info_dict['unique_code']] = info_dict
    return process
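
# A pure-psutil alternative to the ps/grep pipeline above, which avoids
# spawning a shell and parsing text (a sketch, not part of the original code):
#   for p in psutil.process_iter(attrs=['pid', 'cmdline']):
#       if any('async_tas_caller.py' in part for part in (p.info['cmdline'] or [])):
#           ...  # same per-process handling as in the loop above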


def info_update(process_info):
    # Step 1: insert or update each process record in MySQL, then sync the
    # redis pause flag or terminate the process according to the status
    # stored in MySQL (0 = run, 1 = pause, 2 = kill).
    for v in process_info.values():
        status = dashboard.check_exist_by_unicode(v['unique_code'])  # this task's status row in MySQL
        if status:
            dashboard.update_process(v)  # update
            if status[0] == 0 and v['status'] == 1:  # resume
                logger.info('%s:back to running', v['task_code'])
                task_redis.hdel('task_running_flag', v['task_code'])
            elif status[0] == 1 and v['status'] == 0:  # pause
                logger.info('%s: pause the crawler', v['task_code'])
                task_redis.hset('task_running_flag', v['task_code'], '1')
            elif status[0] == 2:  # kill
                logger.info('%s: kill the process', v['task_code'])
                task_redis.hdel('task_running_flag', v['task_code'])
                os.system('kill -9 %s' % v['pid'])
        else:
            dashboard.insert_process(v)  # insert
    # Step 2: clean up zombie records in MySQL:
    # 1. the process no longer exists but its MySQL record is still alive;
    # 2. status sync treats MySQL as the source of truth.
    ps = dashboard.get_process_alive()  # records MySQL still considers alive
    for p in ps:
        if p[1] not in process_info:
            dashboard.update_status(2, p[1])
            logger.info('%s: update status', p[0])


def test():
    # ad-hoc debugging helper; not called by main()
    flags = task_redis.hgetall('task_running_flag')
    ps = dashboard.get_process_alive()  # records MySQL still considers alive


def main():
    while True:
        try:
            pi = get_crawler_process_info()
            info_update(pi)
        except Exception:
            tb = traceback.format_exc()
            logger.error(tb)
            dd_send_msg(tb)
        finally:
            time.sleep(60)


if __name__ == "__main__":
    main()
File mode changed
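
The monitor's production log follows. The bursts of redis.exceptions.BusyLoadingError and ConnectionError from 2019-08-04 00:02 to 00:14 are transient (Redis was restarting and reloading its dataset); main() survives them through its broad except, but each one costs a full polling cycle. A small retry helper could absorb them. This is a sketch only, assuming task_redis is a standard redis-py client; the helper name and backoff values are illustrative, not part of the original code:

    import time
    from redis.exceptions import BusyLoadingError, ConnectionError as RedisConnectionError

    def call_with_retry(func, *args, retries=5, delay=5, **kwargs):
        # Retry transient redis-py failures (server restarting or still
        # loading its dataset) before giving up and letting main() alert.
        for attempt in range(retries):
            try:
                return func(*args, **kwargs)
            except (BusyLoadingError, RedisConnectionError):
                if attempt == retries - 1:
                    raise
                time.sleep(delay)

    # usage (hypothetical): flags = call_with_retry(task_redis.hgetall, 'task_running_flag')
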
2019-07-30 21:21:26,955 crawler_process.py[line:102] INFO mug: pause the crawler
2019-07-30 21:21:43,768 crawler_process.py[line:99] INFO mug:back to running
2019-08-01 16:18:45,218 crawler_process.py[line:116] INFO skw: update status
2019-08-01 16:22:30,250 crawler_process.py[line:116] INFO vrg: update status
2019-08-02 10:34:16,743 crawler_process.py[line:98] INFO update info mug
2019-08-02 10:34:16,746 crawler_process.py[line:98] INFO update info alg
2019-08-02 10:34:16,749 crawler_process.py[line:98] INFO update info rsg
2019-08-02 10:34:16,752 crawler_process.py[line:98] INFO update info rog
2019-08-02 10:34:16,755 crawler_process.py[line:98] INFO update info r24g
2019-08-02 10:34:16,758 crawler_process.py[line:98] INFO update info lcg
2019-08-02 10:34:16,761 crawler_process.py[line:98] INFO update info lcc
2019-08-02 10:34:16,764 crawler_process.py[line:98] INFO update info elgw
2019-08-02 10:34:16,767 crawler_process.py[line:98] INFO update info csg
2019-08-02 10:34:16,770 crawler_process.py[line:98] INFO update info bkgn
2019-08-02 10:34:16,775 crawler_process.py[line:98] INFO update info pog
2019-08-02 10:34:16,778 crawler_process.py[line:98] INFO update info apc
2019-08-02 10:34:16,781 crawler_process.py[line:98] INFO update info apg
2019-08-02 10:34:16,785 crawler_process.py[line:98] INFO update info fuc
2019-08-02 10:34:16,788 crawler_process.py[line:98] INFO update info mtg
2019-08-02 10:34:16,791 crawler_process.py[line:98] INFO update info hlg
2019-08-02 10:34:16,795 crawler_process.py[line:98] INFO update info tmeg
2019-08-02 10:34:16,801 crawler_process.py[line:98] INFO update info vrg
2019-08-02 10:34:16,805 crawler_process.py[line:98] INFO update info mkg
2019-08-02 10:34:16,808 crawler_process.py[line:98] INFO update info alc
2019-08-02 10:34:16,812 crawler_process.py[line:98] INFO update info bkc
2019-08-02 10:34:16,815 crawler_process.py[line:98] INFO update info mxg
2019-08-02 10:34:16,818 crawler_process.py[line:98] INFO update info fug
2019-08-02 10:34:16,820 crawler_process.py[line:98] INFO update info arg
2019-08-02 10:34:16,823 crawler_process.py[line:98] INFO update info cfc
2019-08-02 10:34:16,827 crawler_process.py[line:98] INFO update info elc
2019-08-02 10:34:16,837 crawler_process.py[line:98] INFO update info oyg
2019-08-02 10:34:16,841 crawler_process.py[line:98] INFO update info ang
2019-08-02 10:34:16,846 crawler_process.py[line:98] INFO update info dgc
2019-08-02 10:34:16,849 crawler_process.py[line:98] INFO update info anc
2019-08-02 10:34:16,852 crawler_process.py[line:98] INFO update info skw
2019-08-02 10:34:16,855 crawler_process.py[line:98] INFO update info dggw
2019-08-02 10:34:16,857 crawler_process.py[line:98] INFO update info cfg
2019-08-02 10:34:16,860 crawler_process.py[line:98] INFO update info wpgg
2019-08-02 10:34:16,863 crawler_process.py[line:98] INFO update info mcg
2019-08-02 10:34:16,866 crawler_process.py[line:98] INFO update info icnud
2019-08-02 10:34:16,869 crawler_process.py[line:98] INFO update info pc
2019-08-02 10:39:35,268 crawler_process.py[line:99] INFO update info mug
2019-08-02 10:39:35,272 crawler_process.py[line:99] INFO update info alg
2019-08-02 10:39:35,275 crawler_process.py[line:99] INFO update info rsg
2019-08-02 10:39:35,278 crawler_process.py[line:99] INFO update info rog
2019-08-02 10:39:35,282 crawler_process.py[line:99] INFO update info r24g
2019-08-02 10:39:35,286 crawler_process.py[line:99] INFO update info lcg
2019-08-02 10:39:35,290 crawler_process.py[line:99] INFO update info lcc
2019-08-02 10:39:35,294 crawler_process.py[line:99] INFO update info elgw
2019-08-02 10:39:35,300 crawler_process.py[line:99] INFO update info csg
2019-08-02 10:39:35,304 crawler_process.py[line:99] INFO update info bkgn
2019-08-02 10:39:35,311 crawler_process.py[line:99] INFO update info pog
2019-08-02 10:39:35,315 crawler_process.py[line:99] INFO update info apc
2019-08-02 10:39:35,320 crawler_process.py[line:99] INFO update info apg
2019-08-02 10:39:35,323 crawler_process.py[line:99] INFO update info fuc
2019-08-02 10:39:35,327 crawler_process.py[line:99] INFO update info mtg
2019-08-02 10:39:35,331 crawler_process.py[line:99] INFO update info hlg
2019-08-02 10:39:35,334 crawler_process.py[line:99] INFO update info tmeg
2019-08-02 10:39:35,338 crawler_process.py[line:99] INFO update info vrg
2019-08-02 10:39:35,341 crawler_process.py[line:99] INFO update info mkg
2019-08-02 10:39:35,344 crawler_process.py[line:99] INFO update info alc
2019-08-02 10:39:35,350 crawler_process.py[line:99] INFO update info bkc
2019-08-02 10:39:35,354 crawler_process.py[line:99] INFO update info mxg
2019-08-02 10:39:35,358 crawler_process.py[line:99] INFO update info fug
2019-08-02 10:39:35,363 crawler_process.py[line:99] INFO update info arg
2019-08-02 10:39:35,368 crawler_process.py[line:99] INFO update info cfc
2019-08-02 10:39:35,371 crawler_process.py[line:99] INFO update info elc
2019-08-02 10:39:35,375 crawler_process.py[line:99] INFO update info oyg
2019-08-02 10:39:35,377 crawler_process.py[line:99] INFO update info ang
2019-08-02 10:39:35,387 crawler_process.py[line:99] INFO update info dgc
2019-08-02 10:39:35,392 crawler_process.py[line:99] INFO update info anc
2019-08-02 10:39:35,396 crawler_process.py[line:99] INFO update info skw
2019-08-02 10:39:35,400 crawler_process.py[line:99] INFO update info dggw
2019-08-02 10:39:35,404 crawler_process.py[line:99] INFO update info cfg
2019-08-02 10:39:35,407 crawler_process.py[line:99] INFO update info wpgg
2019-08-02 10:39:35,410 crawler_process.py[line:99] INFO update info mcg
2019-08-02 10:39:35,414 crawler_process.py[line:99] INFO update info icnud
2019-08-02 10:39:35,417 crawler_process.py[line:99] INFO update info pc
2019-08-02 10:41:28,752 crawler_process.py[line:98] INFO update info mug
2019-08-02 10:41:28,756 crawler_process.py[line:98] INFO update info alg
2019-08-02 10:41:28,759 crawler_process.py[line:98] INFO update info rsg
2019-08-02 10:41:28,763 crawler_process.py[line:98] INFO update info rog
2019-08-02 10:41:28,766 crawler_process.py[line:98] INFO update info r24g
2019-08-02 10:41:28,769 crawler_process.py[line:98] INFO update info lcg
2019-08-02 10:41:28,772 crawler_process.py[line:98] INFO update info lcc
2019-08-02 10:41:28,775 crawler_process.py[line:98] INFO update info elgw
2019-08-02 10:41:28,778 crawler_process.py[line:98] INFO update info csg
2019-08-02 10:41:28,782 crawler_process.py[line:98] INFO update info bkgn
2019-08-02 10:41:28,785 crawler_process.py[line:98] INFO update info pog
2019-08-02 10:41:28,788 crawler_process.py[line:98] INFO update info apc
2019-08-02 10:41:28,791 crawler_process.py[line:98] INFO update info apg
2019-08-02 10:41:28,795 crawler_process.py[line:98] INFO update info fuc
2019-08-02 10:41:28,798 crawler_process.py[line:98] INFO update info mtg
2019-08-02 10:41:28,801 crawler_process.py[line:98] INFO update info hlg
2019-08-02 10:41:28,805 crawler_process.py[line:98] INFO update info tmeg
2019-08-02 10:41:28,808 crawler_process.py[line:98] INFO update info vrg
2019-08-02 10:41:28,811 crawler_process.py[line:98] INFO update info mkg
2019-08-02 10:41:28,817 crawler_process.py[line:98] INFO update info alc
2019-08-02 10:41:28,820 crawler_process.py[line:98] INFO update info bkc
2019-08-02 10:41:28,823 crawler_process.py[line:98] INFO update info mxg
2019-08-02 10:41:28,827 crawler_process.py[line:98] INFO update info fug
2019-08-02 10:41:28,831 crawler_process.py[line:98] INFO update info arg
2019-08-02 10:41:28,838 crawler_process.py[line:98] INFO update info cfc
2019-08-02 10:41:28,841 crawler_process.py[line:98] INFO update info elc
2019-08-02 10:41:28,844 crawler_process.py[line:98] INFO update info oyg
2019-08-02 10:41:28,847 crawler_process.py[line:98] INFO update info ang
2019-08-02 10:41:28,850 crawler_process.py[line:98] INFO update info dgc
2019-08-02 10:41:28,853 crawler_process.py[line:98] INFO update info anc
2019-08-02 10:41:28,856 crawler_process.py[line:98] INFO update info skw
2019-08-02 10:41:28,859 crawler_process.py[line:98] INFO update info dggw
2019-08-02 10:41:28,862 crawler_process.py[line:98] INFO update info cfg
2019-08-02 10:41:28,865 crawler_process.py[line:98] INFO update info wpgg
2019-08-02 10:41:28,868 crawler_process.py[line:98] INFO update info mcg
2019-08-02 10:41:28,871 crawler_process.py[line:98] INFO update info icnud
2019-08-02 10:41:28,874 crawler_process.py[line:98] INFO update info pc
2019-08-02 10:43:15,702 crawler_process.py[line:98] INFO update info mug
2019-08-02 10:43:15,706 crawler_process.py[line:98] INFO update info alg
2019-08-02 10:43:15,710 crawler_process.py[line:98] INFO update info rsg
2019-08-02 10:43:15,715 crawler_process.py[line:98] INFO update info rog
2019-08-02 10:43:15,719 crawler_process.py[line:98] INFO update info r24g
2019-08-02 10:43:15,722 crawler_process.py[line:98] INFO update info lcg
2019-08-02 10:43:15,725 crawler_process.py[line:98] INFO update info lcc
2019-08-02 10:43:15,725 crawler_process.py[line:103] INFO lcc: pause the crawler
2019-08-02 10:43:15,729 crawler_process.py[line:98] INFO update info elgw
2019-08-02 10:43:15,733 crawler_process.py[line:98] INFO update info csg
2019-08-02 10:43:15,737 crawler_process.py[line:98] INFO update info bkgn
2019-08-02 10:43:15,740 crawler_process.py[line:98] INFO update info pog
2019-08-02 10:43:15,743 crawler_process.py[line:98] INFO update info apc
2019-08-02 10:43:15,747 crawler_process.py[line:98] INFO update info apg
2019-08-02 10:43:15,750 crawler_process.py[line:98] INFO update info fuc
2019-08-02 10:43:15,755 crawler_process.py[line:98] INFO update info mtg
2019-08-02 10:43:15,759 crawler_process.py[line:98] INFO update info hlg
2019-08-02 10:43:15,763 crawler_process.py[line:98] INFO update info tmeg
2019-08-02 10:43:15,766 crawler_process.py[line:98] INFO update info vrg
2019-08-02 10:43:15,770 crawler_process.py[line:98] INFO update info mkg
2019-08-02 10:43:15,774 crawler_process.py[line:98] INFO update info alc
2019-08-02 10:43:15,778 crawler_process.py[line:98] INFO update info bkc
2019-08-02 10:43:15,781 crawler_process.py[line:98] INFO update info mxg
2019-08-02 10:43:15,784 crawler_process.py[line:98] INFO update info fug
2019-08-02 10:43:15,788 crawler_process.py[line:98] INFO update info arg
2019-08-02 10:43:15,791 crawler_process.py[line:98] INFO update info cfc
2019-08-02 10:43:15,795 crawler_process.py[line:98] INFO update info elc
2019-08-02 10:43:15,798 crawler_process.py[line:98] INFO update info oyg
2019-08-02 10:43:15,801 crawler_process.py[line:98] INFO update info ang
2019-08-02 10:43:15,804 crawler_process.py[line:98] INFO update info dgc
2019-08-02 10:43:15,807 crawler_process.py[line:98] INFO update info anc
2019-08-02 10:43:15,808 crawler_process.py[line:103] INFO anc: pause the crawler
2019-08-02 10:43:15,813 crawler_process.py[line:98] INFO update info skw
2019-08-02 10:43:15,813 crawler_process.py[line:103] INFO skw: pause the crawler
2019-08-02 10:43:15,818 crawler_process.py[line:98] INFO update info dggw
2019-08-02 10:43:15,821 crawler_process.py[line:98] INFO update info cfg
2019-08-02 10:43:15,824 crawler_process.py[line:98] INFO update info wpgg
2019-08-02 10:43:15,828 crawler_process.py[line:98] INFO update info mcg
2019-08-02 10:43:15,831 crawler_process.py[line:98] INFO update info icnud
2019-08-02 10:43:15,834 crawler_process.py[line:98] INFO update info pc
2019-08-02 14:06:23,651 crawler_process.py[line:98] INFO update info mug
2019-08-02 14:06:23,655 crawler_process.py[line:98] INFO update info alg
2019-08-02 14:06:23,659 crawler_process.py[line:98] INFO update info rsg
2019-08-02 14:06:23,662 crawler_process.py[line:98] INFO update info rog
2019-08-02 14:06:23,666 crawler_process.py[line:98] INFO update info r24g
2019-08-02 14:06:23,669 crawler_process.py[line:98] INFO update info lcg
2019-08-02 14:06:23,672 crawler_process.py[line:98] INFO update info lcc
2019-08-02 14:06:23,672 crawler_process.py[line:100] INFO lcc:back to running
2019-08-02 14:06:23,676 crawler_process.py[line:98] INFO update info elgw
2019-08-02 14:06:23,680 crawler_process.py[line:98] INFO update info csg
2019-08-02 14:06:23,683 crawler_process.py[line:98] INFO update info bkgn
2019-08-02 14:06:23,687 crawler_process.py[line:98] INFO update info pog
2019-08-02 14:06:23,691 crawler_process.py[line:98] INFO update info apc
2019-08-02 14:06:23,697 crawler_process.py[line:98] INFO update info apg
2019-08-02 14:06:23,701 crawler_process.py[line:98] INFO update info fuc
2019-08-02 14:06:23,706 crawler_process.py[line:98] INFO update info mtg
2019-08-02 14:06:23,709 crawler_process.py[line:98] INFO update info hlg
2019-08-02 14:06:23,712 crawler_process.py[line:98] INFO update info tmeg
2019-08-02 14:06:23,715 crawler_process.py[line:98] INFO update info vrg
2019-08-02 14:06:23,718 crawler_process.py[line:98] INFO update info mkg
2019-08-02 14:06:23,721 crawler_process.py[line:98] INFO update info alc
2019-08-02 14:06:23,725 crawler_process.py[line:98] INFO update info bkc
2019-08-02 14:06:23,727 crawler_process.py[line:98] INFO update info mxg
2019-08-02 14:06:23,731 crawler_process.py[line:98] INFO update info fug
2019-08-02 14:06:23,735 crawler_process.py[line:98] INFO update info arg
2019-08-02 14:06:23,739 crawler_process.py[line:98] INFO update info cfc
2019-08-02 14:06:23,742 crawler_process.py[line:98] INFO update info elc
2019-08-02 14:06:23,745 crawler_process.py[line:98] INFO update info oyg
2019-08-02 14:06:23,748 crawler_process.py[line:98] INFO update info ang
2019-08-02 14:06:23,751 crawler_process.py[line:98] INFO update info dgc
2019-08-02 14:06:23,755 crawler_process.py[line:98] INFO update info anc
2019-08-02 14:06:23,756 crawler_process.py[line:100] INFO anc:back to running
2019-08-02 14:06:23,759 crawler_process.py[line:98] INFO update info skw
2019-08-02 14:06:23,759 crawler_process.py[line:100] INFO skw:back to running
2019-08-02 14:06:23,763 crawler_process.py[line:98] INFO update info dggw
2019-08-02 14:06:23,765 crawler_process.py[line:98] INFO update info cfg
2019-08-02 14:06:23,769 crawler_process.py[line:98] INFO update info wpgg
2019-08-02 14:06:23,772 crawler_process.py[line:98] INFO update info mcg
2019-08-02 14:06:23,775 crawler_process.py[line:98] INFO update info icnud
2019-08-02 14:06:23,780 crawler_process.py[line:98] INFO update info pc
2019-08-02 14:07:22,328 crawler_process.py[line:98] INFO update info mug
2019-08-02 14:07:22,332 crawler_process.py[line:98] INFO update info alg
2019-08-02 14:07:22,335 crawler_process.py[line:98] INFO update info rsg
2019-08-02 14:07:22,338 crawler_process.py[line:98] INFO update info rog
2019-08-02 14:07:22,341 crawler_process.py[line:98] INFO update info r24g
2019-08-02 14:07:22,344 crawler_process.py[line:98] INFO update info lcg
2019-08-02 14:07:22,347 crawler_process.py[line:98] INFO update info lcc
2019-08-02 14:07:22,347 crawler_process.py[line:103] INFO lcc: pause the crawler
2019-08-02 14:07:22,350 crawler_process.py[line:98] INFO update info elgw
2019-08-02 14:07:22,353 crawler_process.py[line:98] INFO update info csg
2019-08-02 14:07:22,356 crawler_process.py[line:98] INFO update info bkgn
2019-08-02 14:07:22,359 crawler_process.py[line:98] INFO update info pog
2019-08-02 14:07:22,362 crawler_process.py[line:98] INFO update info apc
2019-08-02 14:07:22,368 crawler_process.py[line:98] INFO update info apg
2019-08-02 14:07:22,373 crawler_process.py[line:98] INFO update info fuc
2019-08-02 14:07:22,380 crawler_process.py[line:98] INFO update info mtg
2019-08-02 14:07:22,383 crawler_process.py[line:98] INFO update info hlg
2019-08-02 14:07:22,389 crawler_process.py[line:98] INFO update info tmeg
2019-08-02 14:07:22,392 crawler_process.py[line:98] INFO update info vrg
2019-08-02 14:07:22,396 crawler_process.py[line:98] INFO update info mkg
2019-08-02 14:07:22,402 crawler_process.py[line:98] INFO update info alc
2019-08-02 14:07:22,408 crawler_process.py[line:98] INFO update info bkc
2019-08-02 14:07:22,418 crawler_process.py[line:98] INFO update info mxg
2019-08-02 14:07:22,423 crawler_process.py[line:98] INFO update info fug
2019-08-02 14:07:22,430 crawler_process.py[line:98] INFO update info arg
2019-08-02 14:07:22,435 crawler_process.py[line:98] INFO update info cfc
2019-08-02 14:07:22,439 crawler_process.py[line:98] INFO update info elc
2019-08-02 14:07:22,444 crawler_process.py[line:98] INFO update info oyg
2019-08-02 14:07:22,448 crawler_process.py[line:98] INFO update info ang
2019-08-02 14:07:22,457 crawler_process.py[line:98] INFO update info dgc
2019-08-02 14:07:22,462 crawler_process.py[line:98] INFO update info anc
2019-08-02 14:07:22,463 crawler_process.py[line:103] INFO anc: pause the crawler
2019-08-02 14:07:22,466 crawler_process.py[line:98] INFO update info skw
2019-08-02 14:07:22,466 crawler_process.py[line:103] INFO skw: pause the crawler
2019-08-02 14:07:22,470 crawler_process.py[line:98] INFO update info dggw
2019-08-02 14:07:22,474 crawler_process.py[line:98] INFO update info cfg
2019-08-02 14:07:22,478 crawler_process.py[line:98] INFO update info wpgg
2019-08-02 14:07:22,481 crawler_process.py[line:98] INFO update info mcg
2019-08-02 14:07:22,485 crawler_process.py[line:98] INFO update info icnud
2019-08-02 14:07:22,488 crawler_process.py[line:98] INFO update info pc
2019-08-02 14:31:24,958 crawler_process.py[line:98] INFO update info mug
2019-08-02 14:31:24,959 crawler_process.py[line:103] INFO mug: pause the crawler
2019-08-02 14:31:24,964 crawler_process.py[line:98] INFO update info alg
2019-08-02 14:31:24,964 crawler_process.py[line:103] INFO alg: pause the crawler
2019-08-02 14:31:24,972 crawler_process.py[line:98] INFO update info rsg
2019-08-02 14:31:24,972 crawler_process.py[line:103] INFO rsg: pause the crawler
2019-08-02 14:31:24,979 crawler_process.py[line:98] INFO update info rog
2019-08-02 14:31:24,979 crawler_process.py[line:103] INFO rog: pause the crawler
2019-08-02 14:31:24,983 crawler_process.py[line:98] INFO update info r24g
2019-08-02 14:31:24,983 crawler_process.py[line:103] INFO r24g: pause the crawler
2019-08-02 14:31:24,988 crawler_process.py[line:98] INFO update info lcg
2019-08-02 14:31:24,988 crawler_process.py[line:103] INFO lcg: pause the crawler
2019-08-02 14:31:24,992 crawler_process.py[line:98] INFO update info lcc
2019-08-02 14:31:24,996 crawler_process.py[line:98] INFO update info elgw
2019-08-02 14:31:24,996 crawler_process.py[line:103] INFO elgw: pause the crawler
2019-08-02 14:31:24,999 crawler_process.py[line:98] INFO update info csg
2019-08-02 14:31:24,999 crawler_process.py[line:103] INFO csg: pause the crawler
2019-08-02 14:31:25,003 crawler_process.py[line:98] INFO update info bkgn
2019-08-02 14:31:25,004 crawler_process.py[line:103] INFO bkgn: pause the crawler
2019-08-02 14:31:25,008 crawler_process.py[line:98] INFO update info pog
2019-08-02 14:31:25,008 crawler_process.py[line:103] INFO pog: pause the crawler
2019-08-02 14:31:25,012 crawler_process.py[line:98] INFO update info apc
2019-08-02 14:31:25,012 crawler_process.py[line:103] INFO apc: pause the crawler
2019-08-02 14:31:25,016 crawler_process.py[line:98] INFO update info apg
2019-08-02 14:31:25,016 crawler_process.py[line:103] INFO apg: pause the crawler
2019-08-02 14:31:25,020 crawler_process.py[line:98] INFO update info fuc
2019-08-02 14:31:25,020 crawler_process.py[line:103] INFO fuc: pause the crawler
2019-08-02 14:31:25,032 crawler_process.py[line:98] INFO update info mtg
2019-08-02 14:31:25,032 crawler_process.py[line:103] INFO mtg: pause the crawler
2019-08-02 14:31:25,036 crawler_process.py[line:98] INFO update info hlg
2019-08-02 14:31:25,037 crawler_process.py[line:103] INFO hlg: pause the crawler
2019-08-02 14:31:25,040 crawler_process.py[line:98] INFO update info tmeg
2019-08-02 14:31:25,040 crawler_process.py[line:103] INFO tmeg: pause the crawler
2019-08-02 14:31:25,043 crawler_process.py[line:98] INFO update info vrg
2019-08-02 14:31:25,044 crawler_process.py[line:103] INFO vrg: pause the crawler
2019-08-02 14:31:25,047 crawler_process.py[line:98] INFO update info mkg
2019-08-02 14:31:25,047 crawler_process.py[line:103] INFO mkg: pause the crawler
2019-08-02 14:31:25,051 crawler_process.py[line:98] INFO update info alc
2019-08-02 14:31:25,051 crawler_process.py[line:103] INFO alc: pause the crawler
2019-08-02 14:31:25,054 crawler_process.py[line:98] INFO update info bkc
2019-08-02 14:31:25,054 crawler_process.py[line:103] INFO bkc: pause the crawler
2019-08-02 14:31:25,057 crawler_process.py[line:98] INFO update info mxg
2019-08-02 14:31:25,057 crawler_process.py[line:103] INFO mxg: pause the crawler
2019-08-02 14:31:25,062 crawler_process.py[line:98] INFO update info fug
2019-08-02 14:31:25,062 crawler_process.py[line:103] INFO fug: pause the crawler
2019-08-02 14:31:25,065 crawler_process.py[line:98] INFO update info arg
2019-08-02 14:31:25,065 crawler_process.py[line:103] INFO arg: pause the crawler
2019-08-02 14:31:25,068 crawler_process.py[line:98] INFO update info cfc
2019-08-02 14:31:25,069 crawler_process.py[line:103] INFO cfc: pause the crawler
2019-08-02 14:31:25,072 crawler_process.py[line:98] INFO update info elc
2019-08-02 14:31:25,072 crawler_process.py[line:103] INFO elc: pause the crawler
2019-08-02 14:31:25,076 crawler_process.py[line:98] INFO update info oyg
2019-08-02 14:31:25,076 crawler_process.py[line:103] INFO oyg: pause the crawler
2019-08-02 14:31:25,079 crawler_process.py[line:98] INFO update info ang
2019-08-02 14:31:25,079 crawler_process.py[line:103] INFO ang: pause the crawler
2019-08-02 14:31:25,083 crawler_process.py[line:98] INFO update info dgc
2019-08-02 14:31:25,083 crawler_process.py[line:103] INFO dgc: pause the crawler
2019-08-02 14:31:25,086 crawler_process.py[line:98] INFO update info anc
2019-08-02 14:31:25,089 crawler_process.py[line:98] INFO update info skw
2019-08-02 14:31:25,093 crawler_process.py[line:98] INFO update info dggw
2019-08-02 14:31:25,093 crawler_process.py[line:103] INFO dggw: pause the crawler
2019-08-02 14:31:25,097 crawler_process.py[line:98] INFO update info cfg
2019-08-02 14:31:25,097 crawler_process.py[line:103] INFO cfg: pause the crawler
2019-08-02 14:31:25,101 crawler_process.py[line:98] INFO update info wpgg
2019-08-02 14:31:25,101 crawler_process.py[line:103] INFO wpgg: pause the crawler
2019-08-02 14:31:25,106 crawler_process.py[line:98] INFO update info mcg
2019-08-02 14:31:25,106 crawler_process.py[line:103] INFO mcg: pause the crawler
2019-08-02 14:31:25,111 crawler_process.py[line:98] INFO update info icnud
2019-08-02 14:31:25,111 crawler_process.py[line:103] INFO icnud: pause the crawler
2019-08-02 14:31:25,128 crawler_process.py[line:98] INFO update info pc
2019-08-02 14:31:25,128 crawler_process.py[line:103] INFO pc: pause the crawler
2019-08-02 14:39:25,563 crawler_process.py[line:117] INFO mug: update status
2019-08-02 14:39:25,568 crawler_process.py[line:117] INFO alg: update status
2019-08-02 14:39:25,571 crawler_process.py[line:117] INFO rsg: update status
2019-08-02 14:39:25,574 crawler_process.py[line:117] INFO rog: update status
2019-08-02 14:39:25,577 crawler_process.py[line:117] INFO r24g: update status
2019-08-02 14:39:25,580 crawler_process.py[line:117] INFO lcg: update status
2019-08-02 14:39:25,583 crawler_process.py[line:117] INFO lcc: update status
2019-08-02 14:39:25,585 crawler_process.py[line:117] INFO elgw: update status
2019-08-02 14:39:25,588 crawler_process.py[line:117] INFO csg: update status
2019-08-02 14:39:25,591 crawler_process.py[line:117] INFO bkgn: update status
2019-08-02 14:39:25,594 crawler_process.py[line:117] INFO pog: update status
2019-08-02 14:39:25,597 crawler_process.py[line:117] INFO apc: update status
2019-08-02 14:39:25,600 crawler_process.py[line:117] INFO apg: update status
2019-08-02 14:39:25,602 crawler_process.py[line:117] INFO fuc: update status
2019-08-02 14:39:25,605 crawler_process.py[line:117] INFO mtg: update status
2019-08-02 14:39:25,608 crawler_process.py[line:117] INFO hlg: update status
2019-08-02 14:39:25,610 crawler_process.py[line:117] INFO tmeg: update status
2019-08-02 14:39:25,614 crawler_process.py[line:117] INFO vrg: update status
2019-08-02 14:39:25,620 crawler_process.py[line:117] INFO mkg: update status
2019-08-02 14:39:25,624 crawler_process.py[line:117] INFO alc: update status
2019-08-02 14:39:25,627 crawler_process.py[line:117] INFO bkc: update status
2019-08-02 14:39:25,633 crawler_process.py[line:117] INFO mxg: update status
2019-08-02 14:39:25,636 crawler_process.py[line:117] INFO fug: update status
2019-08-02 14:39:25,638 crawler_process.py[line:117] INFO arg: update status
2019-08-02 14:39:25,641 crawler_process.py[line:117] INFO cfc: update status
2019-08-02 14:39:25,644 crawler_process.py[line:117] INFO elc: update status
2019-08-02 14:39:25,647 crawler_process.py[line:117] INFO oyg: update status
2019-08-02 14:39:25,651 crawler_process.py[line:117] INFO ang: update status
2019-08-02 14:39:25,654 crawler_process.py[line:117] INFO dgc: update status
2019-08-02 14:39:25,657 crawler_process.py[line:117] INFO anc: update status
2019-08-02 14:39:25,660 crawler_process.py[line:117] INFO dggw: update status
2019-08-02 14:39:25,663 crawler_process.py[line:117] INFO cfg: update status
2019-08-02 14:39:25,666 crawler_process.py[line:117] INFO wpgg: update status
2019-08-02 14:39:25,669 crawler_process.py[line:117] INFO mcg: update status
2019-08-02 14:39:25,672 crawler_process.py[line:117] INFO icnud: update status
2019-08-02 14:39:25,675 crawler_process.py[line:117] INFO pc: update status
2019-08-02 14:39:25,678 crawler_process.py[line:117] INFO skw: update status
2019-08-02 15:23:56,129 crawler_process.py[line:131] ERROR Traceback (most recent call last):
File "crawler_process.py", line 128, in main
pi = get_crawler_process_info()
File "crawler_process.py", line 87, in get_crawler_process_info
info_dict['wrong_radio'] = get_error_ratio(info_dict['task_code'])
File "crawler_process.py", line 63, in get_error_ratio
return round(response['aggregations']['avgFailRate']['value'], 3)
TypeError: type NoneType doesn't define __round__ method
2019-08-02 15:24:19,299 crawler_process.py[line:131] ERROR Traceback (most recent call last):
File "crawler_process.py", line 128, in main
pi = get_crawler_process_info()
File "crawler_process.py", line 87, in get_crawler_process_info
info_dict['wrong_radio'] = get_error_ratio(info_dict['task_code'])
File "crawler_process.py", line 63, in get_error_ratio
return round(response['aggregations']['avgFailRate']['value'], 3)
TypeError: type NoneType doesn't define __round__ method
2019-08-02 15:25:29,240 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "crawler_process.py", line 88, in get_crawler_process_info
info_dict['wrong_radio'] = get_error_ratio(info_dict['task_code'])
File "crawler_process.py", line 64, in get_error_ratio
return round(response['aggregations']['avgFailRate']['value'], 3)
TypeError: type NoneType doesn't define __round__ method
2019-08-02 15:27:21,435 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "crawler_process.py", line 88, in get_crawler_process_info
info_dict['wrong_radio'] = get_error_ratio(info_dict['task_code'])
File "crawler_process.py", line 63, in get_error_ratio
print(esponse['aggregations']['avgFailRate'])
NameError: name 'esponse' is not defined
2019-08-02 15:31:25,744 crawler_process.py[line:104] INFO skw: pause the crawler
2019-08-02 15:31:25,748 crawler_process.py[line:104] INFO anc: pause the crawler
2019-08-02 15:31:25,753 crawler_process.py[line:104] INFO lcc: pause the crawler
2019-08-02 17:30:21,911 crawler_process.py[line:101] INFO skw:back to running
2019-08-02 17:30:21,915 crawler_process.py[line:101] INFO anc:back to running
2019-08-02 17:30:21,919 crawler_process.py[line:101] INFO lcc:back to running
2019-08-02 22:10:38,818 crawler_process.py[line:104] INFO mtg: pause the crawler
2019-08-02 22:19:41,482 crawler_process.py[line:101] INFO mtg:back to running
2019-08-03 13:38:43,493 crawler_process.py[line:104] INFO hlg: pause the crawler
2019-08-04 00:02:33,516 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 392, in read_response
raise socket.error(SERVER_CLOSED_CONNECTION_ERROR)
OSError: Connection closed by server.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 668, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 403, in read_response
(e.args,))
redis.exceptions.ConnectionError: Error while reading from socket: ('Connection closed by server.',)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "./crawler_process.py", line 69, in get_crawler_process_info
flags = task_redis.hgetall('task_running_flag')
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 1967, in hgetall
return self.execute_command('HGETALL', name)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 674, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
2019-08-04 00:03:33,863 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 668, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "./crawler_process.py", line 69, in get_crawler_process_info
flags = task_redis.hgetall('task_running_flag')
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 1967, in hgetall
return self.execute_command('HGETALL', name)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 674, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
2019-08-04 00:04:34,140 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 668, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "./crawler_process.py", line 69, in get_crawler_process_info
flags = task_redis.hgetall('task_running_flag')
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 1967, in hgetall
return self.execute_command('HGETALL', name)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 674, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
2019-08-04 00:05:34,429 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 668, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "./crawler_process.py", line 69, in get_crawler_process_info
flags = task_redis.hgetall('task_running_flag')
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 1967, in hgetall
return self.execute_command('HGETALL', name)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 674, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
2019-08-04 00:06:34,724 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 668, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "./crawler_process.py", line 69, in get_crawler_process_info
flags = task_redis.hgetall('task_running_flag')
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 1967, in hgetall
return self.execute_command('HGETALL', name)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 674, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
2019-08-04 00:07:34,971 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 668, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "./crawler_process.py", line 69, in get_crawler_process_info
flags = task_redis.hgetall('task_running_flag')
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 1967, in hgetall
return self.execute_command('HGETALL', name)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 674, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
2019-08-04 00:08:35,175 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 484, in connect
sock = self._connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 541, in _connect
raise err
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 529, in _connect
sock.connect(socket_address)
OSError: [Errno 113] No route to host
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 667, in execute_command
connection.send_command(*args)
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 610, in send_command
self.send_packed_command(self.pack_command(*args))
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 585, in send_packed_command
self.connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 489, in connect
raise ConnectionError(self._error_message(e))
redis.exceptions.ConnectionError: Error 113 connecting to localhost:6379. No route to host.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 484, in connect
sock = self._connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 541, in _connect
raise err
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 529, in _connect
sock.connect(socket_address)
OSError: [Errno 113] No route to host
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "./crawler_process.py", line 69, in get_crawler_process_info
flags = task_redis.hgetall('task_running_flag')
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 1967, in hgetall
return self.execute_command('HGETALL', name)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 673, in execute_command
connection.send_command(*args)
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 610, in send_command
self.send_packed_command(self.pack_command(*args))
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 585, in send_packed_command
self.connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 489, in connect
raise ConnectionError(self._error_message(e))
redis.exceptions.ConnectionError: Error 113 connecting to localhost:6379. No route to host.
2019-08-04 00:09:35,727 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 484, in connect
sock = self._connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 541, in _connect
raise err
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 529, in _connect
sock.connect(socket_address)
OSError: [Errno 113] No route to host
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 667, in execute_command
connection.send_command(*args)
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 610, in send_command
self.send_packed_command(self.pack_command(*args))
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 585, in send_packed_command
self.connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 489, in connect
raise ConnectionError(self._error_message(e))
redis.exceptions.ConnectionError: Error 113 connecting to localhost:6379. No route to host.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 484, in connect
sock = self._connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 541, in _connect
raise err
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 529, in _connect
sock.connect(socket_address)
OSError: [Errno 113] No route to host
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "./crawler_process.py", line 69, in get_crawler_process_info
flags = task_redis.hgetall('task_running_flag')
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 1967, in hgetall
return self.execute_command('HGETALL', name)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 673, in execute_command
connection.send_command(*args)
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 610, in send_command
self.send_packed_command(self.pack_command(*args))
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 585, in send_packed_command
self.connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 489, in connect
raise ConnectionError(self._error_message(e))
redis.exceptions.ConnectionError: Error 113 connecting to localhost:6379. No route to host.
2019-08-04 00:10:36,273 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 668, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "./crawler_process.py", line 69, in get_crawler_process_info
flags = task_redis.hgetall('task_running_flag')
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 1967, in hgetall
return self.execute_command('HGETALL', name)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 674, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
2019-08-04 00:11:36,553 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 668, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "./crawler_process.py", line 69, in get_crawler_process_info
flags = task_redis.hgetall('task_running_flag')
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 1967, in hgetall
return self.execute_command('HGETALL', name)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 674, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
2019-08-04 00:12:36,821 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 668, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "./crawler_process.py", line 69, in get_crawler_process_info
flags = task_redis.hgetall('task_running_flag')
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 1967, in hgetall
return self.execute_command('HGETALL', name)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 674, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
2019-08-04 00:13:37,161 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 484, in connect
sock = self._connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 541, in _connect
raise err
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 529, in _connect
sock.connect(socket_address)
OSError: [Errno 113] No route to host
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 667, in execute_command
connection.send_command(*args)
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 610, in send_command
self.send_packed_command(self.pack_command(*args))
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 585, in send_packed_command
self.connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 489, in connect
raise ConnectionError(self._error_message(e))
redis.exceptions.ConnectionError: Error 113 connecting to localhost:6379. No route to host.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 484, in connect
sock = self._connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 541, in _connect
raise err
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 529, in _connect
sock.connect(socket_address)
OSError: [Errno 113] No route to host
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "./crawler_process.py", line 69, in get_crawler_process_info
flags = task_redis.hgetall('task_running_flag')
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 1967, in hgetall
return self.execute_command('HGETALL', name)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 673, in execute_command
connection.send_command(*args)
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 610, in send_command
self.send_packed_command(self.pack_command(*args))
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 585, in send_packed_command
self.connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 489, in connect
raise ConnectionError(self._error_message(e))
redis.exceptions.ConnectionError: Error 113 connecting to localhost:6379. No route to host.
2019-08-04 00:14:38,447 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 484, in connect
sock = self._connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 541, in _connect
raise err
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 529, in _connect
sock.connect(socket_address)
OSError: [Errno 113] No route to host
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 667, in execute_command
connection.send_command(*args)
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 610, in send_command
self.send_packed_command(self.pack_command(*args))
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 585, in send_packed_command
self.connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 489, in connect
raise ConnectionError(self._error_message(e))
redis.exceptions.ConnectionError: Error 113 connecting to localhost:6379. No route to host.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 484, in connect
sock = self._connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 541, in _connect
raise err
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 529, in _connect
sock.connect(socket_address)
OSError: [Errno 113] No route to host
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "./crawler_process.py", line 69, in get_crawler_process_info
flags = task_redis.hgetall('task_running_flag')
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 1967, in hgetall
return self.execute_command('HGETALL', name)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 673, in execute_command
connection.send_command(*args)
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 610, in send_command
self.send_packed_command(self.pack_command(*args))
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 585, in send_packed_command
self.connect()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 489, in connect
raise ConnectionError(self._error_message(e))
redis.exceptions.ConnectionError: Error 113 connecting to localhost:6379. No route to host.
2019-08-05 09:14:03,184 crawler_process.py[line:101] INFO hlg:back to running
2019-08-05 09:27:07,003 crawler_process.py[line:101] INFO hlg:back to running
2019-08-05 09:28:07,285 crawler_process.py[line:107] INFO hlg: kill the process
2019-08-05 11:42:44,838 crawler_process.py[line:104] INFO alg: pause the crawler
2019-08-05 11:44:45,424 crawler_process.py[line:107] INFO alg: kill the process
2019-08-05 14:19:28,970 crawler_process.py[line:104] INFO dggw: pause the crawler
2019-08-05 14:19:28,975 crawler_process.py[line:104] INFO mug: pause the crawler
2019-08-05 14:22:29,741 crawler_process.py[line:107] INFO dggw: kill the process
2019-08-05 14:22:29,749 crawler_process.py[line:107] INFO mug: kill the process
2019-08-06 10:33:05,075 crawler_process.py[line:104] INFO dggw: pause the crawler
2019-08-06 10:36:06,013 crawler_process.py[line:118] INFO dggw: update status
2019-08-06 10:56:11,518 crawler_process.py[line:104] INFO dggw: pause the crawler
2019-08-06 10:58:12,072 crawler_process.py[line:107] INFO dggw: kill the process
2019-08-06 14:31:10,578 crawler_process.py[line:104] INFO csg: pause the crawler
2019-08-06 14:31:10,582 crawler_process.py[line:104] INFO mtg: pause the crawler
2019-08-06 14:31:10,586 crawler_process.py[line:104] INFO elgw: pause the crawler
2019-08-06 14:31:10,589 crawler_process.py[line:104] INFO pog: pause the crawler
2019-08-06 14:31:10,593 crawler_process.py[line:104] INFO pc: pause the crawler
2019-08-06 14:31:10,597 crawler_process.py[line:104] INFO oyg: pause the crawler
2019-08-06 14:31:10,603 crawler_process.py[line:104] INFO ang: pause the crawler
2019-08-06 14:31:10,606 crawler_process.py[line:104] INFO lcg: pause the crawler
2019-08-06 14:31:10,610 crawler_process.py[line:104] INFO r24g: pause the crawler
2019-08-06 14:31:10,614 crawler_process.py[line:104] INFO wpgg: pause the crawler
2019-08-06 14:31:10,618 crawler_process.py[line:104] INFO icnud: pause the crawler
2019-08-06 14:31:10,622 crawler_process.py[line:104] INFO mcg: pause the crawler
2019-08-06 14:31:10,625 crawler_process.py[line:104] INFO cfg: pause the crawler
2019-08-06 14:31:10,629 crawler_process.py[line:104] INFO dgc: pause the crawler
2019-08-06 14:31:10,632 crawler_process.py[line:104] INFO elc: pause the crawler
2019-08-06 14:31:10,635 crawler_process.py[line:104] INFO cfc: pause the crawler
2019-08-06 14:31:10,639 crawler_process.py[line:104] INFO arg: pause the crawler
2019-08-06 14:31:10,642 crawler_process.py[line:104] INFO mxg: pause the crawler
2019-08-06 14:31:10,645 crawler_process.py[line:104] INFO bkc: pause the crawler
2019-08-06 14:31:10,649 crawler_process.py[line:104] INFO alc: pause the crawler
2019-08-06 14:31:10,652 crawler_process.py[line:104] INFO mkg: pause the crawler
2019-08-06 14:31:10,655 crawler_process.py[line:104] INFO vrg: pause the crawler
2019-08-06 14:31:10,659 crawler_process.py[line:104] INFO tmeg: pause the crawler
2019-08-06 14:31:10,662 crawler_process.py[line:104] INFO fuc: pause the crawler
2019-08-06 14:31:10,665 crawler_process.py[line:104] INFO apg: pause the crawler
2019-08-06 14:31:10,669 crawler_process.py[line:104] INFO apc: pause the crawler
2019-08-06 14:31:10,672 crawler_process.py[line:104] INFO bkgn: pause the crawler
2019-08-06 14:31:10,676 crawler_process.py[line:104] INFO rog: pause the crawler
2019-08-06 14:31:10,680 crawler_process.py[line:104] INFO rsg: pause the crawler
2019-08-06 14:31:10,684 crawler_process.py[line:104] INFO skw: pause the crawler
2019-08-06 14:31:10,687 crawler_process.py[line:104] INFO anc: pause the crawler
2019-08-06 14:31:10,691 crawler_process.py[line:104] INFO lcc: pause the crawler
2019-08-06 14:31:10,696 crawler_process.py[line:104] INFO dggw: pause the crawler
2019-08-06 14:31:10,700 crawler_process.py[line:104] INFO hlg: pause the crawler
2019-08-06 14:31:10,704 crawler_process.py[line:104] INFO alg: pause the crawler
2019-08-06 14:31:10,708 crawler_process.py[line:104] INFO mug: pause the crawler
2019-08-06 14:33:11,212 crawler_process.py[line:101] INFO dggw:back to running
2019-08-06 14:33:11,221 crawler_process.py[line:101] INFO mug:back to running
2019-08-06 14:51:42,343 crawler_process.py[line:101] INFO pc:back to running
2019-08-06 15:29:53,111 crawler_process.py[line:101] INFO csg:back to running
2019-08-06 15:29:53,115 crawler_process.py[line:101] INFO mtg:back to running
2019-08-06 15:29:53,117 crawler_process.py[line:101] INFO elgw:back to running
2019-08-06 15:29:53,122 crawler_process.py[line:101] INFO pog:back to running
2019-08-06 15:29:53,129 crawler_process.py[line:101] INFO oyg:back to running
2019-08-06 15:29:53,133 crawler_process.py[line:101] INFO ang:back to running
2019-08-06 15:29:53,137 crawler_process.py[line:101] INFO lcg:back to running
2019-08-06 15:29:53,141 crawler_process.py[line:101] INFO r24g:back to running
2019-08-06 15:29:53,146 crawler_process.py[line:101] INFO wpgg:back to running
2019-08-06 15:29:53,150 crawler_process.py[line:101] INFO icnud:back to running
2019-08-06 15:29:53,159 crawler_process.py[line:101] INFO mcg:back to running
2019-08-06 15:29:53,170 crawler_process.py[line:101] INFO cfg:back to running
2019-08-06 15:29:53,174 crawler_process.py[line:101] INFO dgc:back to running
2019-08-06 15:29:53,177 crawler_process.py[line:101] INFO elc:back to running
2019-08-06 15:29:53,180 crawler_process.py[line:101] INFO cfc:back to running
2019-08-06 15:29:53,184 crawler_process.py[line:101] INFO arg:back to running
2019-08-06 15:29:53,188 crawler_process.py[line:101] INFO mxg:back to running
2019-08-06 15:29:53,191 crawler_process.py[line:101] INFO bkc:back to running
2019-08-06 15:29:53,194 crawler_process.py[line:101] INFO alc:back to running
2019-08-06 15:29:53,198 crawler_process.py[line:101] INFO mkg:back to running
2019-08-06 15:29:53,201 crawler_process.py[line:101] INFO vrg:back to running
2019-08-06 15:29:53,204 crawler_process.py[line:101] INFO tmeg:back to running
2019-08-06 15:29:53,208 crawler_process.py[line:101] INFO fuc:back to running
2019-08-06 15:29:53,213 crawler_process.py[line:101] INFO apg:back to running
2019-08-06 15:29:53,216 crawler_process.py[line:101] INFO apc:back to running
2019-08-06 15:29:53,219 crawler_process.py[line:101] INFO bkgn:back to running
2019-08-06 15:29:53,223 crawler_process.py[line:101] INFO rog:back to running
2019-08-06 15:29:53,226 crawler_process.py[line:101] INFO rsg:back to running
2019-08-06 15:29:53,230 crawler_process.py[line:101] INFO skw:back to running
2019-08-06 15:29:53,233 crawler_process.py[line:101] INFO anc:back to running
2019-08-06 15:29:53,238 crawler_process.py[line:101] INFO lcc:back to running
2019-08-06 15:29:53,251 crawler_process.py[line:101] INFO hlg:back to running
2019-08-06 15:29:53,255 crawler_process.py[line:101] INFO alg:back to running
2019-08-06 16:00:12,955 crawler_process.py[line:132] ERROR Traceback (most recent call last):
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 668, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./crawler_process.py", line 129, in main
pi = get_crawler_process_info()
File "./crawler_process.py", line 69, in get_crawler_process_info
flags = task_redis.hgetall('task_running_flag')
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 1967, in hgetall
return self.execute_command('HGETALL', name)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 674, in execute_command
return self.parse_response(connection, command_name, **options)
File "/usr/local/python3/lib/python3.6/site-packages/redis/client.py", line 680, in parse_response
response = connection.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 624, in read_response
response = self._parser.read_response()
File "/usr/local/python3/lib/python3.6/site-packages/redis/connection.py", line 421, in read_response
raise response
redis.exceptions.BusyLoadingError: Redis is loading the dataset in memory
2019-08-06 16:01:13,203 crawler_process.py[line:118] INFO csg: update status
2019-08-06 16:01:13,206 crawler_process.py[line:118] INFO mtg: update status
2019-08-06 16:01:13,209 crawler_process.py[line:118] INFO elgw: update status
2019-08-06 16:01:13,212 crawler_process.py[line:118] INFO pog: update status
2019-08-06 16:01:13,215 crawler_process.py[line:118] INFO pc: update status
2019-08-06 16:01:13,217 crawler_process.py[line:118] INFO oyg: update status
2019-08-06 16:01:13,220 crawler_process.py[line:118] INFO ang: update status
2019-08-06 16:01:13,223 crawler_process.py[line:118] INFO lcg: update status
2019-08-06 16:01:13,226 crawler_process.py[line:118] INFO r24g: update status
2019-08-06 16:01:13,229 crawler_process.py[line:118] INFO wpgg: update status
2019-08-06 16:01:13,232 crawler_process.py[line:118] INFO icnud: update status
2019-08-06 16:01:13,235 crawler_process.py[line:118] INFO mcg: update status
2019-08-06 16:01:13,239 crawler_process.py[line:118] INFO cfg: update status
2019-08-06 16:01:13,242 crawler_process.py[line:118] INFO dgc: update status
2019-08-06 16:01:13,244 crawler_process.py[line:118] INFO elc: update status
2019-08-06 16:01:13,248 crawler_process.py[line:118] INFO cfc: update status
2019-08-06 16:01:13,250 crawler_process.py[line:118] INFO arg: update status
2019-08-06 16:01:13,253 crawler_process.py[line:118] INFO mxg: update status
2019-08-06 16:01:13,256 crawler_process.py[line:118] INFO bkc: update status
2019-08-06 16:01:13,259 crawler_process.py[line:118] INFO alc: update status
2019-08-06 16:01:13,264 crawler_process.py[line:118] INFO mkg: update status
2019-08-06 16:01:13,266 crawler_process.py[line:118] INFO vrg: update status
2019-08-06 16:01:13,269 crawler_process.py[line:118] INFO tmeg: update status
2019-08-06 16:01:13,272 crawler_process.py[line:118] INFO fuc: update status
2019-08-06 16:01:13,275 crawler_process.py[line:118] INFO apg: update status
2019-08-06 16:01:13,278 crawler_process.py[line:118] INFO apc: update status
2019-08-06 16:01:13,281 crawler_process.py[line:118] INFO bkgn: update status
2019-08-06 16:01:13,285 crawler_process.py[line:118] INFO rog: update status
2019-08-06 16:01:13,289 crawler_process.py[line:118] INFO rsg: update status
2019-08-06 16:01:13,291 crawler_process.py[line:118] INFO skw: update status
2019-08-06 16:01:13,294 crawler_process.py[line:118] INFO anc: update status
2019-08-06 16:01:13,297 crawler_process.py[line:118] INFO lcc: update status
2019-08-06 16:01:13,300 crawler_process.py[line:118] INFO vrg: update status
2019-08-06 16:01:13,303 crawler_process.py[line:118] INFO hlg: update status
2019-08-06 16:01:13,306 crawler_process.py[line:118] INFO alg: update status
2019-08-06 16:01:13,308 crawler_process.py[line:118] INFO mug: update status
2019-08-06 16:01:13,311 crawler_process.py[line:118] INFO dggw: update status
2019-08-06 16:16:15,233 crawler_process.py[line:118] INFO fuc: update status
2019-08-06 16:16:15,236 crawler_process.py[line:118] INFO tmeg: update status
2019-08-06 19:58:19,438 crawler_process.py[line:107] INFO pc: kill the process
2019-08-06 20:22:25,922 crawler_process.py[line:118] INFO pc: update status
2019-08-06 20:44:31,671 crawler_process.py[line:118] INFO pc: update status
2019-08-06 21:13:39,420 crawler_process.py[line:118] INFO pc: update status
2019-08-06 21:40:46,578 crawler_process.py[line:118] INFO pc: update status
2019-08-07 14:28:05,900 crawler_process.py[line:118] INFO pct: update status
2019-08-07 14:30:06,407 crawler_process.py[line:118] INFO pct: update status
2019-08-07 14:39:08,756 crawler_process.py[line:118] INFO pct3: update status
2019-08-07 14:44:10,176 crawler_process.py[line:118] INFO pct3: update status
2019-08-07 16:34:39,780 crawler_process.py[line:118] INFO pct4: update status
2019-08-08 09:31:07,994 crawler_process.py[line:104] INFO dggw: pause the crawler
2019-08-08 09:49:12,868 crawler_process.py[line:107] INFO dggw: kill the process
2019-08-08 09:56:14,729 crawler_process.py[line:104] INFO mug: pause the crawler
2019-08-08 10:01:15,999 crawler_process.py[line:107] INFO mug: kill the process
2019-08-08 10:21:21,452 crawler_process.py[line:104] INFO mcg: pause the crawler
2019-08-08 10:28:23,393 crawler_process.py[line:118] INFO mcg: update status
2019-08-08 19:01:40,777 crawler_process.py[line:107] INFO pct5: kill the process
2019-08-11 01:09:57,190 crawler_process.py[line:104] INFO dggw: pause the crawler
2019-08-11 20:01:56,443 crawler_process.py[line:104] INFO dggw: pause the crawler
2019-08-12 09:13:24,602 crawler_process.py[line:101] INFO dggw:back to running
2019-08-12 10:24:43,682 crawler_process.py[line:104] INFO dggw: pause the crawler
2019-08-12 10:26:44,192 crawler_process.py[line:107] INFO dggw: kill the process
2019-08-12 12:15:12,917 crawler_process.py[line:107] INFO pct6: kill the process
2019-08-12 13:27:31,269 crawler_process.py[line:104] INFO dggw: pause the crawler
2019-08-12 13:41:34,794 crawler_process.py[line:101] INFO dggw:back to running
2019-08-12 13:54:38,097 crawler_process.py[line:104] INFO dggw: pause the crawler
2019-08-12 13:56:38,659 crawler_process.py[line:101] INFO dggw:back to running
2019-08-12 17:08:27,896 crawler_process.py[line:104] INFO dggw: pause the crawler
2019-08-12 17:13:29,157 crawler_process.py[line:101] INFO dggw:back to running
2019-08-12 17:32:34,041 crawler_process.py[line:104] INFO dggw: pause the crawler
2019-08-12 17:38:35,549 crawler_process.py[line:101] INFO dggw:back to running
2019-08-12 22:47:54,100 crawler_process.py[line:104] INFO dggw: pause the crawler
2019-08-12 22:48:54,379 crawler_process.py[line:107] INFO dggw: kill the process
2019-08-13 09:33:39,172 crawler_process.py[line:104] INFO mtg: pause the crawler
2019-08-13 09:45:42,385 crawler_process.py[line:118] INFO mtg: update status
2019-08-13 22:49:05,439 crawler_process.py[line:104] INFO pc: pause the crawler
2019-08-14 12:25:33,776 crawler_process.py[line:101] INFO pc:back to running
2019-08-14 14:05:59,162 crawler_process.py[line:104] INFO pc: pause the crawler
2019-08-14 14:13:00,956 crawler_process.py[line:101] INFO pc:back to running
2019-08-14 16:50:42,361 crawler_process.py[line:118] INFO pct7: update status
2019-08-14 17:01:45,427 crawler_process.py[line:118] INFO pct8: update status
2019-08-14 17:20:50,527 crawler_process.py[line:118] INFO pct8: update status
2019-08-14 18:01:01,365 crawler_process.py[line:118] INFO pct8: update status
2019-08-14 18:12:04,138 crawler_process.py[line:104] INFO pc: pause the crawler
2019-08-14 20:37:42,264 crawler_process.py[line:101] INFO pc:back to running
2019-08-15 15:45:40,297 crawler_process.py[line:104] INFO pc: pause the crawler
2019-08-15 22:39:28,266 crawler_process.py[line:101] INFO pc:back to running
2019-08-16 09:54:23,553 crawler_process.py[line:118] INFO pct8: update status
2019-08-16 10:15:28,845 crawler_process.py[line:118] INFO pct9: update status
2019-08-16 11:33:48,944 crawler_process.py[line:118] INFO pct9: update status
2019-08-16 11:39:50,502 crawler_process.py[line:118] INFO pct9: update status
2019-08-16 11:53:54,157 crawler_process.py[line:118] INFO pct9: update status
2019-08-16 11:53:54,160 crawler_process.py[line:118] INFO pct9: update status
2019-08-16 12:37:05,257 crawler_process.py[line:118] INFO pct9: update status
2019-08-16 12:52:09,110 crawler_process.py[line:118] INFO pct9: update status
2019-08-16 12:57:10,404 crawler_process.py[line:118] INFO pct8: update status
2019-08-16 13:13:14,667 crawler_process.py[line:118] INFO pct8: update status
2019-08-16 13:19:16,410 crawler_process.py[line:118] INFO pct8: update status
2019-08-16 13:32:19,972 crawler_process.py[line:118] INFO pct8: update status
2019-08-16 13:36:20,944 crawler_process.py[line:104] INFO pc: pause the crawler
2019-08-16 13:36:20,947 crawler_process.py[line:104] INFO csg: pause the crawler
2019-08-16 13:36:20,950 crawler_process.py[line:104] INFO pog: pause the crawler
2019-08-16 13:36:20,954 crawler_process.py[line:104] INFO rsg: pause the crawler
2019-08-16 13:36:20,957 crawler_process.py[line:104] INFO rog: pause the crawler
2019-08-16 13:36:20,960 crawler_process.py[line:104] INFO oyg: pause the crawler
2019-08-16 13:36:20,963 crawler_process.py[line:104] INFO alg: pause the crawler
2019-08-16 13:36:20,966 crawler_process.py[line:104] INFO lcg: pause the crawler
2019-08-16 13:36:20,969 crawler_process.py[line:104] INFO arg: pause the crawler
2019-08-16 13:36:20,973 crawler_process.py[line:104] INFO mxg: pause the crawler
2019-08-16 13:36:20,976 crawler_process.py[line:104] INFO wpgg: pause the crawler
2019-08-16 13:36:20,979 crawler_process.py[line:104] INFO hlg: pause the crawler
2019-08-16 13:36:20,983 crawler_process.py[line:104] INFO vrg: pause the crawler
2019-08-16 13:36:20,986 crawler_process.py[line:104] INFO lcc: pause the crawler
2019-08-16 13:36:20,989 crawler_process.py[line:104] INFO anc: pause the crawler
2019-08-16 13:36:20,992 crawler_process.py[line:104] INFO skw: pause the crawler
2019-08-16 13:36:20,995 crawler_process.py[line:104] INFO bkgn: pause the crawler
2019-08-16 13:36:20,998 crawler_process.py[line:104] INFO apc: pause the crawler
2019-08-16 13:36:21,000 crawler_process.py[line:104] INFO apg: pause the crawler
2019-08-16 13:36:21,004 crawler_process.py[line:104] INFO fuc: pause the crawler
2019-08-16 13:36:21,007 crawler_process.py[line:104] INFO tmeg: pause the crawler
2019-08-16 13:36:21,010 crawler_process.py[line:104] INFO mkg: pause the crawler
2019-08-16 13:36:21,013 crawler_process.py[line:104] INFO alc: pause the crawler
2019-08-16 13:36:21,016 crawler_process.py[line:104] INFO bkc: pause the crawler
2019-08-16 13:36:21,019 crawler_process.py[line:104] INFO cfc: pause the crawler
2019-08-16 13:36:21,022 crawler_process.py[line:104] INFO elc: pause the crawler
2019-08-16 13:36:21,026 crawler_process.py[line:104] INFO dgc: pause the crawler
2019-08-16 13:36:21,030 crawler_process.py[line:104] INFO cfg: pause the crawler
2019-08-16 13:36:21,035 crawler_process.py[line:104] INFO icnud: pause the crawler
2019-08-16 13:36:21,038 crawler_process.py[line:104] INFO r24g: pause the crawler
2019-08-16 13:36:21,044 crawler_process.py[line:104] INFO ang: pause the crawler
2019-08-16 13:36:21,048 crawler_process.py[line:104] INFO elgw: pause the crawler
2019-08-16 13:36:21,053 crawler_process.py[line:104] INFO dggw: pause the crawler
2019-08-16 13:36:21,057 crawler_process.py[line:104] INFO mug: pause the crawler
2019-08-16 13:36:21,060 crawler_process.py[line:104] INFO mcg: pause the crawler
2019-08-16 13:37:21,203 crawler_process.py[line:104] INFO mtg: pause the crawler
2019-08-16 13:44:23,048 crawler_process.py[line:118] INFO pc: update status
2019-08-16 14:16:30,599 crawler_process.py[line:101] INFO csg:back to running
2019-08-16 14:16:30,602 crawler_process.py[line:101] INFO pog:back to running
2019-08-16 14:16:30,605 crawler_process.py[line:101] INFO rsg:back to running
2019-08-16 14:16:30,608 crawler_process.py[line:101] INFO rog:back to running
2019-08-16 14:16:30,611 crawler_process.py[line:101] INFO oyg:back to running
2019-08-16 14:16:30,614 crawler_process.py[line:101] INFO alg:back to running
2019-08-16 14:16:30,619 crawler_process.py[line:101] INFO lcg:back to running
2019-08-16 14:16:30,622 crawler_process.py[line:101] INFO arg:back to running
2019-08-16 14:16:30,625 crawler_process.py[line:101] INFO mxg:back to running
2019-08-16 14:16:30,628 crawler_process.py[line:101] INFO wpgg:back to running
2019-08-16 14:16:30,631 crawler_process.py[line:101] INFO hlg:back to running
2019-08-16 14:16:30,635 crawler_process.py[line:101] INFO vrg:back to running
2019-08-16 14:16:30,638 crawler_process.py[line:101] INFO lcc:back to running
2019-08-16 14:16:30,641 crawler_process.py[line:101] INFO anc:back to running
2019-08-16 14:16:30,644 crawler_process.py[line:101] INFO skw:back to running
2019-08-16 14:16:30,647 crawler_process.py[line:101] INFO bkgn:back to running
2019-08-16 14:16:30,649 crawler_process.py[line:101] INFO apc:back to running
2019-08-16 14:16:30,652 crawler_process.py[line:101] INFO apg:back to running
2019-08-16 14:16:30,655 crawler_process.py[line:101] INFO fuc:back to running
2019-08-16 14:16:30,659 crawler_process.py[line:101] INFO tmeg:back to running
2019-08-16 14:16:30,662 crawler_process.py[line:101] INFO mkg:back to running
2019-08-16 14:16:30,665 crawler_process.py[line:101] INFO alc:back to running
2019-08-16 14:16:30,668 crawler_process.py[line:101] INFO bkc:back to running
2019-08-16 14:16:30,672 crawler_process.py[line:101] INFO cfc:back to running
2019-08-16 14:16:30,675 crawler_process.py[line:101] INFO elc:back to running
2019-08-16 14:16:30,679 crawler_process.py[line:101] INFO dgc:back to running
2019-08-16 14:16:30,682 crawler_process.py[line:101] INFO cfg:back to running
2019-08-16 14:16:30,685 crawler_process.py[line:101] INFO icnud:back to running
2019-08-16 14:16:30,687 crawler_process.py[line:101] INFO r24g:back to running
2019-08-16 14:16:30,690 crawler_process.py[line:101] INFO ang:back to running
2019-08-16 14:16:30,693 crawler_process.py[line:101] INFO elgw:back to running
2019-08-16 14:16:30,696 crawler_process.py[line:101] INFO dggw:back to running
2019-08-16 14:16:30,700 crawler_process.py[line:101] INFO mug:back to running
2019-08-16 14:16:30,703 crawler_process.py[line:101] INFO mcg:back to running
2019-08-16 14:17:30,845 crawler_process.py[line:101] INFO mtg:back to running
2019-08-16 14:43:37,336 crawler_process.py[line:107] INFO test: kill the process
2019-08-16 15:16:45,569 crawler_process.py[line:118] INFO test: update status
2019-08-16 15:30:59,533 crawler_process.py[line:121] INFO csg: update status
2019-08-16 15:30:59,536 crawler_process.py[line:121] INFO pog: update status
2019-08-16 15:30:59,538 crawler_process.py[line:121] INFO rsg: update status
2019-08-16 15:30:59,541 crawler_process.py[line:121] INFO rog: update status
2019-08-16 15:30:59,543 crawler_process.py[line:121] INFO oyg: update status
2019-08-16 15:30:59,546 crawler_process.py[line:121] INFO alg: update status
2019-08-16 15:30:59,549 crawler_process.py[line:121] INFO lcg: update status
2019-08-16 15:30:59,552 crawler_process.py[line:121] INFO arg: update status
2019-08-16 15:30:59,554 crawler_process.py[line:121] INFO mxg: update status
2019-08-16 15:30:59,557 crawler_process.py[line:121] INFO wpgg: update status
2019-08-16 15:30:59,559 crawler_process.py[line:121] INFO hlg: update status
2019-08-16 15:30:59,562 crawler_process.py[line:121] INFO vrg: update status
2019-08-16 15:30:59,564 crawler_process.py[line:121] INFO lcc: update status
2019-08-16 15:30:59,567 crawler_process.py[line:121] INFO anc: update status
2019-08-16 15:30:59,570 crawler_process.py[line:121] INFO skw: update status
2019-08-16 15:30:59,572 crawler_process.py[line:121] INFO bkgn: update status
2019-08-16 15:30:59,575 crawler_process.py[line:121] INFO apc: update status
2019-08-16 15:30:59,577 crawler_process.py[line:121] INFO apg: update status
2019-08-16 15:30:59,580 crawler_process.py[line:121] INFO fuc: update status
2019-08-16 15:30:59,582 crawler_process.py[line:121] INFO tmeg: update status
2019-08-16 15:30:59,585 crawler_process.py[line:121] INFO mkg: update status
2019-08-16 15:30:59,587 crawler_process.py[line:121] INFO alc: update status
2019-08-16 15:30:59,590 crawler_process.py[line:121] INFO bkc: update status
2019-08-16 15:30:59,593 crawler_process.py[line:121] INFO cfc: update status
2019-08-16 15:30:59,597 crawler_process.py[line:121] INFO elc: update status
2019-08-16 15:30:59,602 crawler_process.py[line:121] INFO dgc: update status
2019-08-16 15:30:59,604 crawler_process.py[line:121] INFO cfg: update status
2019-08-16 15:30:59,607 crawler_process.py[line:121] INFO icnud: update status
2019-08-16 15:30:59,610 crawler_process.py[line:121] INFO r24g: update status
2019-08-16 15:30:59,613 crawler_process.py[line:121] INFO ang: update status
2019-08-16 15:30:59,615 crawler_process.py[line:121] INFO elgw: update status
2019-08-16 15:30:59,618 crawler_process.py[line:121] INFO mug: update status
2019-08-16 15:30:59,621 crawler_process.py[line:121] INFO mcg: update status
2019-08-16 15:30:59,623 crawler_process.py[line:121] INFO dggw: update status
2019-08-16 15:30:59,626 crawler_process.py[line:121] INFO mtg: update status
2019-08-16 15:30:59,627 crawler_process.py[line:131] INFO new process run nohup python3 /data/cage/tas_caller/async_tas_caller.py -r test -c 10 -mq test_queue -ef goods> /dev/null &
2019-08-16 15:30:59,629 crawler_process.py[line:135] INFO started process {'cmdline': ['/bin/sh', '-c', 'nohup python3 /data/cage/tas_caller/async_tas_caller.py -r test -c 10 -mq test_queue -ef goods> /dev/null &'], 'cpu_times': pcputimes(user=0.0, system=0.0, children_user=0.0, children_system=0.0), 'create_time': 1565940659.16}
2019-08-16 15:31:49,241 crawler_process.py[line:107] INFO csg: kill the process
2019-08-16 15:31:49,250 crawler_process.py[line:107] INFO pog: kill the process
2019-08-16 15:31:49,259 crawler_process.py[line:107] INFO rsg: kill the process
2019-08-16 15:31:49,268 crawler_process.py[line:107] INFO rog: kill the process
2019-08-16 15:31:49,274 crawler_process.py[line:107] INFO oyg: kill the process
2019-08-16 15:31:49,280 crawler_process.py[line:107] INFO alg: kill the process
2019-08-16 15:31:49,286 crawler_process.py[line:107] INFO lcg: kill the process
2019-08-16 15:31:49,293 crawler_process.py[line:107] INFO arg: kill the process
2019-08-16 15:31:49,299 crawler_process.py[line:107] INFO mxg: kill the process
2019-08-16 15:31:49,306 crawler_process.py[line:107] INFO wpgg: kill the process
2019-08-16 15:31:49,314 crawler_process.py[line:107] INFO hlg: kill the process
2019-08-16 15:31:49,320 crawler_process.py[line:107] INFO vrg: kill the process
2019-08-16 15:31:49,326 crawler_process.py[line:107] INFO lcc: kill the process
2019-08-16 15:31:49,331 crawler_process.py[line:107] INFO anc: kill the process
2019-08-16 15:31:49,337 crawler_process.py[line:107] INFO skw: kill the process
2019-08-16 15:31:49,343 crawler_process.py[line:107] INFO bkgn: kill the process
2019-08-16 15:31:49,350 crawler_process.py[line:107] INFO apc: kill the process
2019-08-16 15:31:49,358 crawler_process.py[line:107] INFO apg: kill the process
2019-08-16 15:31:49,366 crawler_process.py[line:107] INFO fuc: kill the process
2019-08-16 15:31:49,372 crawler_process.py[line:107] INFO tmeg: kill the process
2019-08-16 15:31:49,378 crawler_process.py[line:107] INFO mkg: kill the process
2019-08-16 15:31:49,384 crawler_process.py[line:107] INFO alc: kill the process
2019-08-16 15:31:49,390 crawler_process.py[line:107] INFO bkc: kill the process
2019-08-16 15:31:49,397 crawler_process.py[line:107] INFO cfc: kill the process
2019-08-16 15:31:49,404 crawler_process.py[line:107] INFO elc: kill the process
2019-08-16 15:31:49,410 crawler_process.py[line:107] INFO dgc: kill the process
2019-08-16 15:31:49,416 crawler_process.py[line:107] INFO cfg: kill the process
2019-08-16 15:31:49,422 crawler_process.py[line:107] INFO icnud: kill the process
2019-08-16 15:31:49,427 crawler_process.py[line:107] INFO r24g: kill the process
2019-08-16 15:31:49,433 crawler_process.py[line:107] INFO ang: kill the process
2019-08-16 15:31:49,452 crawler_process.py[line:107] INFO elgw: kill the process
2019-08-16 15:31:49,458 crawler_process.py[line:107] INFO dggw: kill the process
2019-08-16 15:31:49,467 crawler_process.py[line:107] INFO mug: kill the process
2019-08-16 15:31:49,473 crawler_process.py[line:107] INFO mcg: kill the process
2019-08-16 15:31:49,480 crawler_process.py[line:118] INFO mtg: update status
2019-08-16 15:31:49,483 crawler_process.py[line:118] INFO csg: update status
2019-08-16 15:31:49,486 crawler_process.py[line:118] INFO pog: update status
2019-08-16 15:31:49,489 crawler_process.py[line:118] INFO rsg: update status
2019-08-16 15:31:49,492 crawler_process.py[line:118] INFO rog: update status
2019-08-16 15:31:49,495 crawler_process.py[line:118] INFO oyg: update status
2019-08-16 15:31:49,498 crawler_process.py[line:118] INFO alg: update status
2019-08-16 15:31:49,501 crawler_process.py[line:118] INFO lcg: update status
2019-08-16 15:31:49,503 crawler_process.py[line:118] INFO arg: update status
2019-08-16 15:31:49,506 crawler_process.py[line:118] INFO mxg: update status
2019-08-16 15:31:49,508 crawler_process.py[line:118] INFO wpgg: update status
2019-08-16 15:31:49,511 crawler_process.py[line:118] INFO hlg: update status
2019-08-16 15:31:49,513 crawler_process.py[line:118] INFO vrg: update status
2019-08-16 15:31:49,516 crawler_process.py[line:118] INFO lcc: update status
2019-08-16 15:31:49,518 crawler_process.py[line:118] INFO anc: update status
2019-08-16 15:31:49,520 crawler_process.py[line:118] INFO skw: update status
2019-08-16 15:31:49,523 crawler_process.py[line:118] INFO bkgn: update status
2019-08-16 15:31:49,526 crawler_process.py[line:118] INFO apc: update status
2019-08-16 15:31:49,528 crawler_process.py[line:118] INFO apg: update status
2019-08-16 15:31:49,531 crawler_process.py[line:118] INFO fuc: update status
2019-08-16 15:31:49,533 crawler_process.py[line:118] INFO tmeg: update status
2019-08-16 15:31:49,536 crawler_process.py[line:118] INFO mkg: update status
2019-08-16 15:31:49,538 crawler_process.py[line:118] INFO alc: update status
2019-08-16 15:31:49,541 crawler_process.py[line:118] INFO bkc: update status
2019-08-16 15:31:49,544 crawler_process.py[line:118] INFO cfc: update status
2019-08-16 15:31:49,546 crawler_process.py[line:118] INFO elc: update status
2019-08-16 15:31:49,549 crawler_process.py[line:118] INFO dgc: update status
2019-08-16 15:31:49,551 crawler_process.py[line:118] INFO cfg: update status
2019-08-16 15:31:49,553 crawler_process.py[line:118] INFO icnud: update status
2019-08-16 15:31:49,556 crawler_process.py[line:118] INFO r24g: update status
2019-08-16 15:31:49,558 crawler_process.py[line:118] INFO ang: update status
2019-08-16 15:31:49,561 crawler_process.py[line:118] INFO elgw: update status
2019-08-16 15:31:49,564 crawler_process.py[line:118] INFO dggw: update status
2019-08-16 15:31:49,567 crawler_process.py[line:118] INFO mug: update status
2019-08-16 15:31:49,569 crawler_process.py[line:118] INFO mcg: update status
2019-08-16 15:31:50,194 crawler_process.py[line:110] INFO mtg: kill the process
2019-08-16 15:31:50,208 crawler_process.py[line:121] INFO test: update status
2019-08-16 15:32:49,649 crawler_process.py[line:107] INFO test: kill the process
2019-08-16 15:32:49,655 crawler_process.py[line:118] INFO test: update status
2019-08-16 15:33:14,302 crawler_process.py[line:121] INFO test: update status
2019-08-16 15:33:14,303 crawler_process.py[line:131] INFO new process run nohup python3 /data/cage/tas_caller/async_tas_caller.py -r test2 -c 1 -mq test_queue -ef goods> /dev/null &
2019-08-16 15:33:14,305 crawler_process.py[line:135] INFO started process {'cmdline': ['/bin/sh', '-c', 'nohup python3 /data/cage/tas_caller/async_tas_caller.py -r test2 -c 1 -mq test_queue -ef goods> /dev/null &'], 'cpu_times': pcputimes(user=0.0, system=0.0, children_user=0.0, children_system=0.0), 'create_time': 1565940793.84}
2019-08-16 15:33:49,717 crawler_process.py[line:118] INFO test2: update status
2019-08-17 23:06:22,134 crawler_process.py[line:118] INFO alg: update status
2019-08-17 23:07:22,215 crawler_process.py[line:118] INFO csg: update status
2019-08-17 23:07:22,218 crawler_process.py[line:118] INFO pog: update status
2019-08-17 23:07:22,225 crawler_process.py[line:118] INFO rsg: update status
2019-08-17 23:07:22,228 crawler_process.py[line:118] INFO rog: update status
2019-08-17 23:07:22,231 crawler_process.py[line:118] INFO oyg: update status
2019-08-17 23:07:22,235 crawler_process.py[line:118] INFO alg: update status
2019-08-17 23:07:22,238 crawler_process.py[line:118] INFO lcg: update status
2019-08-17 23:07:22,241 crawler_process.py[line:118] INFO arg: update status
2019-08-17 23:07:22,244 crawler_process.py[line:118] INFO mxg: update status
2019-08-17 23:07:22,247 crawler_process.py[line:118] INFO wpgg: update status
2019-08-17 23:07:22,249 crawler_process.py[line:118] INFO anc: update status
2019-08-17 23:07:22,252 crawler_process.py[line:118] INFO skw: update status
2019-08-17 23:07:22,255 crawler_process.py[line:118] INFO bkgn: update status
2019-08-17 23:07:22,257 crawler_process.py[line:118] INFO apc: update status
2019-08-17 23:07:22,260 crawler_process.py[line:118] INFO apg: update status
2019-08-17 23:07:22,262 crawler_process.py[line:118] INFO fuc: update status
2019-08-17 23:07:22,265 crawler_process.py[line:118] INFO tmeg: update status
2019-08-17 23:07:22,267 crawler_process.py[line:118] INFO mkg: update status
2019-08-17 23:07:22,270 crawler_process.py[line:118] INFO alc: update status
2019-08-17 23:07:22,273 crawler_process.py[line:118] INFO bkc: update status
2019-08-17 23:07:22,276 crawler_process.py[line:118] INFO cfc: update status
2019-08-17 23:07:22,279 crawler_process.py[line:118] INFO elc: update status
2019-08-17 23:07:22,282 crawler_process.py[line:118] INFO dgc: update status
2019-08-17 23:07:22,285 crawler_process.py[line:118] INFO cfg: update status
2019-08-17 23:07:22,287 crawler_process.py[line:118] INFO icnud: update status
2019-08-17 23:07:22,290 crawler_process.py[line:118] INFO r24g: update status
2019-08-17 23:07:22,293 crawler_process.py[line:118] INFO ang: update status
2019-08-17 23:07:22,295 crawler_process.py[line:118] INFO elgw: update status
2019-08-17 23:07:22,298 crawler_process.py[line:118] INFO mug: update status
2019-08-17 23:07:22,301 crawler_process.py[line:118] INFO mcg: update status
2019-08-17 23:07:22,303 crawler_process.py[line:118] INFO dggw: update status
2019-08-17 23:07:22,306 crawler_process.py[line:118] INFO mtg: update status
2019-08-17 23:07:22,310 crawler_process.py[line:118] INFO hlg: update status
2019-08-17 23:07:22,313 crawler_process.py[line:118] INFO vrg: update status
2019-08-17 23:07:22,315 crawler_process.py[line:118] INFO lcc: update status
2019-08-17 23:07:22,318 crawler_process.py[line:118] INFO alg: update status
2019-08-17 23:40:30,234 crawler_process.py[line:104] INFO oyg: pause the crawler
2019-08-19 09:22:55,056 crawler_process.py[line:118] INFO mtg: update status
2019-08-19 09:25:55,814 crawler_process.py[line:104] INFO mtg: pause the crawler
2019-08-19 09:29:56,773 crawler_process.py[line:107] INFO mtg: kill the process
2019-08-19 10:00:04,129 crawler_process.py[line:104] INFO mtg: pause the crawler
2019-08-19 10:07:05,858 crawler_process.py[line:107] INFO mtg: kill the process
2019-08-20 14:20:03,761 crawler_process.py[line:101] INFO oyg:back to running
2019-08-20 17:13:46,837 crawler_process.py[line:118] INFO pct9: update status
2019-08-20 17:19:48,401 crawler_process.py[line:118] INFO pct9: update status
2019-08-20 17:28:50,691 crawler_process.py[line:118] INFO pct9: update status
2019-08-20 17:46:55,033 crawler_process.py[line:118] INFO pct9: update status
2019-08-21 10:04:54,024 crawler_process.py[line:104] INFO mcg: pause the crawler
2019-08-21 10:23:58,943 crawler_process.py[line:118] INFO mcg: update status
2019-08-21 11:31:16,496 crawler_process.py[line:104] INFO mug: pause the crawler
2019-08-21 11:50:21,350 crawler_process.py[line:101] INFO mug:back to running
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import threading
import time
import traceback
from utils.es_api import get_task_error_rate
from utils.log_manage import get_logger
from utils.mongo import MongoOperator
from utils.robots import dd_send_msg
from utils.tas_redis import get_flag
TIME_INTERVAL = 90  # minutes
logger = get_logger('task_monitor')
mongo = MongoOperator()
def task_fail_rate_monitor():
while True:
try:
msg = get_task_error_rate(time_range=TIME_INTERVAL)
if msg:
logger.info(msg)
dd_send_msg(msg)
time.sleep(TIME_INTERVAL * 60)
except Exception:
logger.error(traceback.format_exc())
dd_send_msg(traceback.format_exc())
time.sleep(60 * 5)
def mongo_data_expired_rate_monitor():
while True:
try:
msg = mongo.get_platform_data_expired_rate()
if msg:
logger.info(msg)
dd_send_msg(msg)
time.sleep(3 * 60 * 60)
except Exception:
logger.error(traceback.format_exc())
dd_send_msg(traceback.format_exc())
time.sleep(60 * 5)
def task_running_flag_monitor():
while True:
try:
msg = get_flag()
if msg:
logger.info(msg)
dd_send_msg(msg)
time.sleep(3 * 60 * 60)
except Exception:
logger.error(traceback.format_exc())
dd_send_msg(traceback.format_exc())
time.sleep(60 * 5)
def main():
thread_monitor1 = threading.Thread(target=task_fail_rate_monitor)
thread_monitor1.start()
thread_monitor2 = threading.Thread(target=mongo_data_expired_rate_monitor)
thread_monitor2.start()
thread_monitor3 = threading.Thread(target=task_running_flag_monitor)
thread_monitor3.start()
if __name__ == "__main__":
main()
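The three monitor loops above differ only in the probe function and the sleep interval; a minimal refactor sketch (run_monitor is a hypothetical helper, reusing the module's logger and dd_send_msg):

def run_monitor(probe, interval_seconds):
    while True:
        try:
            msg = probe()
            if msg:
                logger.info(msg)
                dd_send_msg(msg)
            time.sleep(interval_seconds)
        except Exception:
            logger.error(traceback.format_exc())
            dd_send_msg(traceback.format_exc())
            time.sleep(60 * 5)  # back off for five minutes after a failure

# e.g. threading.Thread(target=run_monitor, args=(get_flag, 3 * 60 * 60)).start()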
[program:af_schedule]
command = airflow scheduler
numprocs = 1
process_name = %(program_name)s_%(process_num)02d
autostart = true
autorestart = true
startretries = 2
user = root
redirect_stderr = true
[program:af_server]
command = /usr/bin/airflow webserver
numprocs = 1
process_name = %(program_name)s_%(process_num)02d
autostart = true
autorestart = true
startretries = 2
user = liexin521
redirect_stderr = true
[program:cp]
directory = /data/cage/tas_monitor
command = python3 ./crawler_process.py
numprocs = 1
process_name = %(program_name)s_%(process_num)02d
autostart = true
autorestart = true
startretries = 2
user = root
redirect_stderr = true
[program:data_manager]
directory = /data/cage/data_manager
command = python3 ./server.py
numprocs = 1
process_name = %(program_name)s_%(process_num)02d
autostart = true
autorestart = true
startretries = 2
user = root
redirect_stderr = true
[program:keywords]
directory = /data/cage/tas_caller
command = python3 ./db2rds/keywords.py
numprocs = 1
process_name = %(program_name)s_%(process_num)02d
autostart = true
autorestart = true
startretries = 2
user = root
redirect_stderr = true
[program:master_ip]
directory = /data/cage/tas_caller
command = python3 ./script/update_master_ip.py
numprocs = 1
process_name = %(program_name)s_%(process_num)02d
autostart = true
autorestart = true
startretries = 2
user = root
redirect_stderr = true
[program:monitor]
directory = /data/cage/tas_monitor
command = python3 ./monitor.py
numprocs = 1
process_name = %(program_name)s_%(process_num)02d
autostart = true
autorestart = true
startretries = 2
user = root
redirect_stderr = true
[program:redis]
command = redis-server /home/liexin521/redis.conf
numprocs = 1
process_name = %(program_name)s_%(process_num)02d
autostart = true
autorestart = true
startretries = 2
user = root
redirect_stderr = true
[program:tas]
directory = /data/cage/tornado_api_server
command = python3 ./server.py --port=32345 --region=master
numprocs = 1
process_name = %(program_name)s_%(process_num)02d
autostart = true
autorestart = true
startretries = 2
user = root
redirect_stderr = true
[program:token_server]
directory = /data/cage/tas_token_server
command = python3 ./token_server.py
numprocs = 1
process_name = %(program_name)s_%(process_num)02d
autostart = true
autorestart = true
startretries = 2
user = root
redirect_stderr = true
#!/usr/bin/env python
# -*- coding:utf-8 -*-
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import sys
ENVIRONMENT = 'test' if sys.platform in ('darwin', 'win32') else 'produce'
def get_mysql_conf(db):
if ENVIRONMENT == 'produce':
user, psd = 'acuser', 'acuserXty2201'
host = '172.18.137.35'
else:
user, psd = db, db + '#zsyM'
host = '192.168.2.232'
conf = {
'host': host,
'port': 3306,
'user': user,
'password': psd,
'db': db,
'charset': 'utf8'
}
return conf
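The dict above maps one-to-one onto pymysql.connect's keyword arguments, so a connection can be opened directly from it; a quick sketch, assuming the 'dashboard' schema used elsewhere in this repo:

import pymysql
from utils.config import get_mysql_conf

conn = pymysql.connect(**get_mysql_conf('dashboard'))  # host/user picked by environment
with conn.cursor() as cur:
    cur.execute('SELECT 1')
    print(cur.fetchone())  # (1,)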
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from datetime import datetime, timedelta
from elasticsearch import Elasticsearch
ichunt_elk = Elasticsearch(host="127.0.0.1", port=9200, timeout=20)
task_mapping = {
'arg': 'arrow sku更新',
'arc': 'arrow新品采集',
'ang': 'avnet sku更新',
'anc': 'avnet新品采集',
'alg': 'allied sku更新',
'alc': 'allied新品采集',
'rog': 'rochester sku更新',
'vrg': 'verical sku更新',
'rsg': 'rs sku更新',
'mug': 'mouser sku更新',
'dggw': 'digikey sku更新',
'dgc': 'digikey新品采集',
'r24g': 'rutronik sku更新',
'r24c': 'rutronik新品采集',
'skw': '搜索关键词sku更新',
'pc': '平台比价',
'spu': 'spu税率采集',
'bkc': 'buerklin新品采集',
'bkg': 'buerklin sku更新',
'bkgn': 'buerklin sku更新',
'msg': 'master sku更新',
'csg': 'chip1stop sku更新',
'elgw': 'element14 sku更新',
'elc': 'element14 新品更新',
'poc': 'powell 新品采集',
'pog': 'powell sku更新',
"mcc": 'microchip 新品采集',
'mcg': 'microchip sku更新',
"mtg": "master sku更新",
"hlg": "heilind sku更新",
"tic": "TI 新品采集",
"tig": "TI sku更新",
"mxc": 'maxim 新品采集',
'mxg': 'maxim sku更新',
'ikkw': 'ickey 自营采集',
'ikid': 'ickey 指数采集',
'mkg': 'marki sku更新',
'cfc': 'corestaff 新品采集',
'cfg': 'corestaff sku更新',
"lcg": '深圳立创 sku更新',
"lcc": "深圳立创 新品采集",
"icnud": 'ic交易网 更新',
"apc": 'aipco 新品更新',
"apg": 'aipco sku更新',
"fuc": 'future 新品更新',
"fug": 'future sku更新',
"tmeg": 'tme sku更新',
"oyg": '唯样 sku更新',
"wpgg": 'wpg sku更新'
}
task_rate_warning = {'spu': 70, 'pc': 90, 'ikkw': 50, 'ikid': 50, 'icnud': 95}
# Example alert line: task (dggw) digikey sku update, average error rate 31.1%
def gen_msg(time_range, buckets):
msg = 'TAS任务监控预警 过去%d分钟内:\n' % time_range
demo = '任务:{} {} 平均错误率为{:.2f}%'
msgs = []
for task in buckets:
key = task['key']
if key in task_rate_warning and task['avgFailRate']['value'] < task_rate_warning[key]:
continue
desc = task_mapping[key] if key in task_mapping else '未知'
msg_line = demo.format(key, desc, task['avgFailRate']['value'])
msgs.append(msg_line)
msg += msg_line + '\n'
if msgs:
return msg
else:
return
def get_task_error_rate(time_range=30, error_rate=30):
utc_since = datetime.utcnow() - timedelta(minutes=time_range)
# date_now = datetime.now() - timedelta(minutes=time_range)
index = 'logstash-' + utc_since.strftime('%Y.%m.%d')
query = {
"size": 0,
"query": {
"range": {
"@timestamp": {
"gte": utc_since.strftime("%Y-%m-%dT%H:%M:%SZ")
}
}
},
"aggs": {
"taskCodes": {
"terms": {
"field": "taskCode",
"size": 40
},
"aggs": {
"avgFailRate": {
"avg": {
"field": "failRate"
}
},
"avgFailRateFilter": {
"bucket_selector": {
"buckets_path": {
"avgFailRate": "avgFailRate"
},
"script": {
"source": "params.avgFailRate >= " + str(error_rate)
}
}
}
}
}
}
}
response = ichunt_elk.search(index=index, doc_type='doc', body=query)
buckets = response['aggregations']['taskCodes']['buckets']
if buckets:
return gen_msg(time_range, buckets)
return
if __name__ == "__main__":
print(get_task_error_rate(error_rate=25))
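gen_msg only reads two fields from each terms bucket: key and avgFailRate.value. A sketch of the response shape the aggregation above produces (bucket values are illustrative, not real data):

buckets = [
    {'key': 'dggw', 'doc_count': 120, 'avgFailRate': {'value': 31.1}},
    {'key': 'pc', 'doc_count': 300, 'avgFailRate': {'value': 12.4}},
]
# 'pc' sits below its per-task threshold of 90 in task_rate_warning, so only 'dggw' is reported:
print(gen_msg(30, buckets))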
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import logging
import os
from logging.handlers import RotatingFileHandler
loggerLevel = logging.INFO
root_path = os.getcwd().split('tas_monitor')[0] + 'tas_monitor'
log_dir = root_path + '/logs'
# log_dir = "logs"
console_formatter = '%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s'
json_formatter = '%(message)s'
logger_dict = dict()
def create_logger(log_name, log_type):
g_logger = logging.getLogger(log_name)
# rt = log_name.split('_')[0]
log_path = "%s" % log_dir
if not os.path.exists(log_path):
os.makedirs(log_path)
logfile = log_name + ".log"
log_file = "%s/%s" % (log_path, logfile)
console = logging.StreamHandler()
console.setFormatter(logging.Formatter(console_formatter))
handler = RotatingFileHandler(log_file, maxBytes=2 * 1024 * 1024, backupCount=1)  # 2 MB per file
fmt = json_formatter if log_type == 'json' else console_formatter
handler.setFormatter(logging.Formatter(fmt))
g_logger.addHandler(console)
g_logger.addHandler(handler)
g_logger.setLevel(loggerLevel)
return g_logger
def get_logger(log_name, log_type='file'):
if log_name not in logger_dict:
create_logger(log_name, log_type)
logger_dict[log_name] = logging.getLogger(log_name)
return logging.getLogger(log_name)
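A usage sketch for the helper above: loggers are cached by name, and each one writes to the console as well as a rotating logs/<name>.log under the project root.

from utils.log_manage import get_logger

log = get_logger('demo')            # creates logs/demo.log on first use
log.info('hello from tas_monitor')  # printed and appended to the file
assert get_logger('demo') is log    # repeated lookups return the same logger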
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import sys
import time
from urllib.parse import quote_plus
import pymongo
MG_HOST_SET = {
'test': '192.168.1.237',
'produce': '172.18.137.23'
}
ENV = 'test' if sys.platform in ('darwin', 'win32') else 'produce'
def get_mongo_conf():
host = MG_HOST_SET[ENV]
conf = {
"host": host,
"port": 27017,
"database": 'ichunt',
"user": 'ichunt',
"password": 'huntmon66499'
}
return conf
class MongoOperator:
def __init__(self):
config = get_mongo_conf()
uri = 'mongodb://%s:%s@%s/%s' % (
quote_plus(config['user']), quote_plus(config['password']), config['host'], config['database'])
self.conn = pymongo.MongoClient(uri)
self.db = self.conn[config['database']]
# element14 chip1stop ti
self.colls = ('alliedelec', 'arrow', 'avnet', 'buerklin', 'digikey', 'master', 'rs', 'rochester',
'verical', "powell", 'microchip', 'tme', 'heilind', 'maxim', 'aipco', 'company', 'rutronik',
'mouser', 'corestaff', 'wpg', 'szlc', 'element14', 'chip1stop', 'future')
@staticmethod
def get_unexpired_time(plat):
if plat == 'powell':
ts = int(time.time()) - 3600 * 24 * 7  # 7d
elif plat in ('digikey',):
ts = int(time.time()) - 3600 * 72 # 72h
elif plat in ('microchip', 'heilind', 'element14', 'chip1stop', 'future'):
ts = int(time.time()) - 3600 * 24 # 24h
else:
ts = int(time.time()) - 3600 * 48  # 48h
return ts
@staticmethod
def get_query(ts, plat):
expired_query, total_query = {'time': {"$lt": ts}}, {}
if plat not in ('element14', 'chip1stop', 'future'):
expired_query['is_error'] = 0
total_query['is_error'] = 0
elif plat in ('element14', 'chip1stop'):
expired_query = {'time': {"$gt": ts}}
return expired_query, total_query
def get_platform_data_expired_rate(self):
msg = '数据过期率监控预警 过去3小时内:\n'
demo = '平台:{} 数据过期率为{:.2f}%'
msgs = []
for plat in self.colls:
coll = self.db[plat]
ts = self.get_unexpired_time(plat)
eq, tq = self.get_query(ts, plat)
expired = coll.count(eq)
if plat == 'element14':
total = 600000
expired = total - expired
elif plat == 'chip1stop':
total = 800000
expired = total - expired
else:
total = coll.count(tq)  # element14/chip1stop skip the total query; their totals are fixed above at 600k/800k
if total > 0:
rate = expired / total * 100
print(plat, expired, total, rate)
if rate >= 10:
msg_line = demo.format(plat, rate)
msgs.append(msg_line)
msg += msg_line + '\n'
else:
print(plat, expired, total)
if msgs:
return msg
else:
return
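A sketch of what the expiry monitor computes for a single platform, assuming the pre-4.0 pymongo implied by the deprecated Collection.count() calls above (digikey uses the 72-hour window):

m = MongoOperator()
ts = MongoOperator.get_unexpired_time('digikey')
eq, tq = MongoOperator.get_query(ts, 'digikey')
print(eq)   # {'time': {'$lt': ts}, 'is_error': 0} -> valid docs older than 72h
print(m.db['digikey'].count(eq), m.db['digikey'].count(tq))  # expired vs total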
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import pymysql
from utils.config import get_mysql_conf
class MySqlOperator:
def __init__(self, db_key_name):
config = get_mysql_conf(db_key_name)
self.db = pymysql.connect(**config)
def re_connect(self):
try:
self.db.ping()
except Exception:
self.db.connect()
def check_exist_by_unicode(self, uc):
with self.db.cursor() as cursor:
sql = "SELECT status FROM lie_spider_board WHERE unique_code = %s"
cursor.execute(sql, uc)
result = cursor.fetchone()
return result
def insert_process(self, p):
with self.db.cursor() as cursor:
sql = """
INSERT INTO lie_spider_board(task_code, platform, concurrency, queue, unique_code, status,
task_type, start_time, run_time, remain_task, remain_wrong_task, wrong_radio) VALUES
(%(task_code)s,%(platform)s,%(concurrency)s,%(queue)s,%(unique_code)s,%(status)s,%(task_type)s,
%(start_time)s,%(run_time)s,%(remain_task)s,%(remain_wrong_task)s,%(wrong_radio)s)
"""
cursor.execute(sql, p)
self.db.commit()
def update_process(self, p):
with self.db.cursor() as cursor:
sql = """
UPDATE lie_spider_board SET run_time=%(run_time)s, remain_task=%(remain_task)s,
remain_wrong_task=%(remain_wrong_task)s, wrong_radio=%(wrong_radio)s WHERE unique_code=%(unique_code)s
"""
cursor.execute(sql, p)
self.db.commit()
def get_process_alive(self):
with self.db.cursor() as cursor:
sql = "SELECT task_code, unique_code FROM lie_spider_board WHERE status IN (0, 1)"
cursor.execute(sql)
result = cursor.fetchall()
return result
def update_status(self, s, u):
with self.db.cursor() as cursor:
sql = """
UPDATE lie_spider_board SET status=%s WHERE unique_code=%s
"""
cursor.execute(sql, (s, u))
self.db.commit()
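Note on re_connect above: pymysql's Connection.ping() already takes a reconnect flag, so the try/except can be collapsed; an equivalent sketch:

def re_connect(self):
    # ping(reconnect=True) re-opens the link in place if it has dropped
    self.db.ping(reconnect=True)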
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import requests
dd_robot_api = 'https://oapi.dingtalk.com/robot/send?access_token='
tom = 'c19beeab837f5c6e019e7fc602d470704d634ae33510f40fb0e903cde215fb24'
def dd_send_msg(msg, robot=tom):
data = {
"msgtype": "text",
"text": {
"content": msg
}
}
requests.post(dd_robot_api + robot, json=data)
if __name__ == "__main__":
dd_send_msg('喂喂 你发的太多了!')
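The webhook call above ignores the HTTP response. DingTalk's robot endpoint replies with a JSON body whose errcode is 0 on success, so a defensive variant might look like this (dd_send_msg_checked is a hypothetical name):

def dd_send_msg_checked(msg, robot=tom):
    data = {'msgtype': 'text', 'text': {'content': msg}}
    resp = requests.post(dd_robot_api + robot, json=data, timeout=10)
    body = resp.json()
    if body.get('errcode') != 0:  # e.g. rate-limited or a bad token
        print('dingtalk push failed:', body)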
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import sys
import redis
from utils.es_api import task_mapping
env = 'test' if sys.platform in ('darwin', 'win32') else 'produce'
def get_redis_conf():
conf = {
'host': 'localhost',
'port': 6379,
'db': 0,
}
if env.startswith('produce'):
conf['password'] = 'icDb29mLy2s'
return conf
pool = redis.ConnectionPool(**get_redis_conf())
task_redis = redis.Redis(connection_pool=pool)
def get_flag():
flags = task_redis.hgetall('task_running_flag')
if flags:
msg = 'TAS暂停中的调度任务有:\n'
demo = '任务:{} {}'
for k in flags:
k = k.decode()
desc = task_mapping[k] if k in task_mapping else '未知'
line = demo.format(k, desc)
msg += line + '\n'
return msg
if __name__ == "__main__":
get_flag()
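get_flag decodes each hash key by hand because redis-py returns bytes by default; passing decode_responses=True when the pool is built makes hgetall return str for both keys and values. A sketch:

pool_str = redis.ConnectionPool(decode_responses=True, **get_redis_conf())
flags = redis.Redis(connection_pool=pool_str).hgetall('task_running_flag')
# flags is now {str: str}, so the k.decode() in get_flag would be unnecessary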