Commit 5b397aa3 by lzzzzl

Initial commit

parent d4a0e717
Showing with 12221 additions and 82 deletions
from extract.ex_ly_sku import ExLySku
from utils.msg_handler import MsgHandler
import traceback
from extract.ex_sku_expose import ExSkuExpose
from extract.ex_ickey import ExIckey
from extract.ex_base import Base
from load.load_hbase import LoadHbase
from utils.db_handler import DBHandler
from utils.date_handler import DateHandler
from pipeline.pi_goods import PiGoods
from pipeline.pi_erp import PiErp
from config.conn_list import ConnList
import sys
......@@ -11,15 +18,59 @@ if __name__ == '__main__':
del_sku: 删除MongoDB里的原始数据
"""
if len(sys.argv) > 1:
# sku价格采集
if sys.argv[1] == 'ex_sku':
try:
ExLySku.ex_sku()
except:
MsgHandler.send_dd_msg("【联营SKU价格采集任务】 失败\n" +
"失败信息:" + str(traceback.print_exc()))
elif sys.argv[1] == 'del_sku':
try:
ExLySku.del_sku()
except:
MsgHandler.send_dd_msg("【Mongo数据删除任务】 失败\n" +
"失败信息:" + str(traceback.print_exc()))
elif sys.argv[1] == 'put_hdfs':
ExLySku.put_hdfs()
elif sys.argv[1] == 'load_hbase':
ExLySku.load_hbase()
elif sys.argv[1] == 'del_hdfs':
ExLySku.del_hdfs()
# sku曝光
elif sys.argv[1] == 'sku_expose_collect':
ExSkuExpose.collect_rq()
elif sys.argv[1] == 'merge_file':
ExSkuExpose.merge_file()
# ickey自营采集
elif sys.argv[1] == 'ickey_zy':
ickey = ExIckey('ickey')
ickey.ex_ickey_goods()
# 云汉指数
elif sys.argv[1] == 'ickey_index':
ickey = ExIckey('ickey')
# 采集
ickey.ex_ickey_index()
# 上传数据
local_file = '/data3/hdfs_data/ickey_index/' + 'ickey_' + \
DateHandler.now_date(days=0, d_type=2) + '.txt'
hdfs_file = '/ickey_index/ickey_' + DateHandler.now_date(days=0, d_type=2) + '.txt'
DBHandler.hdfs_upload('/ickey_index/', local_file)
Base.rm_local_file(local_file)
# HDFS数据写入HBASE
row = "%s,%s,%s,%s,%s,%s" % ("cf1:searchTrendAnalysis", "cf1:searchAreaTrendAnalysis",
"cf1:industryType", "cf1:search_num", "cf1:trade_num", "cf1:buyer_num")
msg = '【云汉指数采集】写入HBASE完成'
LoadHbase.cmd_load(row, "sku:ickey_index", hdfs_file, msg, '|')
# 云汉指数备货
elif sys.argv[1] == 'ickey_stock_find':
PiGoods.pipeline_zy_stock()
# ERP获取客户订单
elif sys.argv[1] == 'erp_company':
PiErp().pipeline_erp_company(ConnList.Credit(), ConnList.CreditSource())
# ERP客户订单info
elif sys.argv[1] == 'erp_info':
PiErp().pipeline_erp_info()
# ERP客户订单month
elif sys.argv[1] == 'erp_month':
PiErp().pipeline_erp_month(False, "WHERE is_credit = 1 ORDER BY business_time", "lie_basic_month", ConnList.Credit(), ConnList.CreditSource())
PiErp().pipeline_erp_weight("lie_basic_month", ConnList.Credit())
# ERP客户订单容差month
elif sys.argv[1] == 'erp_month_tolerance':
PiErp().pipeline_erp_month(True, "WHERE is_credit = 1 ORDER BY business_time", "lie_basic_month_tolerance", ConnList.CreditLx(), ConnList.CreditSource())
PiErp().pipeline_erp_weight("lie_basic_month_tolerance", ConnList.CreditLx())
# ERP更新CMP_CODE
elif sys.argv[1] == 'erp_code':
PiErp().pipeline_erp_CmpCode()
import happybase
conn = happybase.Connection("localhost", 9090)
print(conn.tables())
from pipeline.pi_purchase import PiPurchase
from pipeline.pi_email import PiEmail
from pipeline.pi_search import PiSearch
from pipeline.pi_goods import PiGoods
from pipeline.pi_daily import PiDaily
from extract.ex_sku_expose import ExSkuExpose
import sys
# 邮件发送,参数自定
if __name__ == '__main__':
    """
    Report / e-mail dispatch entry point. The first CLI argument selects the job:
        core_daily:     core daily report
        zm_down:        daily count of de-listed exclusive-dealer SKUs
        zm_warn:        exclusive-dealer stock warning
        safe_stock:     safety-stock warning
        search_no_r:    searches with no results
        sku_expose:     SKU exposure summary (from HDFS data)
        zyly_match:     self-operated / consignment goods matching
        operate_daily:  operations daily report
        operate_weekly: operations weekly report
    """
    if len(sys.argv) > 1:
        job = sys.argv[1]
        try:
            if job == 'core_daily':
                PiDaily.pipeline_core_daily()
            elif job == 'zm_down':
                PiEmail.pipeline_zm_down()
            elif job == 'zm_warn':
                PiGoods.pipeline_zm_warn()
            elif job == 'safe_stock':
                PiPurchase.pipeline_safe_stock()
            elif job == 'search_no_r':
                PiSearch.search_no_result()
            elif job == 'sku_expose':
                # Pull raw exposure data from HDFS once, then feed both summaries.
                hdfs_data = ExSkuExpose.get_hdfs_data()
                ExSkuExpose.cal_sku_expose(hdfs_data)
                # Lichuang (立创) exposure uses the same raw data.
                ExSkuExpose.cal_lc_expose(hdfs_data)
            elif job == 'zyly_match':
                PiGoods.pipeline_zylxly_goods()
            elif job == 'operate_daily':
                PiDaily.pipeline_operate_daily()
            elif job == 'operate_weekly':
                PiDaily.pipeline_operate_weekly()
        except Exception:
            # The original bare `except: pass` silently swallowed every
            # failure; at minimum surface the traceback for the scheduler log.
            import traceback
            traceback.print_exc()
from pipeline.pi_order import PiOrder
from pipeline.pi_user import PiUser
from pipeline.pi_rank import PiRank
from config.conn_list import ConnList
from utils.db_handler import DBHandler
from utils.date_handler import DateHandler
from utils.excel_handler import ExcelHandler
from extract.ex_erp import ExERP
from pipeline.pi_rank import PiRank
from pipeline.pi_erp import PiErp
from pipeline.pi_lx_log import PiLxLog
import math
import requests
import time
import json
from translate.ts_erp import TsERP
from pipeline.pi_weekly import PiWeekly
def test():
    """Manual smoke entry: rebuild order items via the order pipeline."""
    PiOrder.pipeline_order_items()


def main():
    """
    Export pool-fund (credit) detail rows from lie_basic_detail into an
    Excel workbook named "result".
    """
    rd = []
    db = ConnList.CreditSource()
    sql = "SELECT erp_company_code,poolfund_no,poolfund_source_no,entru_no,order_amount,business_time,\
        deadline_day,receive_time,return_amount,return_time,is_settle,delay_day,delay_amount,\
        is_credit,gross_profit,poolfund_type,source_type\
        FROM lie_basic_detail"
    result = DBHandler.read(db, sql)
    for row in result:
        rd.append({
            'company': row[0],
            'poolfund_no': row[1],
            'poolfund_source_no': row[2],
            'entru_no': row[3],
            'order_amount': row[4],
            'business_time': DateHandler.unix_to_date(row[5], fmt="%Y-%m-%d"),
            'deadline_day': row[6],
            'receive_time': DateHandler.unix_to_date(row[7], fmt="%Y-%m-%d"),
            'return_amount': row[8],
            'return_time': DateHandler.unix_to_date(row[9], fmt="%Y-%m-%d"),
            'is_settle': '是' if row[10] == 1 else '否',
            'delay_day': row[11],
            'delay_amount': row[12],
            'is_credit': '有' if row[13] == 1 else '无',
            'gross_profit': row[14],
            'poolfund_type': row[15],
            'source_type': '供应链' if row[16] == 1 else '科技'
        })
    # Excel column headers (Chinese labels) and the matching dict keys, in order.
    title = ['公司', '资金池编号', '资金池来源编码', '入仓号', '下单金额', '业务时间', '订单期限(账期天数)', '应收日期(业务日期+期限)', '回款金额', '回款日期', '是否结清',
             '逾期支付天数', '逾期支付金额', '信用', '毛利', '资金池类型', '来源']
    content = ['company', 'poolfund_no', 'poolfund_source_no', 'entru_no', 'order_amount', 'business_time',
               'deadline_day', 'receive_time',
               'return_amount', 'return_time',
               'is_settle', 'delay_day', 'delay_amount',
               'is_credit', 'gross_profit', 'poolfund_type', 'source_type']
    ExcelHandler.write_to_excel(title, content, rd, "result", result_type=2)


if __name__ == '__main__':
    main()
    test()
from extract.ex_order import ExOrder
from translate.ts_order import TsOrder
def ac_cal(rate=6.78):
    """
    Summarise Verical / Arrow channel orders between 2019-01-01 and 2019-01-25.

    rate: USD->CNY exchange rate applied to order amounts. Previously the
          literal 6.78 was repeated at every call site; it is now a single
          parameter with the same default, so existing callers are unaffected.

    Prints per-supplier totals, a combined summary, a per-channel breakdown
    (SEM / WeChat / WeChat official account / QQ) and a new-vs-returning
    user split.
    """
    # Unix timestamps bounding the reporting window.
    start_0125 = 1548432000  # 2019-01-25
    start_0101 = 1546272000  # 2019-01-01
    verical_id = 5
    arror_id = 10
    # Per-supplier filters: exclude pay-type 3 and internal (pf=-1) traffic.
    verical = {'start_time': start_0101, 'end_time': start_0125,
               'condition': ['i.supplier_id = ' + str(verical_id),
                             'o.order_pay_type != 3',
                             'o.order_source not like \'%pf=-1%\'']}
    arror = {'start_time': start_0101, 'end_time': start_0125,
             'condition': ['i.supplier_id = ' + str(arror_id),
                           'o.order_pay_type != 3',
                           'o.order_source not like \'%pf=-1%\'']}
    ex_order = ExOrder('order detail')
    ts_verical = TsOrder('ts', ex_order.order_items(verical)).trans_order_view(rate)
    ts_paid_verical = TsOrder('ts', ex_order.order_paid_items(verical)).trans_order_view(rate)
    ts_arror = TsOrder('ts', ex_order.order_items(arror)).trans_order_view(rate)
    ts_paid_arror = TsOrder('ts', ex_order.order_paid_items(arror)).trans_order_view(rate)
    print('-------------------------------------------')
    print('Verical')
    print('下单人数:', len(ts_verical['user']), '下单金额:', ts_verical['amount'])
    print('付款人数:', len(ts_paid_verical['user']), '付款金额:', ts_paid_verical['amount'])
    print('Arror')
    print('下单人数:', len(ts_arror['user']), '下单金额:', ts_arror['amount'])
    print('付款人数:', len(ts_paid_arror['user']), '付款金额:', ts_paid_arror['amount'])
    print('汇总')
    print('下单人数:', len(list(set(ts_verical['user'] + ts_arror['user']))),
          '下单金额:', ts_verical['amount'] + ts_arror['amount'])
    print('付款人数:', len(list(set(ts_paid_verical['user'] + ts_paid_arror['user']))),
          '付款金额:', ts_paid_verical['amount'] + ts_paid_arror['amount'])
    # Channel breakdown across both suppliers combined.
    sum_canal = {'start_time': start_0101, 'end_time': start_0125,
                 'condition': ['i.supplier_id in (5, 10)',
                               'o.order_pay_type != 3',
                               'o.order_source not like \'%pf=-1%\'']}
    ts_sum_canal = TsOrder('', ex_order.order_items(sum_canal)).trans_order_tag(rate)
    ts_sum_paid_canal = TsOrder('', ex_order.order_paid_items(sum_canal)).trans_order_tag(rate)
    print('-------------------------------------------')
    # (channel key, printed label) pairs replace the original block of
    # eight per-channel locals.
    for key, label in (('sem', 'SEM'), ('wx', '微信'), ('wechat', '微信公众号'), ('qq', 'QQ')):
        placed = ts_sum_canal[key]
        paid = ts_sum_paid_canal[key]
        print(label)
        print('下单人数:', len(placed['user']), '下单金额:', placed['amount'],
              '付款人数:', len(paid['user']), '付款金额', paid['amount'])
    # New vs. returning users.
    print('-------------------------------------------')
    ts_user_type = TsOrder('', ex_order.order_items(sum_canal)).trans_user_type(rate, start_0101)
    ts_user_paid_type = TsOrder('', ex_order.order_paid_items(sum_canal)).trans_user_type(rate, start_0101)
    print(ts_user_type)
    print(ts_user_paid_type)
from sklearn import model_selection, preprocessing, linear_model, naive_bayes, metrics, svm
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn import decomposition, ensemble
from sklearn.externals import joblib
from config.conn_list import ConnList
import pandas as pd
import numpy as np
import pymysql
# Shared DB connection and pre-trained artifacts, loaded once at import time.
db = ConnList.Dashboard()
# NOTE(review): sklearn.externals.joblib is deprecated in newer scikit-learn;
# presumably the .m files are a pickled classifier + char-ngram TF-IDF
# vectorizer pair — confirm before upgrading sklearn.
model = joblib.load("train_model.m")
tfidf_vect_ngram_chars = joblib.load("tfidf_vect_ngram_chars.m")
# Warm-up / sanity transform on a sample package-size-like token ("0402").
test = pd.Series(["0402"])
test_tfidf_ngram_chars = tfidf_vect_ngram_chars.transform(test)
# Run a read-only query against a DB connection.
def read(db, sql):
    """
    Execute *sql* on *db* and return cursor.fetchall().
    Returns {} (the unchanged default) when the query raises.
    """
    results = {}
    cursor = db.cursor()
    try:
        cursor.execute(sql)
        results = cursor.fetchall()
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt escape.
        # A SELECT has nothing to commit, but rollback clears any aborted
        # transaction state so the shared connection stays usable.
        db.rollback()
    finally:
        # The original leaked the cursor on every call.
        cursor.close()
    return results
# Decide whether a column looks like a row-index column.
def verify_index(data):
    """
    Return True when at least 60% of *data* behaves like a monotonically
    increasing non-negative integer index whose step is at most 100.
    """
    total = len(data)
    if total == 0:
        # Empty column: the original divided by zero here.
        return False
    point = 0
    max_num = 0
    for i in data:
        try:
            val = int(i)
        except (TypeError, ValueError):
            # Non-numeric cell simply doesn't score. The original wrapped the
            # whole loop in one try, so the first bad cell aborted counting.
            continue
        # Non-negative, strictly increasing, and the increment is <= 100
        # (per the original comment). The original compared `max_num - i`,
        # which is never positive when i > max_num, so the step bound was
        # dead code.
        if val >= 0 and val > max_num and val - max_num <= 100:
            max_num = val
            point += 1
    # The column counts as an index when >= 60% of cells qualify.
    return (point / total) >= 0.6
# Decide whether a column looks like a quantity column.
def verify_number(data):
    """Return True when at least 60% of *data* parses as a non-negative integer."""
    total = len(data)
    if total == 0:
        # Empty column: the original divided by zero here.
        return False
    point = 0
    for i in data:
        try:
            # int() may raise; str(...).isdigit() additionally rejects
            # negative values (leading '-' is not a digit).
            if str(int(i)).isdigit():
                point += 1
        except (TypeError, ValueError):
            # Per-item try: one bad cell no longer aborts the whole count
            # as the original loop-wide try did.
            continue
    return (point / total) >= 0.6
# Decide whether a column looks like a category/classification column.
def verify_classify(data):
    """
    Return True when at least 60% of *data* matches a known class name in
    lie_bom_class_name (uses the module-level `db` connection).
    """
    total = len(data)
    if total == 0:
        # Empty column: the original divided by zero here.
        return False
    point = 0
    for i in data:
        try:
            # NOTE(review): the cell value is interpolated straight into the
            # LIKE pattern -- acceptable for trusted BOM spreadsheets, but
            # not injection-safe. Parameterize if input becomes untrusted.
            sql = "SELECT 1 FROM lie_bom_class_name WHERE class_name like '%%%s%%'" % i
            if len(read(db, sql)) > 0:
                point += 1
        except Exception:
            # Per-item try so one bad cell doesn't abort the whole count.
            continue
    return (point / total) >= 0.6
# Decide whether a column looks like a brand column.
def verify_brand(data):
    """
    Return True when at least 60% of *data* matches a known brand name in
    lie_bom_brand_name (uses the module-level `db` connection).
    """
    total = len(data)
    if total == 0:
        # Empty column: the original divided by zero here.
        return False
    point = 0
    for i in data:
        try:
            # Normalise "Brand(note)" -> "BRAND" before matching.
            brand_name = str(i).split('(')[0].upper()
            # BUG FIX: the original interpolated the raw cell *i* instead of
            # the brand_name it had just computed.
            # NOTE(review): still string-built SQL -- not injection-safe.
            sql = "SELECT 1 FROM lie_bom_brand_name WHERE brand_name like '%%%s%%'" % brand_name
            if len(read(db, sql)) > 0:
                point += 1
        except Exception:
            continue
    return (point / total) >= 0.6
# Decide whether a column looks like a parameter (spec) column.
def verify_param(data):
    """
    Return True when the trained classifier labels at least 60% of *data*
    as 'param' (uses the module-level model / vectorizer).
    """
    total = len(data)
    if total == 0:
        # Empty column: the original divided by zero here.
        return False
    point = 0
    try:
        features = tfidf_vect_ngram_chars.transform(pd.Series(data))
        # (A stray no-op `predictions` expression in the original was removed.)
        for pre in model.predict(features):
            if pre == 'param':
                point += 1
    except Exception:
        # Best-effort: an unusable column simply scores zero.
        pass
    return (point / total) >= 0.6
# Decide whether a column looks like a part-number (型号) column.
def verify_goods_name(data):
    """
    Return True when the trained classifier labels at least 60% of *data*
    as 'goods_name' (uses the module-level model / vectorizer).
    """
    total = len(data)
    if total == 0:
        # Empty column: the original divided by zero here.
        return False
    point = 0
    try:
        features = tfidf_vect_ngram_chars.transform(pd.Series(data))
        # (A stray no-op `predictions` expression in the original was removed.)
        for pre in model.predict(features):
            if pre == 'goods_name':
                point += 1
    except Exception:
        # Best-effort: an unusable column simply scores zero.
        pass
    return (point / total) >= 0.6
# Decide whether a column looks like a package/footprint (封装) column.
def verify_encap(data):
    """
    Return True when the trained classifier labels at least 60% of *data*
    as 'encap' (uses the module-level model / vectorizer).
    """
    total = len(data)
    if total == 0:
        # Empty column: the original divided by zero here.
        return False
    point = 0
    try:
        features = tfidf_vect_ngram_chars.transform(pd.Series(data))
        # (A stray no-op `predictions` expression in the original was removed.)
        for pre in model.predict(features):
            if pre == 'encap':
                point += 1
    except Exception:
        # Best-effort: an unusable column simply scores zero.
        pass
    return (point / total) >= 0.6
def main():
    """
    Read a BOM spreadsheet and guess which column plays which role
    (index / quantity / class / brand / parameter / part number / package).

    Each detector claims at most one column; a column already claimed by an
    earlier (higher-priority) detector is skipped.
    """
    frame = pd.read_excel('BOM选型标准模板_ICkey2.xlsx')
    # The original called frame.dropna() / frame.fillna('NA') and discarded
    # the returned copies (no-ops); the per-column fillna below is what
    # actually takes effect, so only it is kept.
    columns = list(frame.columns)
    for col in columns:
        frame[col] = frame[col].fillna('NA')
    # Role label -> claimed column position (-1 = not found).
    col_index = {'索引': -1, '数量': -1, '分类': -1, '品牌': -1, '参数': -1, '型号': -1, '封装': -1}
    col_list = []
    # Detectors in priority order; replaces seven copy-pasted scan loops.
    detectors = [('索引', verify_index),
                 ('数量', verify_number),
                 ('分类', verify_classify),
                 ('品牌', verify_brand),
                 ('参数', verify_param),
                 ('型号', verify_goods_name),
                 ('封装', verify_encap)]
    for label, detector in detectors:
        for idx in range(len(columns)):
            if idx not in col_list and detector(frame[columns[idx]]):
                col_index[label] = idx
                col_list.append(idx)
                break
    print(col_list)


if __name__ == '__main__':
    main()
No preview for this file type
import pymysql
from config.db import *
import pymongo as pm
import redis
class ConnList:
    """
    Connection factory: one static method per backing store.

    MySQL methods all route through the shared _mysql() helper (every MySQL
    config dict in config.db carries the same host/user/password/db_name
    keys); MongoDB and Redis methods build their clients directly from the
    matching config dicts. Each call returns a fresh connection.

    Fixed relative to the original: unreachable second `return` statements
    in WriteOrder/CreditLocal, a stray module-level return after LxRank,
    and dead local variables in Order/IcData/MongoDB.
    """

    @staticmethod
    def _mysql(conf):
        # Shared MySQL constructor for every *_server config dict.
        return pymysql.connect(str(conf['host']), conf['user'], conf['password'],
                               conf['db_name'], charset='utf8')

    @staticmethod
    def Dashboard():
        return ConnList._mysql(dashboard_server)

    @staticmethod
    def Order():
        return ConnList._mysql(order_server)

    @staticmethod
    def WriteOrder():
        return ConnList._mysql(order_write_server)

    @staticmethod
    def Bigdata():
        return ConnList._mysql(bigdata_server)

    @staticmethod
    def Wms():
        return ConnList._mysql(wms_server)

    @staticmethod
    def Zy():
        return ConnList._mysql(zy_server)

    @staticmethod
    def lxData():
        return ConnList._mysql(lxdata_server)

    @staticmethod
    def Behavior():
        return ConnList._mysql(behavior_server)

    @staticmethod
    def IcData():
        return ConnList._mysql(ic_data_server)

    @staticmethod
    def SupData():
        return ConnList._mysql(supplier_server)

    @staticmethod
    def Activity():
        return ConnList._mysql(activity_server)

    @staticmethod
    def LyGoods():
        # Redis store, not MySQL.
        return redis.Redis(**lygoods_server)

    @staticmethod
    def Offer():
        return ConnList._mysql(offer_server)

    @staticmethod
    def Credit():
        return ConnList._mysql(credit_server)

    @staticmethod
    def CreditSource():
        return ConnList._mysql(credit_source_server)

    @staticmethod
    def CreditLx():
        return ConnList._mysql(credit_lx_server)

    @staticmethod
    def CreditLocal():
        return ConnList._mysql(credit_local_server)

    @staticmethod
    def CreditSourceLocal():
        return ConnList._mysql(credit_source_local_server)

    @staticmethod
    def Chain():
        return ConnList._mysql(chain_server)

    @staticmethod
    def Local():
        return ConnList._mysql(local_server)

    @staticmethod
    def Sz():
        return ConnList._mysql(sz_server)

    @staticmethod
    def MongoDB():
        return pm.MongoClient(str(mongodb_server['host']))

    @staticmethod
    def SeoMongoDB():
        return pm.MongoClient(str(seo_mongodb_server['host']))

    @staticmethod
    def LocalMongoDB():
        return pm.MongoClient(str(local_mongodb_server['host']))

    @staticmethod
    def LocalRedis():
        return redis.Redis(**local_redis)

    @staticmethod
    def WriteRedis():
        return redis.Redis(**write_redis)

    @staticmethod
    def WriteRedis23():
        return redis.Redis(**write_redis_23)

    @staticmethod
    def LocalTestRedis():
        return redis.Redis(**local_test_redis)

    @staticmethod
    def SzLxData():
        return ConnList._mysql(sz_lx_data)

    @staticmethod
    def WrCrm():
        return ConnList._mysql(wr_crm_server)

    @staticmethod
    def LocalLx():
        return ConnList._mysql(local_liexin)

    @staticmethod
    def LocalRank():
        return ConnList._mysql(local_rank)

    @staticmethod
    def LxRank():
        return ConnList._mysql(lx_rank)
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Dashboard MySQL connection settings.
# NOTE(review): credentials are committed in plaintext; consider loading
# secrets from environment variables or a vault.
dashboard_server = {
'host': 'localhost',
'user': 'dashboard',
'password': 'ichunt5Dashboard@',
'db_name': 'dashboard'
}
order_server = {
'host': '172.18.137.22',
'user': 'huntdbslave',
......@@ -8,6 +15,48 @@ order_server = {
'db_name': 'hunt2016'
}
# --- MySQL connection settings ------------------------------------------
# NOTE(review): all credentials below are committed in plaintext; consider
# loading secrets from the environment or a vault instead.
# Order DB, writable account.
order_write_server = {
'host': '172.18.137.21',
'user': 'huntdbuser',
'password': 'mLssy2@@!!@$#yy',
'db_name': 'hunt2016'
}
# Big-data warehouse.
bigdata_server = {
'host': '172.18.137.37',
'user': 'bigdata',
'password': 'bdYm2yy2mmyzlmlly',
'db_name': 'bigdata'
}
# Warehouse management system (read-only account).
wms_server = {
'host': '172.18.137.33',
'user': 'LxWmsUserRead',
'password': 'Xu0U2oix3soYmosflmxIiZmyt',
'db_name': 'liexin_wms'
}
# Liexin data DB (read-only account).
lxdata_server = {
'host': '172.18.137.33',
'user': 'LxDDUsedRead',
'password': '0o9u0U2oixoYmosflmxXtZmyt',
'db_name': 'liexin_data'
}
# Self-operated goods DB.
# NOTE(review): identical host/user/db to lxdata_server — confirm this
# duplication is intentional.
zy_server = {
'host': '172.18.137.33',
'user': 'LxDDUsedRead',
'password': '0o9u0U2oixoYmosflmxXtZmyt',
'db_name': 'liexin_data'
}
# Behavior-log DB (read-only account).
behavior_server = {
'host': '172.18.137.21',
'user': 'lxbehaviorread',
'password': 'XwZa920OiMLymsZzytread',
'db_name': 'liexin_behavior'
}
ic_data_server = {
'host': '172.18.137.21',
'user': 'dtuserRead',
......@@ -15,6 +64,163 @@ ic_data_server = {
'db_name': 'icdata'
}
supplier_server = {
'host': '172.18.137.21',
'user': 'SupDbUserR',
'password': 'Supssy2Ryxy',
'db_name': 'liexin_supp'
}
activity_server = {
'host': '172.18.137.21',
'user': 'lxtopic',
'password': 'mXYToic@#@yxIy',
'db_name': 'liexin_topic'
}
local_server = {
'host': 'localhost',
'user': 'root',
'password': '123',
'db_name': 'dashboard'
}
offer_server = {
'host': '192.168.1.232',
'user': 'root',
'password': '123456789',
'db_name': 'offer'
}
sz_server = {
'host': '172.18.137.21',
'user': 'ichunt_szusr',
'password': 'mXYm2##@!!@$#yy',
'db_name': 'ichunt_sz'
}
sz_lx_data = {
'host': '172.18.137.35',
'user': 'LxszUsed',
'password': '0o9u0SDxSflmxXtZmyt',
'db_name': 'liexin_sz_data'
}
mongodb_server = {
'host': 'mongodb://ichunt:huntmon66499@172.18.137.23/ichunt'
}
seo_mongodb_server = {
'host': 'mongodb://SeoUxPxts:hunxP2i1JlLm@172.18.137.23/seo'
}
local_mongodb_server = {
'host': 'mongodb://ichunt:huntmon6699@192.168.1.237/ichunt'
}
lygoods_server = {
'host': '172.18.137.39',
'password': 'icDb29mLy2s',
'port': '6379'
}
wr_crm_server = {
'host': '172.18.137.21',
'user': 'LxCrmUser',
'password': 'xUTmu0XsdUqoZIim2y',
'db_name': 'liexin_crm'
}
credit_server = {
'host': 'fkdb-master.ichunt.cc',
'user': 'Cdimz200o',
'password': 'mLssyDxmsySZmBomy',
'db_name': 'liexin_credit'
}
credit_source_server = {
'host': 'fkdb-master.ichunt.cc',
'user': 'tZzU0serMq',
'password': 'mLssyD2sySZmBo1y',
'db_name': 'liexin_credit_source'
}
credit_lx_server = {
'host': 'fkdb-master.ichunt.cc',
'user': 'PxLxcdp201',
'password': 'Oxnt2n0qplztMszym',
'db_name': 'liexin_credit_lx'
}
credit_local_server = {
'host': 'localhost',
'user': 'dashboard',
'password': 'ichunt5Dashboard@',
'db_name': 'dashboard'
}
credit_source_local_server = {
'host': 'localhost',
'user': 'dashboard',
'password': 'ichunt5Dashboard@',
'db_name': 'dashboard'
}
chain_server = {
'host': '172.18.137.21',
'user': 'ScsDbsy2x',
'password': 'xscSsy2@@!!@x%Yxm',
'db_name': 'liexin_sc'
}
rabbitMq_server = {
'host': '172.18.137.23',
# 'host': '192.168.1.237',
'user': 'huntadmin',
'password': 'jy2y2900'
}
local_redis = {
'host': '192.168.1.235',
'port': '6379',
'password': 'icDb29mLy2s'
}
write_redis = {
'host': '172.18.137.38',
'password': 'icDb29mLy2s',
'port': '6379'
}
write_redis_23 = {
'host': '172.18.137.21',
'password': 'icDb29mLy2s',
'port': '6379'
}
local_test_redis = {
'host': '192.168.1.235',
'password': 'icDb29mLy2s',
'port': '623379'
}
local_liexin = {
'host': '192.168.2.232',
'user': 'liexin',
'password': 'liexin#zsyM',
'db_name': 'liexin'
}
local_rank = {
'host': 'localhost',
'user': 'dashboard',
'password': 'ichunt5Dashboard@',
'db_name': 'dashboard'
}
lx_rank = {
'host': '172.18.137.37',
'user': 'Drakxs',
'password': 'sXtm23@!!@$2yoZ',
'db_name': 'data_rank'
}
# Supplier name -> internal supplier id.
supplier = {
'future': 1,
'rochester': 3,
'tme': 4,
'verical': 5,
'element14': 6,
'digikey': 7,
'chip1stop': 8,
'aipco': 9,
'arrow': 10,
'alliedelec': 12,
'avnet': 13,
'mouser': 14,
'peigenesis': 19,
'powell': 20,
'rs': 21,
'master': 1672,
'rutronik': 1673,
'corestaff': 1675,
'buerklin': 1676,
'microchip': 1677,
'heilind': 1678,
'ti': 1679,
'撮合': -1000,
# NOTE(review): both Liexin consignment channels map to id 17 — confirm
# this is intentional and not a copy-paste slip.
'猎芯联营-技术采集': 17,
'猎芯联营-渠道开发': 17,
'猎芯自营': 10000,
}
# Liexin consignment collection: supplier codes to crawl.
supplier_collect = ['L0000096', 'L0000004', 'L0000218']
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
from extract.ex_base import Base
from config.conn_list import ConnList
from utils.db_handler import DBHandler
class ExActivity(Base):
    """
    Wallet activity extraction.

    Fields:
        wpr_id: wallet preferential rule auto-increment id
        rule_name: rule / activity name
        start_time: activity start time (unix int)
        end_time: activity end time (unix int)
        mall_type: scope: 1-whole site 2-self-operated 3-consignment
        receiver_type: cashback target: 1-orderer only 2-inviter only 3-both
        inviter_cashback_scale: inviter cashback ratio
        invitee_cashback_scale: invitee cashback ratio
        inviter_require_amount: inviter order-amount floor
        invitee_require_amount: invitee order-amount floor
        inviter_max_preferential_amount: inviter cashback cap
        invitee_max_preferential_amount: invitee cashback cap
    """

    def wallet_activity(self, condition):
        """
        Fetch the rule row for condition['wpr_id'], further filtered by
        condition['condition']; returns a list of dicts keyed by *col*.
        """
        # 'rule_name' was listed twice in the original column list;
        # de-duplicated — the resulting dicts are unchanged because both
        # positions carried the same value.
        col = ['wpr_id',
               'rule_name',
               'mall_type',
               'start_time',
               'end_time',
               'receiver_type',
               'invitee_cashback_scale',
               'inviter_cashback_scale',
               'inviter_require_amount',
               'invitee_require_amount',
               'inviter_max_preferential_amount',
               'invitee_max_preferential_amount']
        col_str = super().col_to_str(col)
        db = ConnList.Order()
        wpr_id = condition['wpr_id']
        con_str = super().condition_to_str(condition['condition'])
        sql = "SELECT %s FROM lie_wallet_preferential_rule WHERE wpr_id = %d AND %s" % (col_str, wpr_id, con_str)
        results = DBHandler.read(db=db, sql=sql)
        return super().result_to_dict(col, results)
from utils.date_handler import DateHandler
import subprocess
class Base:
def __init__(self, name):
    """Base extractor: announce which extraction job is starting."""
    print('抽取数据:', name)
    # NOTE(review): this diff dump retains both the old and the new banner
    # print; after a clean merge only one of the two lines should remain.
    print('--------', name, '---------')
# WHERE条件组装
@staticmethod
......@@ -13,7 +15,7 @@ class Base:
con_str = ''
index = 0
if len(condition) > 0:
con_str += ' AND '
# con_str += ' AND '
for row in condition:
index += 1
if index < len(condition):
......@@ -53,4 +55,47 @@ class Base:
r_list.append(r_dict)
return r_list
# 结果转换列表
@staticmethod
def result_to_list(result):
    """Flatten DB rows to a list of each row's first column."""
    return [row[0] for row in result]
# Value条件组装
@staticmethod
def value_to_str(col, result):
    """Render result[c] for each c in col as 'v1','v2',... (single-quoted, comma-joined)."""
    return ','.join('\'' + str(result[c]) + '\'' for c in col)
# 删除Linux本地文件
@staticmethod
def rm_local_file(file_name):
    """Best-effort delete of a local file (no error when it does not exist)."""
    # os.remove + suppress replaces shelling out to `rm -f`: no subprocess
    # spawn, and no quoting problems with spaces or shell metacharacters
    # in the file name.
    import os
    from contextlib import suppress
    with suppress(OSError):
        os.remove(file_name)
# 读取本地文件
@staticmethod
def read_local_file(file_name):
    """Return all lines of *file_name* (trailing newlines kept)."""
    # `with` fixes the original's leaked file handle.
    with open(file_name) as fo:
        return fo.readlines()
from extract.ex_base import Base
from config.conn_list import ConnList
from utils.db_handler import DBHandler
class ExBehavior(Base):
    """
    Behavior-log extraction.
    """

    def behavior_log(self, condition):
        """
        Fetch behavior-log rows whose create_time lies in
        [condition['start_time'], condition['end_time']] and that match
        condition['condition']. Returns a list of dicts keyed by *col*.
        """
        # 'behavior' appeared twice in the original column list;
        # de-duplicated — the resulting dicts are identical because the
        # duplicate position carried the same value.
        col = ['ip', 'behavior', 'platform', 'create_time', 'user_id',
               'adtag', 'param', 'scene', 'user_sign', 'ptag']
        col_str = super().col_to_str(col)
        db = ConnList.Behavior()
        con_str = super().condition_to_str(condition['condition'])
        start_time = condition['start_time']
        end_time = condition['end_time']
        sql = "SELECT %s \
            FROM lie_behavior_log \
            WHERE create_time BETWEEN %d AND %d AND %s" % (col_str, start_time, end_time, con_str)
        results = DBHandler.read(db=db, sql=sql)
        return super().result_to_dict(col, results)
from extract.ex_base import Base
from config.conn_list import ConnList
from utils.db_handler import DBHandler
class ExCrm(Base):
    """CRM user extraction."""

    def crm_user(self, start_time, end_time):
        """Fetch (outter_uid, user_id) pairs created within [start_time, end_time]."""
        conn = ConnList.WrCrm()
        query = "SELECT outter_uid,user_id FROM lie_user WHERE create_time BETWEEN %d AND %d AND source IN (1,2) LIMIT 100000" % (start_time, end_time)
        return DBHandler.read(db=conn, sql=query)
\ No newline at end of file
from extract.ex_base import Base
from config.conn_list import ConnList
from utils.db_handler import DBHandler
class ExDashboard(Base):
    """Extraction queries for dashboard-level reporting."""

    def email_list(self, condition):
        """Return a flat list of e-mail addresses from lie_email_list matching *condition*."""
        con_str = super().condition_to_str(condition['condition'])
        db = ConnList.Dashboard()
        sql = "SELECT email FROM lie_email_list WHERE %s" % con_str
        results = DBHandler.read(db=db, sql=sql)
        # The original printed the result size here; debug output removed.
        return super().result_to_list(results)

    def sum_order(self, condition):
        """Aggregate placed/paid order counts from lie_order_cal matching *condition*."""
        col = ['sum(order_count)', 'sum(order_paid_count)']
        col_str = super().col_to_str(col)
        db = ConnList.Dashboard()
        con_str = super().condition_to_str(condition['condition'])
        sql = "SELECT %s \
            FROM lie_order_cal \
            WHERE %s" % (col_str, con_str)
        results = DBHandler.read(db=db, sql=sql)
        return super().result_to_dict(col, results)

    def lie_adtag(self):
        """Distinct first-level ad channels from lie_adtag_config."""
        col = ['one_level_channel_en']
        col_str = super().col_to_str(col)
        db = ConnList.Dashboard()
        sql = "SELECT %s FROM lie_adtag_config GROUP BY one_level_channel_en" % col_str
        results = DBHandler.read(db=db, sql=sql)
        return super().result_to_dict(col, results)

    def baidu_page(self, condition):
        """Baidu-analytics visited pages for the day given by *condition* (a cal_ts value)."""
        col = ['url', 'pv_count']
        col_str = super().col_to_str(col)
        db = ConnList.Dashboard()
        sql = "SELECT %s FROM lie_baidu_cal_visitpage WHERE cal_ts = \'%s\'" % (col_str, condition)
        results = DBHandler.read(db=db, sql=sql)
        return super().result_to_dict(col, results)

    def lie_ptag(self):
        """All page-tag rows (ptag, regex, id) from lie_page."""
        col = ['ptag', 'regex', 'id']
        col_str = super().col_to_str(col)
        db = ConnList.Dashboard()
        sql = "SELECT %s FROM lie_page" % col_str
        results = DBHandler.read(db=db, sql=sql)
        return super().result_to_dict(col, results)

    def re_activity(self, condition):
        """Activity metadata rows from lie_activity matching *condition*."""
        col = ['id', 'sign', 'name', 'start_time', 'end_time', 'status']
        col_str = super().col_to_str(col)
        db = ConnList.Activity()
        con_str = super().condition_to_str(condition['condition'])
        sql = "SELECT %s FROM lie_activity WHERE %s" % (col_str, con_str)
        results = DBHandler.read(db=db, sql=sql)
        return super().result_to_dict(col, results)

    def answer(self, condition):
        """Quiz scores in the given time window, excluding test users."""
        col = ['user_id', 'activity_id', 'score']
        col_str = super().col_to_str(col)
        db = ConnList.Order()
        con_str = super().condition_to_str(condition['condition'])
        start_time = condition['start_time']
        end_time = condition['end_time']
        sql = "SELECT %s FROM lie_question_user_score WHERE \
            user_id not in (SELECT user_id FROM lie_user_main WHERE is_test = 1) AND \
            create_time BETWEEN %d AND %d AND %s" % (col_str, start_time, end_time, con_str)
        results = DBHandler.read(db=db, sql=sql)
        return super().result_to_dict(col, results)

    def zy_goods(self, condition):
        """Self-operated goods ids exposed on the day condition['start_time'] (a cal_ts int)."""
        col = ['goods_id']
        col_str = super().col_to_str(col)
        db = ConnList.Dashboard()
        start_time = condition['start_time']
        sql = "SELECT %s FROM lie_sku_expose_goods WHERE cal_ts = %d" %\
              (col_str, start_time)
        results = DBHandler.read(db=db, sql=sql)
        return super().result_to_dict(col, results)

    def ex_shence_pu(self, condition):
        """Shence (sensors-data) PV / UV for the day condition['start_time']."""
        col = ['pv', 'uv']
        col_str = super().col_to_str(col)
        db = ConnList.Dashboard()
        start_time = condition['start_time']
        sql = "SELECT %s FROM lie_shence_cal WHERE cal_ts = \'%s\'" % (col_str, start_time)
        results = DBHandler.read(db, sql)
        return super().result_to_dict(col, results)

    def sku_sum_goods(self, condition):
        """Total SKU exposure (sum of goods_list) for the day condition['start_time']."""
        # Single aggregate value; 'goods_list' is only the output dict key.
        col = ['goods_list']
        db = ConnList.Dashboard()
        start_time = condition['start_time']
        sql = "SELECT sum(goods_list) FROM lie_sku_expose WHERE cal_ts = \'%s\'" % start_time
        results = DBHandler.read(db, sql)
        return super().result_to_dict(col, results)

    def avg_operate_data(self, condition):
        """Averaged weekly operating metrics over [start_time, end_time] (date strings)."""
        col = ['avg(pv)', 'avg(uv)', 'avg(pv)/avg(uv)', 'sum(reg_user)', 'avg(active_user)']
        col_str = super().col_to_str(col)
        db = ConnList.Dashboard()
        start_time = condition['start_time']
        end_time = condition['end_time']
        sql = "SELECT %s FROM lie_operate_daily WHERE cal_ts BETWEEN \'%s\' AND \'%s\'" % (col_str, start_time, end_time)
        results = DBHandler.read(db, sql)
        return super().result_to_dict(col, results)
from extract.ex_base import Base
from utils.db_handler import DBHandler
from config.conn_list import ConnList
from suds.client import Client
import hprose
import json
class ExERP(Base):
    """Extractors for ERP data: exchange rates (SOAP), receivables (hprose)
    and the local credit/receivable tables."""

    def __init__(self, date):
        # ERP service endpoints
        self.api_url = 'http://119.23.228.186:6868/ormrpc/services'
        self.cmp_url = 'http://119.23.228.186:8081'
        # business date used for exchange-rate lookups
        self.date = date
        # credit database connections
        self.credit = ConnList.Credit()
        self.credit_source = ConnList.CreditSource()
        self.credit_local = ConnList.CreditLocal()

    def get_erp_exchange(self):
        """Fetch the USD exchange rate for self.date from the ERP SOAP service."""
        client = Client(self.api_url + '/WSIchuntjKFacade?wsdl')
        params_dict = {"CURRENCY": "美元", "BIZDATE": self.date}
        return client.service.getExchangeRate(json.dumps(params_dict))

    def get_erp_company(self, company):
        """Fetch the receivable list for `company` via hprose.

        Best-effort: returns {} on any failure (network, bad JSON, RPC error).
        """
        try:
            client = hprose.HttpClient(self.cmp_url)
            params_dict = {"CUSTOMER": company}
            res = client.getReceivableList(json.dumps(params_dict))
            return json.loads(res)
        # FIX: was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
        except Exception:
            return {}

    def get_erp_data(self, con_str, db):
        """Read receivable detail rows from lie_basic_detail with caller-built WHERE clause."""
        col = ['id', 'order_amount', 'entru_no', 'business_time', 'erp_company_code', 'gross_profit', 'deadline_day', 'delay_day', 'return_amount', 'return_time', 'receive_time', 'tolerance']
        col_str = super().col_to_str(col)
        sql = "SELECT %s FROM lie_basic_detail %s" % (col_str, con_str)
        results = DBHandler.read(db=db, sql=sql)
        final_result = super().result_to_dict(col, results)
        return final_result

    def get_erp_list(self, db):
        """Distinct (erp_company_code, company_name) pairs from lie_com_credits."""
        sql = "SELECT erp_company_code,company_name FROM lie_com_credits WHERE erp_company_code != \'\' GROUP BY erp_company_code,company_name"
        result = DBHandler.read(db=db, sql=sql)
        return [{'cmp_code': row[0], 'cmp_name': row[1]} for row in result]

    def get_erp_code(self, db):
        """Distinct non-empty ERP company codes from lie_basic_detail."""
        sql = "SELECT erp_company_code FROM lie_basic_detail WHERE erp_company_code != \'\' GROUP BY erp_company_code"
        result = DBHandler.read(db=db, sql=sql)
        return [row[0] for row in result]

    def get_erp_diff_month(self, table, db):
        """Distinct `month` values present in `table`."""
        sql = "SELECT month FROM %s GROUP BY month" % table
        result = DBHandler.read(db=db, sql=sql)
        return [row[0] for row in result]

    def get_six_data(self, months, table, db):
        """Per-company aggregates over the months in `months` (SQL IN-list string).

        Returns a list of dicts with tolerance, weighted average delay days,
        six-month average usage level, summed receivable amount and total tolerance.
        """
        sql = "SELECT appoint_tolerance,erp_company_code,SUM(delay_avg_day * receive_count) / SUM(receive_count),SUM(period_user_level) / 6,SUM(receive_amount),SUM(appoint_tolerance) \
            FROM %s \
            WHERE MONTH IN %s \
            GROUP BY erp_company_code \
            ORDER BY MONTH DESC" % (table, months)
        result = DBHandler.read(db=db, sql=sql)
        rd = []
        for row in result:
            rd.append({
                'tolerance': int(row[0]),
                'cmp_code': row[1],
                # aggregates can be NULL when receive_count sums to zero
                'delay_avg_day': float(row[2]) if row[2] is not None else 0,
                'use_level': float(row[3]) if row[3] is not None else 0,
                'receive_amount': float(row[4]),
                'total_tolerance': int(row[5])
            })
        return rd
from extract.ex_base import Base
from config.conn_list import ConnList
from utils.db_handler import DBHandler
import pymysql
import requests
class ExGoods(Base):
    """Extractors for goods data: self-operated SKUs, pooled-operation
    suppliers/SKUs, and ERP purchase records."""

    def ex_zy_goods(self, condition):
        """Self-operated goods: {goods_name: ladder_price} matching condition['condition']."""
        col = ['goods_name', 'ladder_price']
        col_str = super().col_to_str(col)
        rd = {}
        db = ConnList.Zy()
        con_str = super().condition_to_str(condition['condition'])
        sql = "SELECT %s \
            FROM lie_goods \
            WHERE %s" % (col_str, con_str)
        results = DBHandler.read(db=db, sql=sql)
        for row in results:
            rd[row[0]] = row[1]
        return rd

    def ex_ly_supplier(self, condition):
        """Pooled-operation supplier channel rows matching condition['condition']."""
        col = ['supplier_code', 'channel_uid', 'supplier_name']
        col_str = super().col_to_str(col)
        db = ConnList.SupData()
        con_str = super().condition_to_str(condition['condition'])
        sql = "SELECT %s FROM lie_supplier_channel WHERE %s" % (col_str, con_str)
        results = DBHandler.read(db=db, sql=sql)
        return super().result_to_dict(col, results)

    def ex_ly_goods(self, supplier_id=17):
        """All pooled-operation SKU ids and prices across the 10x10 sharded tables.

        Returns {spu_id: {'price': ladder_price, 'canal': canal}} for on-sale
        goods of `supplier_id` (default 17, the historical hard-coded value).
        """
        host = '172.18.137.29'
        user = 'LxiCSpuR35'
        password = 'Mysx3Tyzlo00oxlmllyR'
        rd = {}
        for i in range(0, 10):
            db_name = 'liexin_sku_' + str(i)
            print(db_name)
            # FIX: previously a new connection was opened per table (100 total)
            # and never closed; open one per schema and always close it.
            db = pymysql.connect(host, user, password, db_name, charset='utf8')
            try:
                for j in range(0, 10):
                    table_name = 'lie_sku_' + str(j)
                    sql = "SELECT spu_id,ladder_price,canal FROM %s \
                        WHERE supplier_id = %d AND goods_status = 1 AND ladder_price != \'\'" % (table_name, supplier_id)
                    result = DBHandler.read(db, sql)
                    for row in result:
                        rd[row[0]] = {'price': row[1], 'canal': row[2]}
            finally:
                db.close()
        print(len(rd))
        return rd

    def ex_erp_goods(self, condition):
        """ERP pooled-operation purchase rows within [start_time, end_time]."""
        col = ['supplier_name', 'sale_man', 'sale_order_no', 'pur_order_no',
               'sale_man', 'pur_order_bizdate']
        col_str = super().col_to_str(col)
        db = ConnList.Order()
        start_time = condition['start_time']
        end_time = condition['end_time']
        con_str = super().condition_to_str(condition['condition'])
        sql = "SELECT %s FROM lie_erp_purchase \
            WHERE pur_order_bizdate BETWEEN %d AND %d AND %s" % \
            (col_str, start_time, end_time, con_str)
        results = DBHandler.read(db=db, sql=sql)
        final_result = super().result_to_dict(col, results)
        return final_result
from extract.ex_base import Base
from config.conn_list import ConnList
from config.db import rabbitMq_server
from utils.rabbitMq_handler import Customer
from utils.msg_handler import MsgHandler
from utils.db_handler import DBHandler
from utils.date_handler import DateHandler
from utils.log_handler import LogHandler
import traceback
import pymysql
import time
import json
class ExIckey(Base):
    """Collectors for ICKey self-operated goods and the Yunhan index feed,
    both consumed from RabbitMQ queues."""

    def ex_ickey_goods(self):
        """Drain the 'ickey_goods_store' queue, persisting each message via storage()."""
        mq_db = ConnList.Dashboard()

        def callback(ch, method, properties, body):
            try:
                self.storage(body, mq_db)
                time.sleep(0.1)  # throttle DB writes
            except pymysql.err.OperationalError:
                # DB hiccup: alert, requeue the payload, then ack the original delivery
                MsgHandler.send_dd_msg("【ICKEY数据库出错】\n" + "【出错数据】" + str(body) + "\n" + traceback.format_exc())
                customer.send_task(body)
                ch.basic_ack(delivery_tag=method.delivery_tag)
            except Exception:
                # other failures: alert only; message stays unacked for redelivery
                MsgHandler.send_dd_msg("【ICKEY采集出错】\n" + "【出错数据】" + str(body) + "\n" + traceback.format_exc())
            else:
                ch.basic_ack(delivery_tag=method.delivery_tag)  # tell mq the task has done
            # stop consuming once the queue is drained
            msg_count = ch.queue_declare(queue='ickey_goods_store', durable=True).method.message_count
            if msg_count <= 0:
                ch.stop_consuming()

        customer = Customer(rabbitMq_server['user'], rabbitMq_server['password'],
                            rabbitMq_server['host'], 'ickey_goods_store', '')
        customer.server_forever(callback)

    def ex_ickey_index(self):
        """Drain the 'ickey_index_data' queue into a pipe-delimited daily text file."""
        # MsgHandler.send_dd_msg("【云汉指数数据采集】 启动")
        start = DateHandler.now_datetime()
        file_name = r'/data3/hdfs_data/ickey_index/' + 'ickey_' + DateHandler.now_date(days=0, d_type=2) + '.txt'
        fo = open(file_name, "w")

        def callback(ch, method, properties, body):
            try:
                # only write while the queue still has messages
                if ch.queue_declare(queue='ickey_index_data', durable=True).method.message_count > 0:
                    body_json = json.loads(str(body, encoding="utf-8"))
                    time.sleep(0.001)
                    result = "%s|%s|%s|%s|%s|%s|%s" % (body_json['goods_name'],
                                                       body_json['searchTrendAnalysis'],
                                                       body_json['searchAreaTrendAnalysis'],
                                                       body_json['industryType'],
                                                       body_json['search_num'],
                                                       body_json['trade_num'],
                                                       body_json['buyer_num'])
                    fo.write(result + "\n")
                else:
                    ch.stop_consuming()
            except Exception:
                MsgHandler.send_dd_msg("【云汉指数采集出错】\n" + "【出错数据】" + str(body) + "\n" + traceback.format_exc())
            else:
                ch.basic_ack(delivery_tag=method.delivery_tag)  # tell mq the task has done
            msg_count = ch.queue_declare(queue='ickey_index_data', durable=True).method.message_count
            # flush file buffers periodically
            if msg_count % 10000 == 0:
                fo.flush()
            # queue drained: report, close the file and stop
            if msg_count <= 0:
                end = DateHandler.now_datetime()
                dur = DateHandler.cal_duration(start, end, t_type=2)
                MsgHandler.send_dd_msg("【云汉指数采集任务】 结束\n" +
                                       "任务耗时:" + str(dur) + "分钟\n")
                fo.close()
                ch.stop_consuming()

        customer = Customer(rabbitMq_server['user'], rabbitMq_server['password'],
                            rabbitMq_server['host'], 'ickey_index_data', '')
        customer.server_forever(callback)

    def storage(self, body, db):
        """Upsert one ICKey goods message (JSON bytes) into lie_ickey_goods_stock."""
        result = json.loads(body)
        if 'goods_sn' in result:
            sql_exist = "SELECT id FROM lie_ickey_goods_stock WHERE goods_sn = %d" % result['goods_sn']
            exist = DBHandler.read(db, sql_exist)
            # insert when unseen, otherwise update stock/price fields
            if len(exist) == 0:
                sql = "INSERT INTO lie_ickey_goods_stock(goods_sn, pn, goods_name, brand_name, stock, moq, increment, \
                    url, ts, prices) VALUES ('%d', '%s', '%s', '%s', '%d', '%d', '%d', '%s', '%d', '%s')" % \
                    (result['goods_sn'], result['pn'], result['goods_name'], result['brand_name'], int(result['stock'][1]),
                     int(result['stock'][0]), result['increment'], result['url'], result['time'], result['prices'])
                DBHandler.insert(db, sql)
            else:
                sql = "UPDATE lie_ickey_goods_stock SET stock='%d', moq='%d', increment='%d', prices='%s', ts = '%d' \
                    WHERE goods_sn = '%d'" % \
                    (int(result['stock'][1]), int(result['stock'][0]), result['increment'],
                     result['prices'], DateHandler.now_datetime(), result['goods_sn'])
                cursor = db.cursor()
                try:
                    cursor.execute(sql)
                    # FIX: commit was missing — the UPDATE was silently discarded
                    db.commit()
                except Exception:  # FIX: was a bare except
                    db.rollback()
                    # LogHandler.elk_log(str(traceback.format_exc()), 500, "/data3/dataLog/ex_ickey.log", "EX_ICKEY")

    def ex_ickey_index_goods(self):
        """Parse the day's HDFS index file and keep goods with trade_num >= 10.

        Returns {goods_name: counters-dict}; malformed lines are skipped.
        """
        matched = 0
        total = 0  # FIX: was named `sum`, shadowing the builtin
        rd = {}
        hdfs_file = r'/ickey_index/' + 'ickey_' + \
            DateHandler.now_date(days=0, d_type=2) + '.txt'
        hdfs_data = DBHandler.hdfs_read(hdfs_file)
        print(hdfs_file)
        for raw in hdfs_data:
            line = str(raw, encoding="utf-8")
            try:
                # split once instead of once per field
                parts = line.split('|')
                goods_name = parts[0]
                search_num = int(parts[4])
                trade_num = int(parts[5])
                buyer_num = int(parts[6])
                total += 1
                if trade_num >= 10 and goods_name not in rd:
                    rd[goods_name] = {'ickey_search_count': search_num, 'ickey_trade_count': trade_num,
                                      'ickey_buyer_count': buyer_num, 'create_time': DateHandler.now_datetime(),
                                      'update_time': DateHandler.now_datetime(), 'goods_name': goods_name}
                    matched += 1
            except (IndexError, ValueError):
                # malformed line: ignore, as before
                pass
        print(matched, total)
        return rd
......@@ -2,6 +2,8 @@ from extract.ex_base import Base
from config.conn_list import ConnList
from utils.date_handler import DateHandler
from utils.msg_handler import MsgHandler
from utils.log_handler import LogHandler
import subprocess
class ExLySku(Base):
......@@ -79,3 +81,65 @@ class ExLySku(Base):
MsgHandler.send_dd_msg("【Mongo数据删除任务】 结束\n" +
"任务耗时:" + str(dur) + "分钟\n" +
"删除数量:" + str(del_doc.deleted_count))
@staticmethod
def put_hdfs():
    """Upload today's pooled-SKU price file to HDFS, then delete the local copy."""
    MsgHandler.send_dd_msg("【数据上传HDFS任务】 启动")
    start = DateHandler.now_datetime()
    # today's file, shared by the upload and the cleanup commands
    local_path = "/data3/hdfs_data/ly_sku_price/sku_" + \
        DateHandler.now_date(days=0, d_type=2) + ".txt"
    # push to HDFS, then remove the local source
    subprocess.getoutput("hdfs dfs -put " + local_path + " /ly_sku_price/")
    subprocess.getoutput("rm -f " + local_path)
    end = DateHandler.now_datetime()
    dur = DateHandler.cal_duration(start, end, t_type=2)
    MsgHandler.send_dd_msg("【数据上传HDFS任务】 结束\n" +
                           "任务耗时:" + str(dur) + "分钟\n")
@staticmethod
def load_hbase():
    """Bulk-load today's HDFS price file into HBase via ImportTsv and log the output."""
    MsgHandler.send_dd_msg("【HDFS数据写入HBASE任务】 启动")
    start = DateHandler.now_datetime()
    # pipe-separated import into sku:stockgoods
    cmd = ("/data2/hbase/hbase-2.0.1/bin/hbase org.apache.hadoop.hbase.mapreduce.ImportTsv "
           "-Dimporttsv.columns=HBASE_ROW_KEY,cf1:sku_id,cf1:tiered,cf1:time "
           "'-Dimporttsv.separator=|' sku:stockgoods /ly_sku_price/sku_" +
           DateHandler.now_date(days=0, d_type=2) + ".txt")
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    # collect stdout and stderr into one log message
    msg = ""
    for stream in proc.communicate():
        msg += str(stream) + "\n"
    # ship the command output to ELK
    LogHandler.elk_log(msg, 101, '/data3/dataLog/ly_price_collect_' + DateHandler.now_date(0, 2) + '.log',
                       'sku_collect')
    end = DateHandler.now_datetime()
    dur = DateHandler.cal_duration(start, end, t_type=2)
    MsgHandler.send_dd_msg("【HDFS数据写入HBASE任务】 结束\n" +
                           "任务耗时:" + str(dur) + "分钟\n")
@staticmethod
def del_hdfs():
    """Remove today's pooled-SKU price file from HDFS."""
    target = '/ly_sku_price/sku_' + \
        DateHandler.now_date(days=0, d_type=2) + '.txt'
    subprocess.getoutput('hadoop dfs -rm -r ' + target)
from extract.ex_base import Base
from config.conn_list import ConnList
from utils.date_handler import DateHandler
from utils.db_handler import DBHandler
class ExMarket(Base):
    """Marketing extracts: prize winners and coupon-linked orders."""

    def ex_prize_log(self, condition):
        """User ids from lie_prize_winner matching condition['condition']."""
        fields = ['user_id']
        where = super().condition_to_str(condition['condition'])
        query = "SELECT %s FROM lie_prize_winner WHERE %s" % (super().col_to_str(fields), where)
        rows = DBHandler.read(db=ConnList.Order(), sql=query)
        return super().result_to_dict(fields, rows)

    def ex_user_coupon(self, condition):
        """Coupon usage joined to the order's amount, status and currency."""
        fields = ['c.user_id', 'o.order_amount', 'o.status', 'o.currency']
        where = super().condition_to_str(condition['condition'])
        query = "SELECT %s \
            FROM lie_user_coupon c \
            LEFT JOIN lie_order o \
            ON c.order_id = o.order_id \
            WHERE %s" % (super().col_to_str(fields), where)
        rows = DBHandler.read(db=ConnList.Order(), sql=query)
        return super().result_to_dict(fields, rows)
......@@ -14,8 +14,8 @@ class ExOrder(Base):
def all_order(self, condition):
# 筛选字段
col = ['user_id', 'order_id', 'order_source', 'order_type', 'status', 'order_amount',
'order_goods_type', 'currency']
col = ['user_id', 'order_id', 'order_source', 'order_type', 'status', 'order_amount', 'create_time',
'order_goods_type', 'currency', 'order_sn', 'sale_id', 'order_pay_type', 'cancel_reason']
col_str = super().col_to_str(col)
# 订单数据库
......@@ -30,14 +30,42 @@ class ExOrder(Base):
create_time BETWEEN %d AND %d \
AND is_type = 0 \
AND order_type = 1 \
AND user_id NOT IN ( SELECT user_id FROM lie_user_main WHERE is_test = 1 ) %s" \
AND user_id NOT IN (SELECT user_id FROM lie_user_main WHERE is_test = 1 OR is_type = 1) AND %s" \
% (col_str, start_time, end_time, con_str)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
平台汇总订单
"""
def all_pf_order(self, condition):
# 筛选字段
col = ['user_id', 'order_id', 'order_source', 'order_type', 'status', 'order_amount',
'order_goods_type', 'currency', 'order_sn', 'sale_id', 'order_type']
col_str = super().col_to_str(col)
# 订单数据库
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
con_str = super().condition_to_str(condition['condition'])
sql = "SELECT %s \
FROM \
lie_order \
WHERE \
create_time BETWEEN %d AND %d \
AND is_type = 0 \
AND user_id NOT IN ( SELECT user_id FROM lie_user_main WHERE is_test = 1 ) AND %s" \
% (col_str, start_time, end_time, con_str)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
print('final_result', len(final_result))
print(final_result[0])
return final_result
......@@ -51,8 +79,8 @@ class ExOrder(Base):
# 筛选字段
col = ['o.user_id', 'o.order_id', 'o.order_source', 'o.order_type', 'o.status', 'o.order_amount',
'o.order_goods_type', 'o.currency', 'o.pay_time', 'o.sale_id', 'i.tax_title', 'i.nike_name',
'u.mobile', 'u.email']
'o.order_goods_type', 'o.currency', 'o.pay_time', 'o.create_time', 'o.sale_id',
'i.tax_title', 'i.nike_name', 'u.mobile', 'u.email']
col_str = super().col_to_str(col)
# 订单数据库
......@@ -70,7 +98,7 @@ class ExOrder(Base):
o.pay_time BETWEEN %d AND %d \
AND o.is_type = 0 \
AND o.order_type = 1 \
AND u.is_test = 0 %s" \
AND u.is_test = 0 AND %s" \
% (col_str, start_time, end_time, con_str)
results = DBHandler.read(db=db, sql=sql)
......@@ -87,7 +115,83 @@ class ExOrder(Base):
def order_items(self, condition):
    """Order line items joined with order, user and invoice data.

    Filters to real (is_type=0), normal-type orders of non-test users created
    within [start_time, end_time], plus condition['condition'].
    """
    # NOTE(review): this first `col` assignment is immediately overwritten below —
    # it looks like merge residue; kept byte-identical to preserve behavior.
    col = ['i.order_id', 'u.user_id', 'i.goods_id', 'o.order_sn', 'o.status']
    col = ['i.order_id', 'i.goods_id', 'i.goods_price', 'i.goods_number', 'i.single_pre_price', 'i.goods_type', 'i.status as items_status',
           'i.canal', 'i.supplier_name', 'i.supplier_id', 'i.brand_id', 'i.goods_name', 'i.brand_name', 'i.goods_class',
           'o.order_sn', 'o.status', 'o.currency', 'o.pay_time', 'o.create_time', 'o.order_pay_type',
           'o.sale_id', 'o.order_source', 'o.order_source', 'o.cancel_reason',
           'u.user_id', 'u.mobile', 'u.email',
           'v.tax_title', 'v.nike_name', 'v.inv_type', 'v.company_phone']
    # col = ['o.order_sn', 'i.supplier_id', 'i.supplier_name', 'i.canal', 'o.status']
    col_str = super().col_to_str(col)
    # order database
    db = ConnList.Order()
    start_time = condition['start_time']
    end_time = condition['end_time']
    con_str = super().condition_to_str(condition['condition'])
    sql = "SELECT %s FROM \
        lie_order_items i\
        LEFT JOIN lie_order o ON i.order_id = o.order_id \
        LEFT JOIN lie_user_main u ON u.user_id = o.user_id \
        LEFT JOIN lie_order_invoice v ON v.order_id = i.order_id \
        WHERE o.create_time BETWEEN %d AND %d \
        AND o.is_type = 0 \
        AND o.order_type = 1 \
        AND u.is_test = 0 AND %s" \
        % (col_str, start_time, end_time, con_str)
    results = DBHandler.read(db=db, sql=sql)
    # convert row tuples to dicts keyed by column name
    final_result = super().result_to_dict(col, results)
    return final_result
"""
平台订单明细
"""
def order_pf_price(self, condition):
# 筛选字段
col = ['p.price', 'p.price_type', 'o.currency', 'o.order_type', 'o.order_goods_type',
'o.order_source', 'o.user_id', 'o.order_id']
col_str = super().col_to_str(col)
# 订单数据库
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
con_str = super().condition_to_str(condition['condition'])
sql = "SELECT %s FROM \
lie_order_price p\
LEFT JOIN lie_order o \
ON p.order_id = o.order_id \
LEFT JOIN lie_user_main u \
ON u.user_id = o.user_id \
WHERE \
p.create_time BETWEEN %d AND %d \
AND o.is_type = 0 \
AND u.is_test = 0 AND %s" \
% (col_str, start_time, end_time, con_str)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
获取订单明细 V1.0 zzl
1. 关联订单表获取
"""
def order_paid_items(self, condition):
# 筛选字段
col = ['i.order_id', 'u.user_id', 'i.goods_id', 'o.order_sn', 'o.status', 'i.goods_price', 'i.goods_number',
'o.currency', 'i.supplier_id', 'i.canal', 'i.supplier_name', 'u.mobile', 'u.email', 'o.pay_time',
'v.tax_title', 'v.nike_name', 'o.sale_id', 'o.order_source']
# col = ['o.order_sn', 'i.supplier_id', 'i.supplier_name', 'i.canal', 'o.status']
col_str = super().col_to_str(col)
# 订单数据库
......@@ -101,12 +205,14 @@ class ExOrder(Base):
ON i.order_id = o.order_id \
LEFT JOIN lie_user_main u \
ON u.user_id = o.user_id \
LEFT JOIN lie_order_invoice v \
ON v.order_id = i.order_id \
WHERE \
o.create_time BETWEEN %d AND %d \
o.pay_time BETWEEN %d AND %d \
AND o.is_type = 0 \
AND o.status > 2 \
AND o.order_type = 1 \
AND o.order_goods_type = 1 \
AND u.is_test = 0 %s" \
AND u.is_test = 0 AND %s" \
% (col_str, start_time, end_time, con_str)
results = DBHandler.read(db=db, sql=sql)
......@@ -115,3 +221,632 @@ class ExOrder(Base):
return final_result
"""
获取订单付款详情 V1.0 zzl
1. 关联订单表
"""
def order_price(self, condition):
# 筛选字段
col = ['o.order_goods_type', 'u.user_id', 'o.status', 'o.order_sn', 'o.order_id', 'p.price', 'p.price_type']
col_str = super().col_to_str(col)
# 订单数据库
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
con_str = super().condition_to_str(condition['condition'])
sql = "SELECT %s FROM \
lie_order_price p \
LEFT JOIN lie_order o \
ON p.order_id = o.order_id \
LEFT JOIN lie_user_main u \
ON u.user_id = o.user_id \
WHERE \
p.create_time BETWEEN %d AND %d \
AND o.is_type = 0 \
AND o.order_type = 1 \
AND u.is_test = 0 AND %s" \
% (col_str, start_time, end_time, con_str)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
获取订单下指定ID的数据
"""
def order_items_specify(self, data):
sku = {}
# 筛选字段
col = ['goods_id', 'goods_number']
col_str = super().col_to_str(col)
for row in data:
order_id = row['order_id']
db = ConnList.Order()
sql = "SELECT %s FROM lie_order_items WHERE order_id = %d AND status = 1" % (col_str, order_id)
result = DBHandler.read(db=db, sql=sql)
for r in result:
goods_id = r[0]
goods_number = r[1]
if goods_id not in sku:
sku[goods_id] = goods_number
else:
sku[goods_id] += goods_number
return sku
"""
获取京东订单
"""
def jdLyOrder(self, condition):
# 筛选字段
col = ['count(*)', 'sum(order_payment)']
col_str = super().col_to_str(col)
# 订单数据库
db = ConnList.Bigdata()
start_time = condition['start_time']
end_time = condition['end_time']
con_str = super().condition_to_str(condition['condition'])
sql = "SELECT %s FROM lie_jd_order_main WHERE create_time BETWEEN %d AND %d AND %s" \
% (col_str, start_time, end_time, con_str)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
rd = []
for row in results:
rd.append({'count': row[0], 'amount': float(row[1]) if row[1] is not None else 0})
return rd
"""
订单付款明细
主表:
lie_pay_log
字段:
user_id: 用户id
order_id: 订单id
order_sn: 订单编号
pay_time: 支付时间
pay_amount: 支付金额
连接表:
lie_user_main
字段:
mobile: 手机
email: 邮箱
invite_uid: 邀请人ID
连接表:
lie_user_info
字段:
safe_mobile: 安全手机
连接表:
lie_order
字段:
currency: 币种 1-人民币 2-美元
条件
o.is_type != 1 不为尽调单
o.status > 2 订单已付款
o.order_pay_type != 3 订单不为账期单
u.is_test != 1 不为测试用户
"""
def wallet_pay_log(self, condition):
col = ['p.user_id',
'p.order_id',
'p.order_sn',
'p.pay_time',
'p.pay_amount',
'u.mobile',
'u.email',
'u.invite_uid',
'i.safe_mobile',
'o.currency']
col_str = super().col_to_str(col)
db = ConnList.Order()
# db = ConnList.Local()
start_time = condition['start_time']
end_time = condition['end_time']
create_start_time = condition['create_start_time']
create_end_time = condition['create_end_time']
con_str = super().condition_to_str(condition['condition'])
sql = "SELECT %s FROM lie_pay_log p \
LEFT JOIN lie_user_main u ON u.user_id = p.user_id \
LEFT JOIN lie_user_info i ON i.user_id = p.user_id \
LEFT JOIN lie_order o ON o.order_id = p.order_id \
WHERE p.pay_time BETWEEN %d AND %d \
AND u.create_time BETWEEN %d AND %d \
AND %s ORDER BY p.pay_time" % (col_str, start_time, end_time, create_start_time, create_end_time, con_str)
results = DBHandler.read(db=db, sql=sql)
return super().result_to_dict(col, results)
"""
首次下单用户
"""
def first_order_user(self, condition):
# 筛选字段
col = ['user_id']
# 订单数据库
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
con_str = super().condition_to_str(condition['condition'])
sql = "SELECT o.user_id FROM lie_order o \
LEFT JOIN lie_user_main u ON o.user_id = u.user_id \
WHERE o.user_id NOT IN (SELECT user_id FROM lie_order WHERE create_time < %d AND is_type = 0 AND order_goods_type IN (1,2)) AND \
o.create_time BETWEEN %d AND %d AND o.is_type = 0 AND o.order_type = 1 AND o.order_goods_type IN (1,2) AND u.is_test = 0 AND u.is_type = 0 AND %s GROUP BY o.user_id" \
% (start_time, start_time, end_time, con_str)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
首次下单具体数据
"""
def first_order_detail(self, condition):
# 筛选字段
col = ['o.user_id',
'o.order_amount',
'o.currency',
'o.order_goods_type',
'o.status',
'u.mobile',
'u.email',
'o.pay_time',
'o.order_source',
'u.create_time as reg_time',
'o.create_time']
col_str = super().col_to_str(col)
# 订单数据库
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
order_goods_type = condition['order_goods_type']
con_str = super().condition_to_str(condition['condition'])
sql = "SELECT %s \
FROM lie_order o \
LEFT JOIN lie_user_main u ON o.user_id = u.user_id \
WHERE o.user_id NOT IN (SELECT user_id FROM lie_order WHERE create_time < %d AND is_type = 0 AND order_goods_type IN %s) AND \
o.create_time BETWEEN %d AND %d \
AND o.is_type = 0 \
AND o.order_type = 1 \
AND o.order_goods_type IN %s \
AND u.is_test = 0 \
AND u.is_type = 0 \
AND %s" \
% (col_str, start_time, order_goods_type, start_time, end_time, order_goods_type, con_str)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
联营首次下单用户
"""
def first_ly_order_user(self, condition):
# 筛选字段
col = ['user_id']
# 订单数据库
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
con_str = super().condition_to_str(condition['condition'])
sql = "SELECT o.user_id FROM lie_order o \
LEFT JOIN lie_user_main u ON o.user_id = u.user_id \
WHERE o.user_id NOT IN (SELECT user_id FROM lie_order WHERE create_time < %d AND is_type = 0 AND order_goods_type = 1) AND \
o.create_time BETWEEN %d AND %d AND o.is_type = 0 AND o.order_type = 1 AND o.order_goods_type = 1 AND u.is_test = 0 AND u.is_type = 0 AND %s GROUP BY o.user_id" \
% (start_time, start_time, end_time, con_str)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
自营首次下单用户
"""
def first_zy_order_user(self, condition):
# 筛选字段
col = ['user_id']
# 订单数据库
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
con_str = super().condition_to_str(condition['condition'])
sql = "SELECT o.user_id \
FROM lie_order o \
LEFT JOIN lie_user_main u ON o.user_id = u.user_id \
WHERE o.user_id NOT IN (SELECT user_id FROM lie_order WHERE create_time < %d AND is_type = 0 AND order_goods_type = 2) AND \
o.create_time BETWEEN %d AND %d \
AND o.is_type = 0 \
AND o.order_type = 1 \
AND o.order_goods_type = 2 \
AND u.is_test = 0 \
AND u.is_type = 0 \
AND %s \
GROUP BY o.user_id" \
% (start_time, start_time, end_time, con_str)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
首次付款用户
"""
def first_paid_user(self, condition):
# 筛选字段
col = ['user_id']
# 订单数据库
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
sql = "SELECT p.user_id FROM lie_pay_log p \
LEFT JOIN lie_order o ON p.order_id = o.order_id \
LEFT JOIN lie_user_main u ON p.user_id = u.user_id \
WHERE p.user_id NOT IN \
(SELECT p.user_id FROM lie_pay_log p LEFT JOIN lie_order o ON p.order_id = o.order_id WHERE p.pay_time < %d AND o.order_type = 1 AND o.order_goods_type IN (1,2) GROUP BY p.user_id) \
AND p.pay_time BETWEEN %d AND %d \
AND o.order_type = 1 \
AND o.order_goods_type IN (1,2) \
AND o.is_type = 0 \
AND u.is_test = 0 \
AND u.is_type = 0 \
GROUP BY p.user_id" \
% (start_time, start_time, end_time)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
自营分类
"""
def self_classify(self, condition):
# 筛选字段
col = ['class_id', 'class_name', 'parent_id']
col_str = super().col_to_str(col)
# 订单数据库
db = ConnList.lxData()
con_str = super().condition_to_str(condition['condition'])
sql = "SELECT %s FROM lie_self_classify WHERE %s" % (col_str, con_str)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
联营首次付款用户
"""
def first_ly_paid_user(self, condition):
# 筛选字段
col = ['user_id']
# 订单数据库
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
sql = "SELECT p.user_id \
FROM lie_pay_log p \
LEFT JOIN lie_order o ON p.order_id = o.order_id \
LEFT JOIN lie_user_main u ON p.user_id = u.user_id \
WHERE p.user_id NOT IN (SELECT p.user_id FROM lie_pay_log p LEFT JOIN lie_order o ON p.order_id = o.order_id WHERE p.pay_time < %d AND o.order_type = 1 AND o.order_goods_type = 1 AND o.is_type = 0 GROUP BY p.user_id) \
AND p.pay_time BETWEEN %d AND %d AND o.order_type = 1 AND o.order_goods_type = 1 AND o.is_type = 0 AND u.is_test = 0 AND u.is_type = 0 GROUP BY p.user_id" \
% (start_time, start_time, end_time)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
自营首次付款用户
"""
def first_zy_paid_user(self, condition):
# 筛选字段
col = ['user_id']
# 订单数据库
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
sql = "SELECT p.user_id \
FROM lie_pay_log p \
LEFT JOIN lie_order o ON p.order_id = o.order_id \
LEFT JOIN lie_user_main u ON p.user_id = u.user_id \
WHERE p.user_id NOT IN (SELECT p.user_id FROM lie_pay_log p LEFT JOIN lie_order o ON p.order_id = o.order_id WHERE p.pay_time < %d AND o.order_type = 1 AND o.order_goods_type = 2 AND o.is_type = 0 GROUP BY p.user_id) \
AND p.pay_time BETWEEN %d AND %d AND o.order_type = 1 AND o.order_goods_type = 2 AND o.is_type = 0 AND u.is_test = 0 AND u.is_type = 0 GROUP BY p.user_id" \
% (start_time, start_time, end_time)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
取消订单数
"""
def cancel_order(self, condition):
# 筛选字段
col = ['order_id']
# 订单数据库
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
con_str = super().condition_to_str(condition['condition'])
sql = "SELECT order_id FROM lie_order WHERE create_time BETWEEN %d AND %d AND status = -1 AND order_type = 1 AND order_goods_type IN (1,2) AND is_type = 0 AND %s" \
% (start_time, end_time, con_str)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
注册用户转化率
"""
def reg_trans(self, condition):
# 筛选字段
col = ['u.user_id']
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
col_str = super().col_to_str(col)
sql = "SELECT %s FROM lie_user_main u \
LEFT JOIN lie_order o ON u.user_id = o.user_id \
WHERE o.create_time BETWEEN %d AND %d AND \
u.create_time BETWEEN %d AND %d \
AND o.is_type = 0 \
AND o.status > 2 \
AND u.is_test = 0 \
AND u.is_type = 0 GROUP BY u.user_id" % (col_str, start_time, end_time, start_time, end_time)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
支付详情
"""
def pay_log_detail(self, condition):
# 筛选字段
col = ['p.user_id', 'p.pay_amount', 'o.currency', 'o.order_goods_type']
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
col_str = super().col_to_str(col)
sql = "SELECT %s \
FROM lie_pay_log p \
LEFT JOIN lie_order o ON p.order_id = o.order_id \
LEFT JOIN lie_user_main u ON p.user_id = u.user_id \
WHERE \
o.is_type = 0 \
AND o.order_goods_type IN (1, 2) \
AND u.is_test = 0 \
AND u.is_type = 0 \
AND p.pay_time BETWEEN %d AND %d" % (col_str, start_time, end_time)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
自营订单总用户数
"""
def zy_order_user(self, start_time):
col = ['user_id']
db = ConnList.Order()
sql = "SELECT o.user_id FROM lie_order o\
LEFT JOIN lie_user_main u ON o.user_id = u.user_id \
WHERE o.order_goods_type = 2 AND o.order_type = 1 AND o.is_type = 0 AND u.is_test = 0 AND u.is_type = 0 AND o.status > 2 \
AND o.create_time <= %d \
GROUP BY o.user_id" % start_time
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
自营+联营单
"""
def zy_ly_order_user(self, start_time):
col = ['user_id']
db = ConnList.Order()
sql = "SELECT o.user_id FROM lie_order o \
LEFT JOIN lie_user_main u ON o.user_id = u.user_id \
WHERE o.user_id IN (SELECT user_id FROM lie_order WHERE order_goods_type = 2 AND status > 2 AND create_time <= %d) \
AND o.order_type = 1 AND o.order_goods_type = 1 AND o.is_type = 0 AND u.is_test = 0 AND u.is_type = 0 AND o.status > 2 \
AND o.create_time <= %d\
GROUP BY o.user_id" % (start_time, start_time)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
复购
"""
def repeat_buy(self, start_time, order_type):
col = ['user_id']
db = ConnList.Order()
sql = "SELECT o.user_id FROM lie_order o \
LEFT JOIN lie_user_main u ON o.user_id = u.user_id \
WHERE o.order_type = 1 AND o.order_goods_type = %d AND o.is_type = 0 AND u.is_test = 0 AND u.is_type = 0 AND o.status > 2 \
AND o.create_time <= %d \
GROUP BY o.user_id HAVING COUNT(o.user_id) > 1" % (order_type, start_time)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
线上总用户
"""
def online_user(self, start_time):
    """Number of distinct real users with a completed online order.

    Counts users whose order is completed (status = 10), of goods type
    1 or 2, and created before *start_time*. Returns an int.
    """
    conn = ConnList.Order()
    query = ("SELECT o.user_id FROM lie_order o "
             "LEFT JOIN lie_user_main u ON o.user_id = u.user_id "
             "WHERE u.is_test = 0 "
             "AND u.is_type = 0 "
             "AND o.order_type = 1 "
             "AND o.order_goods_type IN (1,2) "
             "AND o.is_type = 0 "
             "AND o.status = 10 "
             "AND o.create_time < %d "
             "GROUP BY o.user_id" % start_time)
    # GROUP BY yields one row per user, so the row count is the user count.
    return len(DBHandler.read(db=conn, sql=query))
"""
下单并付款用户
"""
def order_and_paid_user(self, condition):
    """Distinct user ids that placed a paid order (status > 2) in a window.

    condition: {'start_time': int, 'end_time': int,
                'condition': [extra goods-type filter fragments]}
    Returns the ids as a list, first-seen order preserved.
    """
    conn = ConnList.Order()
    start_time = condition['start_time']
    end_time = condition['end_time']
    extra_filter = super().condition_to_str(condition['condition'])
    query = ("SELECT o.user_id FROM lie_order o "
             "LEFT JOIN lie_user_main u ON o.user_id = u.user_id "
             "WHERE u.is_test = 0 "
             "AND u.is_type = 0 "
             "AND o.order_type = 1 "
             "AND o.order_goods_type IN (1,2) "
             "AND o.is_type = 0 "
             "AND o.status > 2 "
             "AND o.create_time BETWEEN %d AND %d "
             "AND o.order_goods_type IN %s "
             "GROUP BY o.user_id"
             % (start_time, end_time, extra_filter))
    rows = DBHandler.read(db=conn, sql=query)
    # Order-preserving de-duplication. GROUP BY already makes the ids
    # unique, so this is belt-and-braces, same as the original loop.
    return list(dict.fromkeys(row[0] for row in rows))
"""
下单用户详情
"""
def order_and_paid_detail(self, condition):
    """Order detail rows (with user contact info) created in a window.

    condition: {'start_time': int, 'end_time': int,
                'order_goods_type': SQL 'IN (...)' clause string,
                'condition': [extra SQL fragments]}
    Returns a list of dicts keyed by the selected columns.
    """
    # Columns selected; these also become the dict keys of the result.
    columns = ['o.user_id',
               'o.order_amount',
               'o.currency',
               'o.order_goods_type',
               'o.status',
               'o.order_sn',
               'u.email',
               'u.mobile']
    selected = super().col_to_str(columns)
    conn = ConnList.Order()
    extra_filter = super().condition_to_str(condition['condition'])
    query = ("SELECT %s "
             "FROM lie_order o "
             "LEFT JOIN lie_user_main u ON o.user_id = u.user_id "
             "WHERE "
             "o.create_time BETWEEN %d AND %d "
             "AND o.is_type = 0 "
             "AND o.order_type = 1 "
             "AND o.order_goods_type IN %s "
             "AND u.is_test = 0 "
             "AND u.is_type = 0 "
             "AND %s"
             % (selected, condition['start_time'], condition['end_time'],
                condition['order_goods_type'], extra_filter))
    rows = DBHandler.read(db=conn, sql=query)
    # Map raw result tuples onto column-keyed dicts.
    return super().result_to_dict(columns, rows)
from extract.ex_base import Base
from config.conn_list import ConnList
from utils.db_handler import DBHandler
class ExOthers(Base):
    """Miscellaneous extraction queries (dashboard database)."""

    def email_list(self, condition):
        """Fetch e-mail addresses from lie_email_list.

        condition: {'condition': [SQL fragments appended after 'id > 0']}
        Returns a flat list of e-mail strings.
        """
        extra_filter = super().condition_to_str(condition['condition'])
        conn = ConnList.Dashboard()
        query = ("SELECT email "
                 "FROM "
                 "lie_email_list "
                 "WHERE id > 0 %s" % extra_filter)
        rows = DBHandler.read(db=conn, sql=query)
        # Flatten single-column result rows into a plain list.
        emails = super().result_to_list(rows)
        print('final_result', len(emails))
        return emails
from extract.ex_base import Base
from config.conn_list import ConnList
from utils.db_handler import DBHandler
class ExPurchase(Base):
    """Extraction queries for purchasing / warehouse (WMS) data."""

    def safe_stock(self, condition):
        """Stock-warning rows joined with their packing names.

        condition: {'condition': [SQL fragments for the WHERE clause]}
        Returns a list of dicts keyed by the selected columns.
        """
        columns = ['sku_id', 'b.packing_name']
        selected = super().col_to_str(columns)
        extra_filter = super().condition_to_str(condition['condition'])
        conn = ConnList.Wms()
        query = ("SELECT %s FROM "
                 "lie_safe_stock a "
                 "LEFT JOIN lie_packing b "
                 "ON a.packing_id = b.packing_id "
                 "WHERE %s" % (selected, extra_filter))
        rows = DBHandler.read(db=conn, sql=query)
        return super().result_to_dict(columns, rows)

    def purchase_count(self, condition):
        """Purchase line count and total amount inside an audit-time window.

        i.status: 1 normal, -1 deleted.
        p.status: -10 voided, -1 draft, 1 pending audit, 4 awaiting
        shipment, 6 partially shipped, 10 fully shipped, 11 force-completed.

        Returns [{'count': int, 'amount': float}].
        """
        columns = ['count(*) as count', 'sum(picking_number * picking_price) amount']
        selected = super().col_to_str(columns)
        extra_filter = super().condition_to_str(condition['condition'])
        conn = ConnList.Wms()
        query = ("SELECT %s FROM "
                 "lie_purchase_items i "
                 "LEFT JOIN lie_purchase p ON i.picking_id = p.picking_id "
                 "WHERE p.audit_time BETWEEN %d AND %d AND %s"
                 % (selected, condition['start_time'], condition['end_time'], extra_filter))
        rows = DBHandler.read(db=conn, sql=query)
        # SUM() yields NULL (None) on empty sets; coerce to 0.
        return [{'count': row[0], 'amount': float(row[1]) if row[1] is not None else 0}
                for row in rows]

    def purchase_items(self, condition):
        """Per-line purchase amounts with brand names inside an audit-time window.

        Returns [{'brand': str, 'amount': float}].
        """
        columns = ['brand_name', '(picking_number * picking_price) amount']
        selected = super().col_to_str(columns)
        extra_filter = super().condition_to_str(condition['condition'])
        conn = ConnList.Wms()
        query = ("SELECT %s FROM "
                 "lie_purchase_items i "
                 "LEFT JOIN lie_purchase p ON i.picking_id = p.picking_id "
                 "WHERE p.audit_time BETWEEN %d AND %d AND %s"
                 % (selected, condition['start_time'], condition['end_time'], extra_filter))
        rows = DBHandler.read(db=conn, sql=query)
        # NULL amounts (None) are coerced to 0.
        return [{'brand': row[0], 'amount': float(row[1]) if row[1] is not None else 0}
                for row in rows]

    def in_stock(self, condition):
        """Count of approved items (status = 1) added within the window."""
        conn = ConnList.lxData()
        query = ("SELECT count(*) FROM lie_examine WHERE status = 1 AND add_time "
                 "BETWEEN %d AND %d" % (condition['start_time'], condition['end_time']))
        rows = DBHandler.read(db=conn, sql=query)
        return super().result_to_list(rows)

    def wait_stock(self):
        """Total number of goods still waiting to be stocked (status = 0)."""
        conn = ConnList.lxData()
        rows = DBHandler.read(db=conn, sql="SELECT count(*) FROM lie_goods WHERE status = 0")
        return super().result_to_list(rows)

    def wait_examine(self):
        """Total number of items still awaiting review (status = 0)."""
        conn = ConnList.lxData()
        rows = DBHandler.read(db=conn, sql="SELECT count(*) FROM lie_examine WHERE status = 0")
        return super().result_to_list(rows)
from extract.ex_base import Base
from utils.date_handler import DateHandler
import requests
import json
class ExShenCe(Base):
    """Client for the Sensors Analytics (神策) reporting HTTP API.

    Every query method POSTs a JSON body to the events-report endpoint
    (``url``) or the retentions endpoint (``url2``) and returns a plain
    dict/list. Response-shape problems degrade to the method's empty
    default instead of raising; transport errors from ``requests.post``
    still propagate, as in the original implementation.

    FIXES vs. the previous version: ``preserve`` no longer raises
    ZeroDivisionError when the API returns no cohorts; the bare
    ``except:`` clauses are narrowed; the duplicated request-building
    code is factored into private helpers (public interface unchanged).
    """

    # Events report endpoint (PV/UV/aggregate queries).
    url = 'https://shence.ichunt.com/api/events/report?project=production&token=c792cda539bafaaff5100c269f702999f0b24ac9fb3262f5f4341486701c600f'
    # Retentions report endpoint (cohort retention queries).
    url2 = 'https://shence.ichunt.com/api/retentions/report?project=production&token=c792cda539bafaaff5100c269f702999f0b24ac9fb3262f5f4341486701c600f'

    def _rows(self, url, body):
        """POST *body* as JSON to *url*; return the response 'rows' list.

        Returns [] when the response is not JSON or lacks a 'rows' key.
        """
        headers = {'content-type': 'application/json'}
        r = requests.post(url, data=json.dumps(body, ensure_ascii=False), headers=headers)
        try:
            return r.json()['rows']
        except Exception:
            return []

    def _event_body(self, measures, from_date, to_date, by_fields=None, extra=None):
        """Build the common events-report request body.

        ``by_fields`` is omitted from the payload when None (some of the
        original queries do not send the key at all); *extra* entries are
        merged in last and may override defaults (e.g. "filter").
        """
        body = {
            "measures": measures,
            "unit": "day",           # time bucket granularity
            "filter": {},            # no filtering by default
            "sampling_factor": 64,   # 64 = full (unsampled) data
            "from_date": from_date,
            "to_date": to_date,
            "use_cache": True,
        }
        if by_fields is not None:
            body["by_fields"] = by_fields
        if extra:
            body.update(extra)
        return body

    def _single_value(self, key, event_name, aggregator, from_date, to_date):
        """Run a one-measure query; return {key: str(first value)} ('' on failure)."""
        rd = {key: ''}
        body = self._event_body([{"event_name": event_name, "aggregator": aggregator}],
                                from_date, to_date)
        rows = self._rows(self.url, body)
        try:
            rd[key] = str(rows[0]['values'][0][0])
        except (KeyError, IndexError):
            pass
        return rd

    def url_detail(self):
        """Yesterday's pageview count per URL: [{'url', 'pv_count'}, ...]."""
        day = DateHandler.now_date(1, 1)
        body = self._event_body([{"event_name": "$pageview", "aggregator": "general"}],
                                day, day, by_fields=["event.$pageview.current_url"])
        rd = []
        for row in self._rows(self.url, body):
            try:
                rd.append({'url': row['by_values'][0], 'pv_count': row['values'][0][0]})
            except (KeyError, IndexError):
                pass  # skip a malformed row instead of aborting the whole report
        return rd

    def sum_pv(self):
        """Yesterday's total PV as a single-element list ([] on failure)."""
        day = DateHandler.now_date(1, 1)
        body = self._event_body([{"event_name": "$pageview", "aggregator": "general"}],
                                day, day, by_fields=[])
        rd = []
        rows = self._rows(self.url, body)
        try:
            rd.append(rows[0]['values'][0][0])
        except (KeyError, IndexError):
            pass
        return rd

    def _platform_split(self, aggregator):
        """Yesterday's per-platform ('PC'/'H5') $pageview series.

        Each platform maps to the row's per-day value series (a list),
        matching the original behaviour.
        """
        rd = {'PC': 0, 'H5': 0}
        day = DateHandler.now_date(1, 1)
        body = self._event_body([{"event_name": "$pageview", "aggregator": aggregator}],
                                day, day, by_fields=["event.$pageview.platformType"])
        for row in self._rows(self.url, body)[:2]:
            try:
                rd[row['by_values'][0]] = row['values'][0]
            except (KeyError, IndexError):
                pass
        return rd

    def ex_pv(self):
        """Yesterday's pageview series split by platform ('PC'/'H5')."""
        return self._platform_split("general")

    def sum_uv(self):
        """Yesterday's total UV as a single-element list ([] on failure)."""
        day = DateHandler.now_date(1, 1)
        body = self._event_body([{"event_name": "$pageview", "aggregator": "unique"}],
                                day, day, by_fields=[])
        rd = []
        rows = self._rows(self.url, body)
        try:
            rd.append(rows[0]['values'][0][0])
        except (KeyError, IndexError):
            pass
        return rd

    def ex_uv(self):
        """Yesterday's unique-visitor series split by platform ('PC'/'H5')."""
        return self._platform_split("unique")

    def reg_pui(self, params):
        """PV / UV / distinct-IP counts for URLs matching regex *params* (yesterday)."""
        rd = {'pv': 0, 'uv': 0, 'ip': 0}
        day = DateHandler.now_date(1, 1)
        measures = [{"event_name": "$pageview", "aggregator": "general"},
                    {"event_name": "$pageview", "aggregator": "unique"},
                    {"event_name": "$pageview", "aggregator": "uniqCount",
                     "field": "event.$pageview.$ip"}]
        extra = {
            "filter": {"conditions": [{"field": "event.$pageview.current_url",
                                       "function": "rlike",
                                       "params": [params]}]},
            "axis_config": {"isNormalize": False, "left": [], "right": []},
            "bookmarkid": "9",
            "tType": "n",
            "ratio": "n",
            "request_id": "1550559653526:226907",
        }
        body = self._event_body(measures, day, day, by_fields=[], extra=extra)
        rows = self._rows(self.url, body)
        try:
            values = rows[0]['values'][0]
            rd['pv'], rd['uv'], rd['ip'] = values[0], values[1], values[2]
        except (KeyError, IndexError):
            pass
        return rd

    def bounce_rate(self, start_time, end_time):
        """Session bounce rate between the given dates, as a string ('' on failure)."""
        rd = {'bounce_rate': ''}
        measures = [{"event_name": "$Anything", "aggregator": "bounce_rate",
                     "by_session": True}]
        body = self._event_body(measures, start_time, end_time,
                                extra={"session_name": "session_default"})
        rows = self._rows(self.url, body)
        try:
            rd['bounce_rate'] = str(rows[0]['values'][0][0])
        except (KeyError, IndexError):
            pass
        return rd

    def stay_time(self, start_time, end_time):
        """Average session duration in seconds, as a string ('' on failure)."""
        rd = {'stay_time': ''}
        measures = [{"event_name": "$Anything", "aggregator": "AVG",
                     "field": "session.session_default.$session_duration",
                     "by_session": True}]
        body = self._event_body(measures, start_time, end_time,
                                extra={"session_name": "session_default"})
        rows = self._rows(self.url, body)
        try:
            rd['stay_time'] = str(rows[0]['values'][0][0])
        except (KeyError, IndexError):
            pass
        return rd

    def reg_user(self, start_time, end_time):
        """Distinct users firing the signUp event."""
        return self._single_value('reg_user', 'signUp', 'unique', start_time, end_time)

    def log_user(self, start_time, end_time):
        """Distinct users firing the login event."""
        return self._single_value('log_user', 'login', 'unique', start_time, end_time)

    def active_user(self, start_time, end_time):
        """Daily active users (distinct users firing the 'cs' event)."""
        return self._single_value('active_user', 'cs', 'unique', start_time, end_time)

    def search_user(self, start_time, end_time):
        """Distinct users firing the search event."""
        return self._single_value('search_user', 'search', 'unique', start_time, end_time)

    def search_count(self, start_time, end_time):
        """Total number of search events."""
        return self._single_value('search_count', 'search', 'general', start_time, end_time)

    def preserve(self, start_time, end_time):
        """Next-day retention ratio (signUp -> $pageview) in percent.

        BUG FIX: the previous version divided by the cohort total outside
        its try block, raising ZeroDivisionError whenever the API returned
        no rows; it now falls back to the 0 default. The stray debug
        ``print(r.json())`` was removed.
        """
        rd = {'preserve_radio': 0}
        pre = 0  # users retained on day 1, summed over cohorts
        reg = 0  # cohort sizes, summed over cohorts
        body = {
            "first_event": {"event_name": "signUp", "filter": {}},
            "second_event": {"event_name": "$pageview", "filter": {}},
            "measures": [],
            "rangeText": "",
            "extend_over_end_date": True,
            "duration": 1,
            "unit": "day",
            "chartsType": "raw",
            "user_filter": {},
            "is_wastage": False,
            "sampling_factor": 64,  # 64 = full (unsampled) data
            "from_date": start_time,
            "to_date": end_time,
            "use_cache": True,
        }
        for row in self._rows(self.url2, body):
            try:
                pre += row['cells'][1]['people']
                reg += row['total_people']
            except (KeyError, IndexError):
                pass
        if reg > 0:
            rd['preserve_radio'] = round((pre / reg) * 100, 2)
        return rd
import json
import requests
import subprocess
import time
from extract.ex_base import Base
from extract.ex_order import ExOrder
from utils.db_handler import DBHandler
from utils.date_handler import DateHandler
from config.conn_list import ConnList
from config.supplier import supplier, supplier_collect
from pipeline.pi_email import PiEmail
from load.load_mysql import LoadMysql
from utils.excel_handler import ExcelHandler
from config.db import rabbitMq_server
from utils.rabbitMq_handler import Customer
class ExSkuExpose(Base):
@staticmethod
def merge_file():
    """Upload yesterday's merged sku-expose file to HDFS, then delete the
    local copy.

    BUG FIX: the cleanup step previously ran
    ``rm /data3/hdfs_data/sku_expose_<ts>`` while the file actually lives
    under ``/data3/hdfs_data/sku_expose/``, so the local file was never
    removed and accumulated on disk. The stray leading space in the
    source path (harmless to the shell, but sloppy) is dropped too.
    """
    time_ts = DateHandler.now_date(days=1, d_type=2)
    local_file = '/data3/hdfs_data/sku_expose/sku_expose_' + time_ts
    # Push the merged file back to HDFS.
    cmd = 'hadoop dfs -put ' + local_file + ' /sku_expose/'
    subprocess.getoutput(cmd)
    print(cmd)
    # Remove the local copy — same path that was just uploaded.
    cmd = 'rm ' + local_file
    subprocess.getoutput(cmd)
    print(cmd)
@staticmethod
def get_hdfs_data():
    """Read yesterday's sku-expose file from HDFS and return its contents."""
    file_date = DateHandler.now_date(days=1, d_type=2)
    # Path mirrors the one merge_file() uploads to.
    return DBHandler.hdfs_read('/sku_expose/sku_expose_' + file_date)
@staticmethod
def collect_rq():
    """Drain the 'search_show_sku_list' RabbitMQ queue into today's local file.

    Consumes messages until the queue reports empty, appending each raw
    message body as one line to
    /data3/hdfs_data/sku_expose/sku_expose_<today>.

    NOTE(review): the file handle is only closed on the queue-empty path;
    an exception inside the callback leaves it open (the broad except
    also swallows write errors) — confirm this is acceptable.
    """
    # Output file: append mode, one raw message per line.
    file_name = r'/data3/hdfs_data/sku_expose/' + 'sku_expose_' + DateHandler.now_date(days=0, d_type=2)
    fo = open(file_name, "a+")

    def callback(ch, method, properties, body):
        try:
            # Only write while the queue still reports pending messages.
            if ch.queue_declare(queue='search_show_sku_list', durable=True).method.message_count > 0:
                # Parse and persist the message body.
                # body_json = json.loads(str(body, encoding="utf-8"))
                time.sleep(0.001)
                fo.write(str(body, encoding="utf-8") + "\n")
            else:
                ch.stop_consuming()
        except Exception:
            pass
        else:
            ch.basic_ack(delivery_tag=method.delivery_tag)  # tell mq the task has done
        # Remaining message count drives flushing and shutdown.
        msg_count = ch.queue_declare(queue='search_show_sku_list', durable=True).method.message_count
        # Flush buffered lines periodically.
        if msg_count % 10000 == 0:
            fo.flush()
        # Queue drained: finish up.
        if msg_count <= 0:
            # Close the file stream.
            fo.close()
            # Stop consuming.
            ch.stop_consuming()

    customer = Customer(rabbitMq_server['user'], rabbitMq_server['password'],
                        rabbitMq_server['host'], 'search_show_sku_list', '')
    customer.server_forever(callback)
"""
汇总SKU计算
"""
@staticmethod
def cal_sku_expose(hdfs_data):
    """Aggregate one day of search-exposure logs per supplier channel.

    For every supplier type configured in ``supplier`` this counts
    exposures, distinct SKUs/keywords/goods names and lowest-price wins,
    merges in yesterday's search-sourced order lines, queries the search
    service for valid-SKU totals, derives ratio fields, writes one row
    per supplier to MySQL (lie_sku_expose) and e-mails a summary report.

    hdfs_data: iterable of JSON strings, one exposure record per line.
    """
    # Aggregation state.
    sku_cal = {}
    keyword_list = []
    lowest_supplier = {}
    sum_effect = 0
    # Initialise one bucket per configured supplier type.
    for sp in supplier:
        sku_cal[sp] = {'ex_count': 0,
                       'goods_list': [],
                       'keyword_list': [],
                       'order_sku': 0,
                       'order_paid_sku': 0,
                       'supplier_name': sp,
                       'supplier_id': supplier[sp],
                       'effect_sku': 0,
                       'canal': [],
                       'goods_name': [],
                       'lowest_radio': '0%'}
    # Walk the raw exposure records.
    for row in hdfs_data:
        try:
            row = json.loads(row)
            goods_id = int(row['goods_id'])
            goods_name = row['goods_name']
            supplier_type = row['supplier_type']
            supplier_name = row['supplier_name']
            keyword = row['keyword']
            ladder_price = row['ladder_price']
            # Extract the channel code from names shaped '猎芯联营-<canal>'.
            canal_arr = supplier_name.split('-')
            if len(canal_arr) > 1 and canal_arr[0] == '猎芯联营':
                canal = canal_arr[1]
            else:
                canal = ''
            # Special supplier-type remapping.
            if supplier_type == '猎芯寄售':
                supplier_type = '猎芯自营'
            elif supplier_type == '猎芯联营':
                supplier_type = '猎芯联营-渠道开发'
            # Channels listed in supplier_collect count as "tech-collected".
            if canal != '' and canal in supplier_collect:
                supplier_type = '猎芯联营-技术采集'
            # Only supplier types present in the config are aggregated.
            if supplier_type in sku_cal:
                # De-duplicate channels.
                if canal not in sku_cal[supplier_type]['canal'] and canal != '':
                    sku_cal[supplier_type]['canal'].append(canal)
                # Global keyword de-duplication.
                if keyword not in keyword_list:
                    keyword_list.append(keyword)
                # Exposure count per channel.
                sku_cal[supplier_type]['ex_count'] += 1
                # SKU de-duplication.
                if goods_id not in sku_cal[supplier_type]['goods_list']:
                    sku_cal[supplier_type]['goods_list'].append(goods_id)
                # Keyword de-duplication per channel.
                if keyword not in sku_cal[supplier_type]['keyword_list']:
                    sku_cal[supplier_type]['keyword_list'].append(keyword)
                # Goods-name de-duplication.
                if goods_name not in sku_cal[supplier_type]['goods_name']:
                    sku_cal[supplier_type]['goods_name'].append(goods_name)
                # Track the lowest first-ladder price per goods name.
                if len(ladder_price) > 0:
                    lowest_supplier = ExSkuExpose.merge_lowest_price(goods_name, ladder_price[0], supplier_type,
                                                                     lowest_supplier)
        except:
            pass
    # Resolve lowest-price winners per supplier.
    lowest_result = ExSkuExpose.cal_lowest_price(lowest_supplier)
    # Yesterday's order lines that came through search.
    where = {'start_time': DateHandler.date_time(1), 'end_time': DateHandler.date_time(0),
             'condition': ['i.order_id > 1',
                           'o.order_source not like \'%pf=-1%\'',
                           'o.order_source like \'%search%\'']}
    ex_order = ExOrder('料号明细')
    data = ex_order.order_items(where)
    # Merge order lines into the buckets.
    for row in data:
        supplier_name = row['supplier_name']
        canal = row['canal']
        # Joint-venture orders are split by channel-list membership.
        if supplier_name == '猎芯联营':
            supplier_name = '猎芯联营-渠道开发' if canal in sku_cal['猎芯联营-渠道开发']['canal'] else '猎芯联营-技术采集'
        # Update sku_cal.
        if supplier_name in sku_cal:
            sku_cal[supplier_name]['order_sku'] += 1
            if row['status'] > 2:
                sku_cal[supplier_name]['order_paid_sku'] += 1
    # Derive the numeric / ratio fields per supplier.
    for supplier_name in sku_cal:
        sku_cal[supplier_name]['supplier_name'] = supplier_name
        sku_cal[supplier_name]['goods_list'] = len(sku_cal[supplier_name]['goods_list'])
        sku_cal[supplier_name]['keyword_list'] = len(sku_cal[supplier_name]['keyword_list'])
        # NOTE(review): raises ZeroDivisionError when hdfs_data is empty.
        sku_cal[supplier_name]['ex_radio'] = \
            str(round(sku_cal[supplier_name]['ex_count'] / len(hdfs_data) * 100, 2)) + '%'
        # Self-operated vs match-making vs joint-venture handling.
        if supplier_name == '猎芯自营':
            # Self-operated: valid SKU total comes from the goods table.
            db = ConnList.Zy()
            sql = "SELECT count(*) FROM lie_goods WHERE status = 1"
            result = DBHandler.read(db, sql)
            if len(result) > 0:
                sku_cal[supplier_name]['goods_radio'] = \
                    str(round(sku_cal[supplier_name]['goods_list'] / result[0][0] * 100, 2)) + '%'
                sku_cal[supplier_name]['effect_sku'] = result[0][0]
                sum_effect += result[0][0]
            else:
                sku_cal[supplier_name]['goods_radio'] = '0%'
        elif supplier_name == '撮合':
            # Valid SKU total for the match-making channel.
            url = "http://footstone.ichunt.net/webapi/hd_matches_goods"
            # NOTE(review): `body` is never assigned in this branch — it
            # reuses whatever a previous loop iteration left behind, and
            # raises NameError if this branch runs first. Confirm the
            # intended request payload.
            r = requests.post(url, data=body)
            sku_cal[supplier_name]['effect_sku'] = r.json()['data']['total']
            sum_effect += r.json()['data']['total']
            # Exposed SKUs as a share of valid SKUs.
            sku_cal[supplier_name]['goods_radio'] = \
                str(round(sku_cal[supplier_name]['goods_list'] / r.json()['data']['total'] * 100, 2)) + '%'
        else:
            # Valid SKU total accumulated across channels.
            sku_num = 0
            # SKU search endpoint.
            url = "http://so12.ichunt.com/search/es/searchsku"
            # Query per channel when the supplier has channels.
            canal_size = len(sku_cal[supplier_name]['canal'])
            if canal_size > 0:
                # One request per channel.
                for canal in sku_cal[supplier_name]['canal']:
                    body = {"supplier_id": sku_cal[supplier_name]['supplier_id'], "goods_status/condition": 1,
                            "status/condition": 1, 'canal/condition': canal}
                    r = requests.post(url, data=body)
                    sku_num += int(r.json()['data']['total']) if r.json()['error_code'] == 0 else 0
            else:
                body = {"supplier_id": sku_cal[supplier_name]['supplier_id'], "goods_status/condition": 1,
                        "status/condition": 1}
                r = requests.post(url, data=body)
                sku_num += int(r.json()['data']['total']) if r.json()['error_code'] == 0 else 0
            # Exposed SKUs as a share of valid SKUs.
            if sku_num > 0:
                sku_cal[supplier_name]['goods_radio'] = \
                    str(round(sku_cal[supplier_name]['goods_list'] / int(sku_num) * 100, 2)) + '%'
            else:
                sku_cal[supplier_name]['goods_radio'] = '0%'
            # Accumulate totals.
            sum_effect += sku_num
            sku_cal[supplier_name]['effect_sku'] = sku_num
        # Ordered SKUs as a share of valid SKUs.
        if sku_cal[supplier_name]['effect_sku'] > 0:
            sku_cal[supplier_name]['order_ep_radio'] = \
                str(round(sku_cal[supplier_name]['order_sku'] / sku_cal[supplier_name]['effect_sku'] * 100,
                          4)) + '%'
        else:
            sku_cal[supplier_name]['order_ep_radio'] = '0%'
        # Exposure -> order conversion rate.
        if sku_cal[supplier_name]['order_sku'] > 0:
            sku_cal[supplier_name]['ex_order_radio'] = str(round(sku_cal[supplier_name]['order_sku'] / sku_cal[supplier_name]['goods_list'] * 100, 2)) + '%' if sku_cal[supplier_name]['goods_list'] != 0 else 0
        else:
            sku_cal[supplier_name]['ex_order_radio'] = '0%'
        # Keyword hit rate against valid SKUs.
        if int(sku_cal[supplier_name]['effect_sku']) > 0:
            sku_cal[supplier_name]['keyword_radio'] = str(round(int(sku_cal[supplier_name]['keyword_list']) /
                                                                int(sku_cal[supplier_name]['effect_sku']) * 100, 2)) + '%'
        else:
            sku_cal[supplier_name]['keyword_radio'] = '0%'
        # Lowest-price wins as a share of exposed goods names.
        if supplier_name in lowest_result:
            sku_cal[supplier_name]['lowest_radio'] = str(round(int(lowest_result[supplier_name]) / len(sku_cal[supplier_name]['goods_name']) * 100, 2)) + '%' if len(sku_cal[supplier_name]['goods_name']) > 0 else '0%'
    # Re-shape into the e-mail template fields (a_1, b_1, ... per supplier).
    index = 1
    e_data = {'day': DateHandler.now_date(1, 1)}
    for sku in sku_cal:
        # Valid-SKU share of the grand total.
        # NOTE(review): ZeroDivisionError if sum_effect stayed 0.
        sku_cal[sku]['effect_sku_radio'] = str(round(int(sku_cal[sku]['effect_sku']) / sum_effect * 100, 2)) + '%'
        e_data['a_' + str(index)] = sku
        e_data['b_' + str(index)] = sku_cal[sku]['effect_sku']
        e_data['c_' + str(index)] = sku_cal[sku]['effect_sku_radio']
        e_data['d_' + str(index)] = sku_cal[sku]['ex_count']
        e_data['e_' + str(index)] = sku_cal[sku]['ex_radio']
        e_data['f_' + str(index)] = sku_cal[sku]['goods_list']
        e_data['g_' + str(index)] = sku_cal[sku]['goods_radio']
        e_data['h_' + str(index)] = sku_cal[sku]['order_sku']
        e_data['i_' + str(index)] = sku_cal[sku]['order_paid_sku']
        e_data['j_' + str(index)] = sku_cal[sku]['order_ep_radio']
        e_data['k_' + str(index)] = sku_cal[sku]['ex_order_radio']
        e_data['l_' + str(index)] = sku_cal[sku]['keyword_list']
        e_data['m_' + str(index)] = sku_cal[sku]['keyword_radio']
        index += 1
    # Persist to MySQL.
    load_col = ['insert_time', 'cal_ts', 'ex_count', 'ex_radio', 'goods_list', 'goods_radio', 'effect_sku',
                'effect_sku_radio', 'ex_order_radio', 'keyword_radio', 'order_sku', 'order_paid_sku',
                'order_ep_radio', 'keyword_list', 'supplier_name', 'lowest_radio']
    LoadMysql.sample_load(load_col, 'lie_sku_expose', sku_cal, db=ConnList.Dashboard())
    PiEmail.pipeline_sku_expose(e_data)
"""
猎芯联营细分渠道计算
"""
@staticmethod
def cal_sku_expose_detail(hdfs_data):
    """Break '猎芯联营-L*' joint-venture exposures down by sub-channel.

    Builds per-channel exposure / SKU / keyword counts from the raw logs,
    merges in yesterday's search-sourced order lines, normalises against
    the daily totals already written to lie_sku_expose, and writes one
    row per channel to MySQL (lie_sku_expose_lxly).

    hdfs_data: iterable of JSON strings, one exposure record per line.
    """
    # Aggregation buckets keyed by the full supplier name.
    sku_cal = {}
    # Walk the raw exposure records.
    for row in hdfs_data:
        try:
            row = json.loads(row)
            goods_id = row['goods_id']
            supplier_name = row['supplier_name']
            keyword = row['keyword']
            # Only '猎芯联营-L*' sub-channel records are of interest here.
            if '猎芯联营-L' in supplier_name:
                canal_arr = supplier_name.split('-')
                # First sighting of this channel: create its bucket.
                if supplier_name not in sku_cal:
                    sku_cal[supplier_name] = {'ex_count': 1, 'goods_list': [goods_id], 'keyword_list': [keyword],
                                              'order_sku': 0, 'order_paid_sku': 0, 'supplier_name': supplier_name,
                                              'effect_sku': 0, 'canal': canal_arr[1]}
                else:
                    sku_cal[supplier_name]['ex_count'] += 1
                    # De-duplicate part numbers.
                    if goods_id not in sku_cal[supplier_name]['goods_list']:
                        sku_cal[supplier_name]['goods_list'].append(goods_id)
                    # De-duplicate keywords.
                    if keyword not in sku_cal[supplier_name]['keyword_list']:
                        sku_cal[supplier_name]['keyword_list'].append(keyword)
        except:
            pass
    # Yesterday's order lines that came through search.
    where = {'start_time': DateHandler.date_time(1), 'end_time': DateHandler.date_time(0),
             'condition': ['i.order_id > 1',
                           'o.order_source not like \'%pf=-1%\'',
                           'o.order_source like \'%search%\'']}
    ex_order = ExOrder('料号明细')
    data = ex_order.order_items(where)
    # Merge order lines into the channel buckets.
    for row in data:
        supplier_name = '猎芯联营-' + row['canal']
        # Update sku_cal.
        if supplier_name in sku_cal:
            sku_cal[supplier_name]['order_sku'] += 1
            if row['status'] > 2:
                sku_cal[supplier_name]['order_paid_sku'] += 1
    # Daily totals previously written by cal_sku_expose.
    db = ConnList.Dashboard()
    sql = "SELECT sum(ex_count), sum(goods_list), sum(keyword_list), sum(effect_sku) \
          FROM lie_sku_expose \
          WHERE cal_ts = '%s'" % DateHandler.now_date(1, 1)
    result = DBHandler.read(db, sql)
    # NOTE(review): SUM() yields NULL (None) when no rows match, which
    # would make the `> 0` comparisons below raise on Python 3 — confirm
    # cal_sku_expose always runs first for this cal_ts.
    ex_count = result[0][0]
    goods_list = result[0][1]
    keyword_list = result[0][2]
    effect_sku = result[0][3]
    # Derive per-channel ratio fields.
    for supplier_name in sku_cal:
        # SKU search endpoint (supplier 17 = 猎芯联营, filtered by channel).
        url = "http://so12.ichunt.com/search/es/searchsku"
        canal = sku_cal[supplier_name]['canal']
        body = {"supplier_id": '17', "goods_status/condition": 1,
                "status/condition": 1, 'canal/condition': canal}
        r = requests.post(url, data=body)
        sku_cal[supplier_name]['effect_sku'] = int(r.json()['data']['total']) if r.json()['error_code'] == 0 else 0
        sku_cal[supplier_name]['ex_radio'] = str(round(sku_cal[supplier_name]['ex_count']
                                                       / ex_count * 100, 2)) + '%' if ex_count > 0 else '0%'
        sku_cal[supplier_name]['goods_radio'] = str(round(len(sku_cal[supplier_name]['goods_list'])
                                                          / goods_list * 100, 2)) + '%' if goods_list > 0 else '0%'
        sku_cal[supplier_name]['keyword_radio'] = str(round(len(sku_cal[supplier_name]['keyword_list'])
                                                            / keyword_list * 100, 2)) + '%' if keyword_list > 0 else '0%'
        sku_cal[supplier_name]['effect_sku_radio'] = str(round(sku_cal[supplier_name]['effect_sku']
                                                               / effect_sku * 100, 2)) + '%' if effect_sku > 0 else '0%'
        sku_cal[supplier_name]['ex_order_radio'] = str(round(sku_cal[supplier_name]['order_sku']
                                                             / len(sku_cal[supplier_name]['goods_list']) * 100, 2)) + '%' if len(sku_cal[supplier_name]['goods_list']) > 0 else '0%'
        sku_cal[supplier_name]['order_ep_radio'] = str(round(sku_cal[supplier_name]['order_sku']
                                                             / sku_cal[supplier_name]['effect_sku'] * 100, 2)) + '%' if sku_cal[supplier_name]['effect_sku'] > 0 else '0%'
        sku_cal[supplier_name]['keyword_list'] = len(sku_cal[supplier_name]['keyword_list'])
        sku_cal[supplier_name]['goods_list'] = len(sku_cal[supplier_name]['goods_list'])
    # Persist to MySQL.
    load_col = ['insert_time', 'cal_ts', 'ex_count', 'ex_radio', 'goods_list', 'goods_radio', 'effect_sku',
                'effect_sku_radio', 'ex_order_radio', 'keyword_radio', 'order_sku', 'order_paid_sku',
                'order_ep_radio', 'keyword_list', 'supplier_name']
    LoadMysql.sample_load(load_col, 'lie_sku_expose_lxly', sku_cal, db=ConnList.Dashboard())
"""
计算过期sku曝光数据
"""
@staticmethod
def cal_sku_expose_expire(hdfs_data):
    """Report exposure of EXPIRED joint-venture SKUs and export to Excel.

    Counts exposures of expired SKUs (status == 2 in the log) per
    supplier, fetches each supplier's total expired-SKU count from the
    search service, matches yesterday's order lines against expired SKUs,
    derives ratios and writes the result to an Excel file.

    hdfs_data: iterable of JSON strings, one exposure record per line.
    """
    # Aggregation buckets.
    sku_cal = {}
    # Initialise sku_cal from the supplier config.
    for sp in supplier:
        if sp not in ['撮合', '猎芯自营']:  # drop non-joint-venture channels
            sp_id = supplier[sp]
            if '猎芯联营' in sp:  # collapse all 猎芯联营 variants into one bucket
                sp = '猎芯联营'
            sku_cal[sp] = {'ex_count': 0,       # exposure count
                           'ex_rd': '0%',       # exposure-count share
                           'goods_list': [],    # exposed SKU ids
                           'goods_rd': '0%',    # exposed-SKU share
                           'expire_num': 0,     # expired SKU count
                           'expire_rd': '0%',   # expired-SKU share
                           'order_num': 0,      # ordered SKU count
                           'supplier_name': sp, # supplier name
                           'supplier_id': sp_id}  # supplier id
    # Walk the raw exposure records.
    for row in hdfs_data:
        row = json.loads(row)
        goods_id = row['goods_id']
        supplier_type = row['supplier_type']
        status = int(row['status']) if 'status' in row else 1
        if status == 2 and supplier_type in sku_cal:  # 1 = active, 2 = expired; bucket must exist
            sku_cal[supplier_type]['ex_count'] += 1  # exposure count
            if goods_id not in sku_cal[supplier_type]['goods_list']:  # exposed-SKU de-dup
                sku_cal[supplier_type]['goods_list'].append(goods_id)
    # Fetch each supplier's total expired-SKU count.
    for st in sku_cal:
        url = "http://so12.ichunt.com/search/es/searchsku"
        body = {"supplier_id": sku_cal[st]['supplier_id'],
                "goods_status/condition": 1,  # goods status: 0 pending, 1 approved (listed), 2 rejected, 3 delisted, 4 deleted
                "status/condition": 0  # expiry flag: 0 = expired, 1 = active
                }
        r = requests.post(url, data=body)
        expire_num = int(r.json()['data']['total']) if r.json()['error_code'] == 0 else 0
        sku_cal[st]['expire_num'] = expire_num
    # Match yesterday's order lines against expired SKUs.
    # NOTE(review): `url` below is reused from the previous loop — it
    # works only because that loop always runs; consider re-declaring it.
    order = ExOrder('').order_items({'start_time': DateHandler.date_time(1),
                                     'end_time': DateHandler.date_time(0),
                                     'condition': ['o.order_id > 0']})
    for od in order:
        sp_id = od['supplier_id']
        bd_id = od['brand_id']
        gn = od['goods_name']
        sp_name = od['supplier_name']
        body = {
            "supplier_id": sp_id,          # supplier id
            "brand_id/condition": bd_id,   # brand id
            "goods_name/condition": gn,    # part number
            "goods_status/condition": 1,
            "status/condition": 0
        }
        r = requests.post(url, data=body)
        is_exit = int(r.json()['data']['total']) if r.json()['error_code'] == 0 else 0
        if is_exit != 0:  # the ordered part matches an expired SKU
            sku_cal[sp_name]['order_num'] += 1
    # Grand totals.
    expire_sum = 0  # total expired SKUs
    ex_sum = 0      # total exposures
    for st in sku_cal:
        expire_sum += sku_cal[st]['expire_num']
        ex_sum += sku_cal[st]['ex_count']
        sku_cal[st]['goods_list'] = len(sku_cal[st]['goods_list'])  # list -> count
    # Ratio fields.
    rd = {}
    index = 0
    for st in sku_cal:
        if ex_sum > 0:  # exposure-count share
            sku_cal[st]['ex_rd'] = str(round(sku_cal[st]['ex_count'] / ex_sum * 100, 2)) + '%'
        if expire_sum > 0:  # expired-SKU share
            sku_cal[st]['expire_rd'] = str(round(sku_cal[st]['expire_num'] / expire_sum * 100,
                                                 2)) + '%'
        if sku_cal[st]['expire_num'] > 0:  # exposed share of this channel's expired SKUs
            sku_cal[st]['goods_rd'] = str(round(sku_cal[st]['goods_list']
                                                / sku_cal[st]['expire_num'] * 100, 2)) + '%'
        rd[index] = sku_cal[st]
        index += 1
    # Export to Excel.
    title = ['供应商', '过期SKU数量', '过期SKU占比', '过期SKU曝光次数', '过期SKU曝光次数占比',
             '过期SKU曝光数量', '过期SKU渠道占比', '下单SKU数量']
    content = ['supplier_name', 'expire_num', 'expire_rd', 'ex_count', 'ex_rd',
               'goods_list', 'goods_rd', 'order_num']
    ExcelHandler.write_to_excel(title, content, rd, "result", result_type=1)
"""
计算曝光下的搜索词
"""
@staticmethod
def cal_sku_keyword(hdfs_data):
    """Aggregate exposure search keywords and store them in MySQL.

    Each keyword gets a self-operated count ('猎芯自营'/'猎芯寄售'),
    a joint-venture count (everything else) and a total; the '撮合'
    supplier type and empty types are skipped entirely. Results are
    written to lie_sku_expose_keyword.

    hdfs_data: iterable of JSON strings, one exposure record per line.
    """
    excluded = ['撮合', '']            # supplier types whose keywords are skipped
    zy_types = ['猎芯自营', '猎芯寄售']  # counted as self-operated
    keyword_cal = {}
    try:
        for line in hdfs_data:
            record = json.loads(line)
            supplier_type = record['supplier_type']
            keyword = record['keyword'].replace('\'', '')
            if supplier_type in excluded:
                continue
            # Create the bucket on first sight of a keyword.
            entry = keyword_cal.setdefault(
                keyword, {'zy_count': 0, 'ly_count': 0, 'sum_count': 0, 'keyword': keyword})
            # Bump the self-operated or joint-venture counter.
            bucket = 'zy_count' if supplier_type in zy_types else 'ly_count'
            entry[bucket] += 1
            entry['sum_count'] += 1
        # Persist to MySQL.
        load_col = ['keyword', 'zy_count', 'ly_count', 'sum_count', 'insert_time', 'cal_ts']
        LoadMysql.sample_load(load_col, 'lie_sku_expose_keyword', keyword_cal, db=ConnList.Dashboard())
    except:
        pass
@staticmethod
def cal_lc_expose(hdfs_data):
db = ConnList.lxData()
wr_db = ConnList.Dashboard()
goods_list = []
print(len(hdfs_data))
for row in hdfs_data:
try:
row = json.loads(row)
supplier_type = row['supplier_type']
goods_id = int(row['goods_id'])
if supplier_type == '猎芯自营':
goods_list.append(goods_id)
except:
pass
lc_count = 0
lc_goods = set()
for goods_id in goods_list:
sql = "SELECT self_supplier_type FROM lie_goods WHERE goods_id = %d" % goods_id
result = DBHandler.read(db, sql)
sp_type = result[0][0]
if sp_type == 2:
lc_count += 1
if goods_id not in lc_goods:
lc_goods.add(goods_id)
# 订单详情
condition = {
'start_time': DateHandler.date_time(1),
'end_time': DateHandler.date_time(0),
'condition': ['o.order_goods_type = 2']
}
items = ExOrder('').order_items(condition)
order_goods_set = set()
order_user_set = set()
paid_goods_set = set()
paid_user_set = set()
for it in items:
goods_id = it['goods_id']
user_id = it['user_id']
status = it['status as items_status']
sql = "SELECT self_supplier_type FROM lie_goods WHERE goods_id = %d" % goods_id
result = DBHandler.read(db, sql)
sp_type = result[0][0]
if sp_type == 2:
order_goods_set.add(goods_id)
order_user_set.add(user_id)
if status == 1:
paid_goods_set.add(goods_id)
paid_user_set.add(user_id)
lc_goods = len(lc_goods)
lc_order_goods = len(order_goods_set)
lc_order_user = len(order_user_set)
lc_paid_goods = len(paid_goods_set)
lc_paid_user = len(paid_user_set)
cal_ts = DateHandler.now_date(1, 1)
sql = "INSERT INTO lie_sku_expose_lc (lc_count,lc_goods,lc_order_goods,lc_order_user,lc_paid_goods,lc_paid_user,cal_ts) \
VALUES (%d,%d,%d,%d,%d,%d,\'%s\')" % \
(lc_count, lc_goods, lc_order_goods, lc_order_user, lc_paid_goods, lc_paid_user, cal_ts)
DBHandler.insert(wr_db, sql)
"""
计算自营曝光料号
"""
@staticmethod
def cal_zy_expose(hdfs_data):
zy_list = []
re_list = []
try:
# 遍历获取数据
for row in hdfs_data:
row = json.loads(row)
supplier_type = row['supplier_type']
goods_id = row['goods_id']
if supplier_type == '猎芯自营' and goods_id not in re_list:
re_list.append(goods_id)
zy_list.append({'goods_id': goods_id})
# 写入MySQL
load_col = ['goods_id', 'cal_ts']
LoadMysql.simple_dict_load(load_col, 'lie_sku_expose_goods', zy_list, db=ConnList.Dashboard())
except:
pass
"""
计算渠道供应商
"""
@staticmethod
def cal_supplier_count(hdfs_data):
keyword_d = {}
result_d = {}
rd = {}
# 遍历获取数据
for row in hdfs_data:
row = json.loads(row)
show_time = row['show_time']
keyword = row['keyword']
supplier_type = row['supplier_type']
key = keyword + str(show_time)
if key not in keyword_d:
keyword_d[key] = [supplier_type]
else:
if supplier_type not in keyword_d[key]:
keyword_d[key].append(supplier_type)
for ls in keyword_d:
for sp in keyword_d[ls]:
if sp not in result_d:
result_d[sp] = 1
else:
result_d[sp] += 1
for rs in result_d:
if rs != '':
rd[rs] = {'supplier': rs, 'search_count': result_d[rs], 'create_time': DateHandler.now_datetime(), 'cal_ts': DateHandler.now_date(1, 1)}
col = ['supplier', 'search_count', 'create_time', 'cal_ts']
LoadMysql.sample_load(col, 'lie_sku_expose_search', rd, db=ConnList.Dashboard(), cal_time=False)
"""
汇总渠道最低价
"""
@staticmethod
def merge_lowest_price(goods_name, ladder_price, supplier_type, lowest_supplier):
# 最低起订量
purchases = int(ladder_price['purchases']) if 'purchases' in ladder_price else 1
price_cn = float(ladder_price['price_cn']) if 'price_cn' in ladder_price else 0
try:
if purchases > 0 and goods_name is not None:
price_cn = round(price_cn / purchases, 2)
# 判断最低价
if goods_name in lowest_supplier:
if supplier_type not in lowest_supplier[goods_name]:
lowest_supplier[goods_name][supplier_type] = price_cn
else:
lowest_supplier[goods_name] = {supplier_type: price_cn}
except:
print(goods_name, ladder_price, supplier_type, lowest_supplier)
return lowest_supplier
"""
计算渠道最低价
"""
@staticmethod
def cal_lowest_price(lowest_supplier):
rd = {}
for goods_name in lowest_supplier:
sup_list = lowest_supplier[goods_name]
max_price = 99999999
supplier_type = ''
# 遍历找到最低价
for sup in sup_list:
if supplier[sup] < max_price:
max_price = supplier[sup]
supplier_type = sup
if supplier_type not in rd:
rd[supplier_type] = 1
else:
rd[supplier_type] += 1
return rd
......@@ -11,7 +11,7 @@ class ExUser(Base):
def reg_user(self, condition):
# 筛选字段
col = ['user_id', 'create_time', 'mobile', 'email']
col = ['user_id', 'create_time', 'mobile', 'email', 'reg_remark', 'create_device']
col_str = super().col_to_str(col)
# 用户数据库
......@@ -26,8 +26,217 @@ class ExUser(Base):
WHERE \
create_time BETWEEN %d AND %d \
AND is_test = 0 \
AND is_type = 0 %s" \
AND is_type = 0 \
AND create_device != 20 %s" \
% (col_str, start_time, end_time, con_str)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
获取登录用户
"""
def login_user(self, condition):
# 筛选字段
col = ['user_id', 'login_remark', 'platform']
col_str = super().col_to_str(col)
# 用户数据库
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
con_str = super().condition_to_str(condition['condition'])
sql = "SELECT %s \
FROM \
lie_user_login_log \
WHERE \
last_login_time BETWEEN %d AND %d \
AND user_id NOT IN (SELECT user_id FROM lie_user_main WHERE is_test = 1) %s" \
% (col_str, start_time, end_time, con_str)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
用户优惠券
"""
def user_coupon(self, condition):
# 筛选字段
col = ['source']
col_str = super().col_to_str(col)
# 用户数据库
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
con_str = super().condition_to_str(condition['condition'])
sql = "SELECT %s FROM lie_user_coupon \
WHERE create_time BETWEEN %d AND %d %s" \
% (col_str, start_time, end_time, con_str)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
"""
复购人员
"""
def re_pur_user(self, condition):
# 筛选字段
col = ['o.user_id']
col_str = super().col_to_str(col)
# 用户数据库
db = ConnList.Order()
start_time = condition['start_time']
count = condition['count']
sql = "SELECT %s \
FROM \
lie_order o \
LEFT JOIN lie_user_main u \
ON o.user_id = u.user_id \
WHERE \
o.create_time <= %d \
AND o.STATUS > 2 \
AND o.order_type = 1 \
AND o.is_type = 0 \
AND u.is_test = 0 \
AND u.create_device != 20 \
GROUP BY o.user_id \
HAVING COUNT( o.user_id ) >= %d" \
% (col_str, start_time, count)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_list(results)
return final_result
"""
复购人员账号
"""
def re_pur_account(self, condition):
# 筛选字段
col = ['u.email', 'u.mobile', 'u.create_time']
col_str = super().col_to_str(col)
# 用户数据库
db = ConnList.Order()
start_time = condition['start_time']
count = condition['count']
sql = "SELECT %s \
FROM \
lie_order o \
LEFT JOIN lie_user_main u \
ON o.user_id = u.user_id \
WHERE \
o.create_time <= %d \
AND o.STATUS > 2 \
AND o.order_type = 1 \
AND o.is_type = 0 \
AND u.is_test = 0 \
AND u.create_device != 20 \
GROUP BY o.user_id \
HAVING COUNT( o.user_id ) >= %d" \
% (col_str, start_time, count)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(results)
return final_result
"""
历史未注册用户
"""
def his_no_buy(self, condition):
#
col = ['user_id']
col_str = super().col_to_str(col)
sql = "SELECT %s \
FROM lie_user_main \
WHERE is_test = 0 AND is_type = 0" % col_str
"""
首购用户
"""
def first_buy_user(self, condition):
# 筛选字段
col = ['o.user_id', 'o.order_amount', 'o.status', 'o.currency', 'o.pay_time', 'o.sale_id',
'o.create_time', 'i.tax_title', 'i.nike_name', 'u.mobile', 'u.email']
col_str = super().col_to_str(col)
# 用户数据库
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
con_str = super().condition_to_str(condition['condition'])
sql = "SELECT %s \
FROM lie_order o\
LEFT JOIN lie_order_invoice i \
ON o.order_id = i.order_id \
LEFT JOIN lie_user_main u \
ON o.user_id = u.user_id \
WHERE o.create_time BETWEEN %d AND %d \
AND o.order_type = 1 \
AND o.is_type = 0 \
AND u.is_test != 1 \
AND %s \
AND o.user_id NOT IN \
(SELECT user_id FROM lie_order WHERE is_type = 0 AND order_type = 1 AND create_time <= %d GROUP BY user_id)" \
% (col_str, start_time, end_time, con_str, start_time)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
def new_user_order(self, condition):
col = ['o.user_id', 'u.mobile', 'u.email']
col_str = super().col_to_str(col)
# 用户数据库
db = ConnList.Order()
start_time = condition['start_time']
end_time = condition['end_time']
sql = "SELECT %s FROM lie_order o \
LEFT JOIN lie_user_main u \
ON o.user_id = u.user_id \
WHERE o.user_id IN \
( \
SELECT user_id FROM lie_user_main WHERE user_id NOT IN \
( \
SELECT user_id FROM lie_order WHERE is_type = 0 AND order_type = 1 \
AND create_time <= %s \
) \
AND is_test = 0 \
) AND o.create_time \
BETWEEN %s AND %s \
AND o.order_type = 1 AND o.is_type = 0 GROUP BY o.user_id" % (col_str, start_time, start_time, end_time)
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
......@@ -35,3 +244,60 @@ class ExUser(Base):
print('final_result', len(final_result))
return final_result
"""
根据文件获取用户id
"""
def file_user(self):
rd = []
db = ConnList.Order()
data = self.read_local_file('name.txt')
for d in data:
mobile = d.replace('\n', '')
sql = "SELECT user_id,email FROM lie_user_main WHERE mobile = %d" % int(mobile)
print(sql)
results = DBHandler.read(db=db, sql=sql)
if len(results) > 0:
rd.append({'mobile': mobile, 'email': results[0][1], 'user_id': results[0][0]})
return rd
"""
获取报价系统user_id
"""
def lx_offer(self, condition):
# 用户数据库
db = ConnList.Dashboard()
con_str = super().condition_to_str(condition['condition'])
sql = "SELECT user_id FROM lie_offer WHERE %s " % con_str
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_list(results)
return final_result
"""
测试IP
"""
def test_ip(self):
# 筛选字段
col = ['ip']
col_str = super().col_to_str(col)
# 用户数据库
db = ConnList.Order()
sql = "SELECT %s \
FROM lie_test_ip"\
% col_str
results = DBHandler.read(db=db, sql=sql)
# 结果格式转换为字典
final_result = super().result_to_dict(col, results)
return final_result
No preview for this file type
This diff could not be displayed because it is too large.
# .bash_profile
# Get the aliases and functions
if [ -f ~/.bashrc ]; then
	. ~/.bashrc
fi
# User specific environment and startup programs
# Hadoop install root; its bin/ (hdfs, hadoop CLIs) and sbin/ (start/stop
# scripts) are appended to PATH so the ETL jobs can shell out to them.
HADOOP_HOME=/data2/hadoop/hadoop-2.7.6
PATH=$PATH:$HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
export PATH HADOOP_HOME
!connect jdbc:hive2://localhost:10000
hive
{"/usr/python":{"last_check":"2019-09-24T03:07:38Z","pypi_version":"19.2.3"}}
\ No newline at end of file
This diff could not be displayed because it is too large.
/*
Navicat Premium Data Transfer
Source Server : liexin_credit
Source Server Type : MySQL
Source Server Version : 50548
Source Host : 192.168.2.232:3306
Source Schema : liexin_credit
Target Server Type : MySQL
Target Server Version : 50548
File Encoding : 65001
Date: 13/12/2019 14:18:06
*/
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for lie_com_credits
-- ----------------------------
-- Company credit-score table: one row per applying company, tracking static/
-- dynamic/total scores, review status and the recommended credit line.
-- NOTE: generated by Navicat dump; re-running drops and recreates the table.
DROP TABLE IF EXISTS `lie_com_credits`;
CREATE TABLE `lie_com_credits` (
`id` int(11) UNSIGNED NOT NULL AUTO_INCREMENT COMMENT '主键id',
`apply_account` varchar(11) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '' COMMENT '申请账号 手机号 法人电话',
`erp_company_code` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '' COMMENT 'erp公司编码',
`company_name` varchar(60) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '' COMMENT '公司名字',
`static_credit_score` decimal(6, 2) UNSIGNED NOT NULL DEFAULT 0.00 COMMENT '静态信用分',
`dynamic_credit_score` decimal(6, 2) UNSIGNED NOT NULL DEFAULT 0.00 COMMENT '动态信用分',
`total_credit_score` decimal(6, 2) NOT NULL DEFAULT 0.00 COMMENT '总信用分',
`status` tinyint(1) UNSIGNED NOT NULL DEFAULT 1 COMMENT '1已申请 5待评分 10已评分',
`blacklist_hit` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '' COMMENT '黑名单命中',
`credit_invest_result` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '' COMMENT '征信达标结果',
`ichunt_hunting_core` tinyint(1) NOT NULL COMMENT '猎芯网评分分级',
`recommended_amount` decimal(12, 2) NOT NULL DEFAULT 0.00 COMMENT '建议额度',
`recom_time_limit` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '' COMMENT '建议区间',
`admin_id` int(10) UNSIGNED NOT NULL DEFAULT 0 COMMENT '评分人ID',
`admin_name` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL DEFAULT '' COMMENT '评分人',
`score_time` int(11) UNSIGNED NOT NULL DEFAULT 0 COMMENT '评分时间',
`create_time` int(11) UNSIGNED NOT NULL DEFAULT 0 COMMENT '创建时间',
`update_time` int(11) UNSIGNED NOT NULL DEFAULT 0 COMMENT '修改时间',
PRIMARY KEY (`id`) USING BTREE,
UNIQUE INDEX `company_name`(`company_name`) USING BTREE,
INDEX `company_code_2`(`erp_company_code`, `status`, `create_time`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 48 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci COMMENT = '公司信用分管理' ROW_FORMAT = Compact;
-- ----------------------------
-- Records of lie_com_credits
-- ----------------------------
INSERT INTO `lie_com_credits` VALUES (2, '13277999723', 'LX001', '猎芯科技', 23.50, 17.00, 40.50, 10, '', '', 4, 50000.00, '5天', 1354, '朱国军', 1575453290, 1574675417, 1575453290);
INSERT INTO `lie_com_credits` VALUES (5, '5695', 'LX002', '小明科技', 16.50, 13.00, 29.50, 10, '', '', 5, 0.00, '0', 1357, '朱继来', 1576047410, 1574695555, 1576047410);
INSERT INTO `lie_com_credits` VALUES (9, '17600091664', 'LX003', '猎芯科技1', 24.25, 0.00, 24.25, 10, '', '', 5, 0.00, '0', 1354, '朱国军', 1575531942, 1575355839, 1575531942);
INSERT INTO `lie_com_credits` VALUES (15, '15365025118', 'LX004', '211221', 15.50, 28.00, 43.50, 10, '', '', 4, 50000.00, '5天', 0, '', 0, 1575448470, 0);
INSERT INTO `lie_com_credits` VALUES (16, '18589050841', 'LX005', '18589050841', 16.00, 16.00, 32.00, 10, '', '', 5, 0.00, '0', 0, '', 0, 1575449102, 0);
INSERT INTO `lie_com_credits` VALUES (17, '15989570000', 'LX006', '深圳市猎芯科技有限公司', 10.00, 0.00, 10.00, 10, '', '', 5, 0.00, '0', 1354, '朱国军', 1575969536, 1575669102, 1575969536);
INSERT INTO `lie_com_credits` VALUES (18, '15989571111', 'LX007', '深圳卓越飞讯电子有限公司', 10.50, 0.00, 10.50, 10, '', '', 5, 0.00, '0', 1354, '朱国军', 1576045211, 1575779102, 1576045211);
INSERT INTO `lie_com_credits` VALUES (19, '15989572222', 'LX008', '北京天涯泰盟科技股份有限公司', 10.00, 0.00, 10.00, 10, '', '', 5, 0.00, '0', 1354, '朱国军', 1576045976, 1575889102, 1576045976);
INSERT INTO `lie_com_credits` VALUES (20, '13397978887', 'LX008', '深圳市同创芯科技有限公司', 10.00, 0.00, 10.00, 10, '', '', 5, 0.00, '0', 1354, '朱国军', 1575968590, 1575966673, 1575968590);
INSERT INTO `lie_com_credits` VALUES (21, '13397978829', 'LX007', '深圳市极限网络科技有限公司', 10.50, 0.00, 10.50, 10, '', '', 5, 0.00, '0', 1354, '朱国军', 1575969906, 1575969833, 1575969906);
INSERT INTO `lie_com_credits` VALUES (22, '13640960251', 'LX007', '深圳市品慧电子有限公司', 10.00, 0.00, 10.00, 10, '', '', 5, 0.00, '0', 1354, '朱国军', 1576035832, 1575970573, 1576035832);
INSERT INTO `lie_com_credits` VALUES (23, '13388880000', 'LX007', '深圳市百姓通商网络科技有限公司', 9.50, 0.00, 9.50, 10, '', '', 5, 0.00, '0', 1354, '朱国军', 1575970936, 1575970821, 1575970936);
INSERT INTO `lie_com_credits` VALUES (24, '16600001111', 'LX007', '深圳市锦懋微电子有限公司', 10.50, 0.00, 10.50, 10, '', '', 5, 0.00, '0', 1354, '朱国军', 1576048947, 1576048830, 1576048947);
INSERT INTO `lie_com_credits` VALUES (25, '13602602902', '', '深圳一二三科技有限公司', 10.00, 0.00, 10.00, 10, '', '', 1, 300.00, '1天', 1357, '朱继来', 1576118530, 1576050156, 1576118530);
INSERT INTO `lie_com_credits` VALUES (26, '13012340000', '', '创业集团', 9.50, 0.00, 9.50, 10, '', '', 1, 300.00, '1天', 0, '', 0, 1576053465, 0);
INSERT INTO `lie_com_credits` VALUES (27, '13012340001', '', '深圳明主科技有限公司1', 10.00, 0.00, 1.00, 10, '', '', 1, 300.00, '1天', 1354, '朱国军', 1576058659, 1576054647, 1576058659);
INSERT INTO `lie_com_credits` VALUES (28, '13012340003', '', '自由公司', 0.00, 0.00, 0.00, 1, '', '', 0, 0.00, '', 1357, '朱继来', 1576057792, 1576056986, 1576057792);
INSERT INTO `lie_com_credits` VALUES (29, '13012340000', 'LX008', '完美世界游戏有限责任公司', 11.00, 0.00, 11.00, 10, '', '', 5, 0.00, '0', 1354, '朱国军', 1576144150, 1576118006, 1576144150);
INSERT INTO `lie_com_credits` VALUES (30, '13602602902', '', '扫毒2A', 0.00, 0.00, 0.00, 1, '', '', 0, 0.00, '', 1515, '孙龙', 1576134327, 1576120755, 1576134327);
INSERT INTO `lie_com_credits` VALUES (31, '13602602902', '', 'company', 6.00, 0.00, 6.00, 1, '', '', 1, 300.00, '1天', 0, '', 0, 1576131241, 1576131241);
INSERT INTO `lie_com_credits` VALUES (32, '17600091664', '', '中印云端(深圳)科技有限公司', 0.00, 0.00, 0.00, 1, '', '', 0, 0.00, '', 0, '', 0, 1576134467, 1576134467);
INSERT INTO `lie_com_credits` VALUES (33, '17600091664', '', '深圳卓越飞讯科技有限公司', 0.00, 0.00, 0.00, 1, '', '', 0, 0.00, '', 0, '', 0, 1576134486, 1576134486);
INSERT INTO `lie_com_credits` VALUES (34, '17600091664', '', '深圳市展创电子有限公司', 0.00, 0.00, 0.00, 1, '', '', 0, 0.00, '', 0, '', 0, 1576134503, 1576134503);
INSERT INTO `lie_com_credits` VALUES (35, '17600091664', '', '深圳市义熙科技有限公司', 0.00, 0.00, 0.00, 1, '', '', 0, 0.00, '', 0, '', 0, 1576134511, 1576134511);
INSERT INTO `lie_com_credits` VALUES (36, '17600091664', '', '深圳市耀亮科技有限公司', 0.00, 0.00, 0.00, 1, '', '', 0, 0.00, '', 0, '', 0, 1576134518, 1576134518);
INSERT INTO `lie_com_credits` VALUES (37, '17600091664', '', '深圳市尚格实业有限公司', 0.00, 0.00, 0.00, 1, '', '', 0, 0.00, '', 0, '', 0, 1576134534, 1576134534);
INSERT INTO `lie_com_credits` VALUES (38, '17600091664', '', '深圳市晶尚景电子科技有限公司', 0.00, 0.00, 0.00, 1, '', '', 0, 0.00, '', 0, '', 0, 1576134542, 1576134542);
INSERT INTO `lie_com_credits` VALUES (39, '17600091664', '', '深圳市锦锐科技有限公司', 0.00, 0.00, 0.00, 1, '', '', 0, 0.00, '', 0, '', 0, 1576134548, 1576134548);
INSERT INTO `lie_com_credits` VALUES (40, '17600091664', '', '深圳市弘安盛电子有限公司', 0.00, 0.00, 0.00, 1, '', '', 0, 0.00, '', 0, '', 0, 1576134562, 1576134562);
INSERT INTO `lie_com_credits` VALUES (41, '17600091664', '', '深圳市和世达电子科技有限公司', 0.00, 0.00, 0.00, 1, '', '', 0, 0.00, '', 0, '', 0, 1576134568, 1576134568);
INSERT INTO `lie_com_credits` VALUES (42, '17600091664', '', '深圳市禾田普达科技有限公司', 0.00, 0.00, 0.00, 1, '', '', 0, 0.00, '', 0, '', 0, 1576134574, 1576134574);
INSERT INTO `lie_com_credits` VALUES (43, '17600091664', '', '上海麦霖电子技术有限公司', 0.00, 0.00, 0.00, 5, '', '', 0, 0.00, '', 1515, '孙龙', 1576217604, 1576134581, 1576217604);
INSERT INTO `lie_com_credits` VALUES (44, '17600091664', '', '成都鸿悦科技有限公司', 0.00, 0.00, 0.00, 5, '', '', 0, 0.00, '', 1515, '孙龙', 1576216747, 1576134587, 1576216747);
INSERT INTO `lie_com_credits` VALUES (45, '17600091664', 'GGN0001477', '北京大唐高鸿数据网络技术有限公司', 10.00, 0.00, 10.00, 10, '', '', 5, 0.00, '0', 1515, '孙龙', 1576199464, 1576134600, 1576199464);
INSERT INTO `lie_com_credits` VALUES (46, '18912340000', '', '小红公司', 0.00, 0.00, 0.00, 1, '', '', 0, 0.00, '', 0, '', 0, 1576205207, 1576205207);
INSERT INTO `lie_com_credits` VALUES (47, '13602602902', '', '小学公司', 0.00, 0.00, 0.00, 1, '', '', 0, 0.00, '', 0, '', 0, 1576206833, 1576206833);
SET FOREIGN_KEY_CHECKS = 1;
-- MySQL dump 10.13 Distrib 5.7.22, for Linux (x86_64)
--
-- Host: localhost Database: liexin_kaipao
-- ------------------------------------------------------
-- Server version 5.7.22
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `lie_feedbackheadr`
--
-- Invoice-machine feedback table: one row per (unino, invoiceno) pair with
-- amounts/taxes returned by the invoicing device; `flag` marks sync state.
-- NOTE: mysqldump-generated DDL; re-running drops and recreates the table.
DROP TABLE IF EXISTS `lie_feedbackheadr`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `lie_feedbackheadr` (
`unino` varchar(20) NOT NULL DEFAULT '' COMMENT '编号',
`invoicetypecode` varchar(10) NOT NULL DEFAULT '' COMMENT '类别代码',
`invoiceno` varchar(120) NOT NULL DEFAULT '' COMMENT '发票号',
`sta` varchar(10) NOT NULL DEFAULT '' COMMENT '状态',
`kpr` varchar(10) NOT NULL DEFAULT '' COMMENT '开票人',
`affdate` datetime DEFAULT NULL COMMENT '开票日期',
`invoicenature` tinyint(1) NOT NULL DEFAULT '0' COMMENT '发票性质',
`amount` float NOT NULL DEFAULT '0' COMMENT '金额',
`taxamount` float NOT NULL DEFAULT '0' COMMENT '税额',
`rowtotals` float NOT NULL DEFAULT '0' COMMENT '合计行含税金额',
`totalamount` float NOT NULL DEFAULT '0' COMMENT '合计行金额',
`totaltaxamount` float NOT NULL DEFAULT '0' COMMENT '合计行税额',
`memo` varchar(230) NOT NULL DEFAULT '' COMMENT '备注',
`flag` tinyint(1) DEFAULT '0' COMMENT '是否需要同步该记录,0为需要,1为已经同步过',
UNIQUE KEY `unino_invoiceno` (`unino`,`invoiceno`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='开票机器发票返回处理';
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `lie_feedbackheadr`
--
LOCK TABLES `lie_feedbackheadr` WRITE;
/*!40000 ALTER TABLE `lie_feedbackheadr` DISABLE KEYS */;
INSERT INTO `lie_feedbackheadr` VALUES ('1','4403173320','04250072','FRM','梁婷招','2018-05-28 14:26:22',0,203.72,34.63,238.35,203.72,34.63,'YSFP-PP-00000091',1),('1','4403174130','18334921','FRM','梁婷招','2018-06-26 16:39:21',0,9999.8,1599.97,11599.8,9999.8,1599.97,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334922','FRM','梁婷招','2018-06-26 16:39:25',0,9991.38,1598.62,11590,9991.38,1598.62,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334923','FRM','梁婷招','2018-06-26 16:39:27',0,9991.38,1598.62,11590,9991.38,1598.62,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334924','FRM','梁婷招','2018-06-26 16:39:28',0,9984.91,1597.59,11582.5,9984.91,1597.59,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334925','FRM','梁婷招','2018-06-26 16:39:30',0,9984.91,1597.59,11582.5,9984.91,1597.59,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334926','FRM','梁婷招','2018-06-26 16:39:31',0,9984.91,1597.59,11582.5,9984.91,1597.59,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334927','FRM','梁婷招','2018-06-26 16:39:32',0,9984.91,1597.59,11582.5,9984.91,1597.59,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334928','FRM','梁婷招','2018-06-26 
16:39:34',0,9995.84,1599.34,11595.2,9995.84,1599.34,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334929','FRM','梁婷招','2018-06-26 16:39:35',0,9997.99,1599.68,11597.7,9997.99,1599.68,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334930','FRM','梁婷招','2018-06-26 16:39:37',0,9983.19,1597.31,11580.5,9983.19,1597.31,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334931','FRM','梁婷招','2018-06-26 16:39:38',0,9983.19,1597.31,11580.5,9983.19,1597.31,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334932','FRM','梁婷招','2018-06-26 16:39:40',0,9983.19,1597.31,11580.5,9983.19,1597.31,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334933','FRM','梁婷招','2018-06-26 16:39:41',0,9983.19,1597.31,11580.5,9983.19,1597.31,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334934','FRM','梁婷招','2018-06-26 16:39:42',0,9993.75,1599,11592.8,9993.75,1599,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334935','FRM','梁婷招','2018-06-26 16:39:44',0,9993.75,1599,11592.8,9993.75,1599,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334936','FRM','梁婷招','2018-06-26 
16:39:45',0,9991.29,1598.61,11589.9,9991.29,1598.61,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334937','FRM','梁婷招','2018-06-26 16:39:47',0,9964.41,1594.31,11558.7,9964.41,1594.31,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334938','FRM','梁婷招','2018-06-26 16:39:48',0,9980.68,1596.9,11577.6,9980.68,1596.9,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334939','FRM','梁婷招','2018-06-26 16:39:50',0,9995.09,1599.21,11594.3,9995.09,1599.21,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334940','FRM','梁婷招','2018-06-26 16:39:51',0,9991.24,1598.6,11589.8,9991.24,1598.6,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334941','FRM','梁婷招','2018-06-26 16:39:53',0,9999.71,1599.96,11599.7,9999.71,1599.96,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334942','FRM','梁婷招','2018-06-26 16:39:54',0,9999.74,1599.95,11599.7,9999.74,1599.95,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334943','FRM','梁婷招','2018-06-26 16:39:56',0,9987.36,1597.98,11585.3,9987.36,1597.98,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334944','FRM','梁婷招','2018-06-26 
16:39:57',0,9999.74,1599.96,11599.7,9999.74,1599.96,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334945','FRM','梁婷招','2018-06-26 16:39:59',0,5472.42,875.58,6348,5472.42,875.58,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334946','FRM','梁婷招','2018-06-26 17:13:50',0,9984.47,1597.5,11582,9984.47,1597.5,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334947','FRM','梁婷招','2018-06-26 17:13:53',0,9999.79,1599.97,11599.8,9999.79,1599.97,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334948','FRM','梁婷招','2018-06-26 17:13:55',0,9997.09,1599.54,11596.6,9997.09,1599.54,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334949','FRM','梁婷招','2018-06-26 17:13:56',0,9984.4,1597.52,11581.9,9984.4,1597.52,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334950','FRM','梁婷招','2018-06-26 17:13:58',0,9965.22,1594.42,11559.6,9965.22,1594.42,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334951','FRM','梁婷招','2018-06-26 17:14:00',0,9990.82,1598.54,11589.4,9990.82,1598.54,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334952','FRM','梁婷招','2018-06-26 
17:14:01',0,9994.8,1599.17,11594,9994.8,1599.17,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334953','FRM','梁婷招','2018-06-26 17:14:03',0,9997.45,1599.59,11597,9997.45,1599.59,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334954','FRM','梁婷招','2018-06-26 17:14:05',0,9956.55,1593.04,11549.6,9956.55,1593.04,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334955','FRM','梁婷招','2018-06-26 17:14:06',0,9999.03,1599.84,11598.9,9999.03,1599.84,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334956','FRM','梁婷招','2018-06-26 17:14:08',0,9995.92,1599.33,11595.2,9995.92,1599.33,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334957','FRM','梁婷招','2018-06-26 17:14:10',0,9995.32,1599.23,11594.5,9995.32,1599.23,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('1','4403174130','18334958','FRM','梁婷招','2018-06-26 17:14:12',0,9995.67,1599.31,11595,9995.67,1599.31,'YSFP-ZP-00003257、YSFP-ZP-00003259、YSFP-ZP-00003264、YSFP-ZP-00003266、YSFP-ZP-00003267、YSFP-ZP-00003411、YSFP-ZP-00003412',1),('105','4403182130','20267731','FRM','梁婷招','2018-11-12 12:32:40',0,594.83,95.17,690,594.83,95.17,'',1),('106','4403182130','20267733','FRM','梁婷招','2018-11-12 12:47:21',0,3606.05,576.97,4183.02,3606.05,576.97,'',1),('108','4403182130','20267734','FRM','梁婷招','2018-11-12 12:47:23',0,537.93,86.07,624,537.93,86.07,'',1),('111','4403182130','20267732','FRM','梁婷招','2018-11-12 
12:32:51',0,1870.37,299.26,2169.63,1870.37,299.26,'',1),('115','4403182130','20267785','FRM','梁婷招','2018-11-12 17:24:02',0,1623.49,259.76,1883.25,1623.49,259.76,'',1),('116','4403182130','20267786','FRM','梁婷招','2018-11-12 17:24:05',0,1079.09,172.66,1251.75,1079.09,172.66,'',1),('117','4403182130','20267983','FRM','梁婷招','2018-11-13 18:35:34',0,195.34,31.26,226.6,195.34,31.26,'',1),('118','4403182130','20267984','FRM','梁婷招','2018-11-13 18:35:38',0,517.24,82.76,600,517.24,82.76,'',1),('119','4403182130','20267985','FRM','梁婷招','2018-11-13 18:35:41',0,213.62,34.18,247.8,213.62,34.18,'',1),('120','4403182130','20268182','FRM','梁婷招','2018-11-15 16:35:23',0,3203.1,512.5,3715.6,3203.1,512.5,'',1),('2018-05-28','4403173320','04250073','FRM','梁婷招','2018-05-28 14:41:37',0,201.3,34.22,235.52,201.3,34.22,'YSFP-PP-00000081',1),('2018060012','4403174130','11210838','FRM','梁婷招','2018-06-08 15:43:43',0,5964,1013.88,6977.88,5964,1013.88,'',1),('20180606001','4403174130','11210649','FRM','梁婷招','2018-06-06 16:44:30',0,1311.38,209.82,1521.2,1311.38,209.82,'YSFP-ZP-00002675',1),('20180606002','4403174130','11210650','FRM','梁婷招','2018-06-06 17:07:34',0,5896.55,943.45,6840,5896.55,943.45,'YSFP-ZP-00002805',1),('20180606003','4403174130','11210651','FRM','梁婷招','2018-06-06 17:07:36',0,1604.26,256.68,1860.94,1604.26,256.68,'YSFP-ZP-00002626',1),('20180606004','4403174130','11210663','FRM','梁婷招','2018-06-06 17:43:32',0,2817.8,450.85,3268.65,2817.8,450.85,'YSFP-ZP-00002732',1),('20180606005','4403174130','11210665','FRM','梁婷招','2018-06-06 17:43:35',0,9654.91,1544.79,11199.7,9654.91,1544.79,'YSFP-ZP-00002687、YSFP-ZP-00002935、YSFP-ZP-00002936',1),('20180606006','4403174130','11210652','FRM','梁婷招','2018-06-06 17:43:12',0,9995.3,1699.2,11694.5,9995.3,1699.2,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606006','4403174130','11210653','FRM','梁婷招','2018-06-06 
17:43:15',0,9995.3,1699.2,11694.5,9995.3,1699.2,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606006','4403174130','11210654','FRM','梁婷招','2018-06-06 17:43:17',0,9995.3,1699.2,11694.5,9995.3,1699.2,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606006','4403174130','11210655','FRM','梁婷招','2018-06-06 17:43:18',0,9999.35,1699.89,11699.2,9999.35,1699.89,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606006','4403174130','11210656','FRM','梁婷招','2018-06-06 17:43:20',0,9975.66,1695.86,11671.5,9975.66,1695.86,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606006','4403174130','11210657','FRM','梁婷招','2018-06-06 17:43:21',0,9975.66,1695.86,11671.5,9975.66,1695.86,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606006','4403174130','11210658','FRM','梁婷招','2018-06-06 17:43:24',0,9975.66,1695.86,11671.5,9975.66,1695.86,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606006','4403174130','11210659','FRM','梁婷招','2018-06-06 17:43:26',0,9975.66,1695.86,11671.5,9975.66,1695.86,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606006','4403174130','11210660','FRM','梁婷招','2018-06-06 17:43:28',0,9975.66,1695.86,11671.5,9975.66,1695.86,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606006','4403174130','11210661','FRM','梁婷招','2018-06-06 
17:43:29',0,9975.66,1695.86,11671.5,9975.66,1695.86,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606006','4403174130','11210662','FRM','梁婷招','2018-06-06 17:43:31',0,9975.66,1695.86,11671.5,9975.66,1695.86,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606006','4403174130','11210664','FRM','梁婷招','2018-06-06 17:43:34',0,9998.76,1699.8,11698.6,9998.76,1699.8,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606006','4403174130','11210666','FRM','梁婷招','2018-06-06 17:43:37',0,6496.13,1104.34,7600.47,6496.13,1104.34,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606006','4403174130','11210667','FRM','梁婷招','2018-06-06 17:43:38',0,9975.42,1695.82,11671.2,9975.42,1695.82,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606006','4403174130','11210668','FRM','梁婷招','2018-06-06 17:43:40',0,9978.27,1696.3,11674.6,9978.27,1696.3,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606006','4403174130','11210669','FRM','梁婷招','2018-06-06 17:43:41',0,9999.78,1699.98,11699.8,9999.78,1699.98,'YSFP-ZP-00002784、YSFP-ZP-00002785、YSFP-ZP-00002987、YSFP-ZP-00002988、YSFP-ZP-00002989、YSFP-ZP-00002965、YSFP-ZP-00003049',1),('20180606007','4403174130','11210670','FRM','梁婷招','2018-06-06 18:03:14',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606007','4403174130','11210671','FRM','梁婷招','2018-06-06 18:03:15',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606007','4403174130','11210672','FRM','梁婷招','2018-06-06 
18:03:17',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606007','4403174130','11210673','FRM','梁婷招','2018-06-06 18:03:18',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606007','4403174130','11210674','FRM','梁婷招','2018-06-06 18:03:21',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606007','4403174130','11210675','FRM','梁婷招','2018-06-06 18:03:23',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606007','4403174130','11210676','FRM','梁婷招','2018-06-06 18:03:25',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606007','4403174130','11210677','FRM','梁婷招','2018-06-06 18:03:26',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210678','FRM','梁婷招','2018-06-06 18:21:05',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210679','FRM','梁婷招','2018-06-06 18:21:07',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210680','FRM','梁婷招','2018-06-06 18:21:09',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210681','FRM','梁婷招','2018-06-06 18:21:11',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210682','FRM','梁婷招','2018-06-06 18:21:12',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210683','FRM','梁婷招','2018-06-06 18:21:14',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210684','FRM','梁婷招','2018-06-06 18:21:15',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210685','FRM','梁婷招','2018-06-06 18:21:18',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210686','FRM','梁婷招','2018-06-06 
18:21:19',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210687','FRM','梁婷招','2018-06-06 18:21:21',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210688','FRM','梁婷招','2018-06-06 18:21:22',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210689','FRM','梁婷招','2018-06-06 18:21:24',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210690','FRM','梁婷招','2018-06-06 18:21:26',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210691','FRM','梁婷招','2018-06-06 18:21:27',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210692','FRM','梁婷招','2018-06-06 18:21:28',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210693','FRM','梁婷招','2018-06-06 18:21:30',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210694','FRM','梁婷招','2018-06-06 18:21:31',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210695','FRM','梁婷招','2018-06-06 18:21:33',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210696','FRM','梁婷招','2018-06-06 18:21:34',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210697','FRM','梁婷招','2018-06-06 18:21:36',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210698','FRM','梁婷招','2018-06-06 18:21:37',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210699','FRM','梁婷招','2018-06-06 18:21:39',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210700','FRM','梁婷招','2018-06-06 
18:21:40',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210701','FRM','梁婷招','2018-06-06 18:21:42',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210702','FRM','梁婷招','2018-06-06 18:21:43',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210703','FRM','梁婷招','2018-06-06 18:39:56',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210704','FRM','梁婷招','2018-06-06 18:39:59',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210705','FRM','梁婷招','2018-06-06 18:40:00',0,9879.31,1580.69,11460,9879.31,1580.69,'YSFP-ZP-00003028',1),('20180606008','4403174130','11210706','FRM','梁婷招','2018-06-06 18:40:02',0,6586.22,1053.78,7640,6586.22,1053.78,'YSFP-ZP-00003028',1),('20180606009','4403174130','11210707','FRM','梁婷招','2018-06-07 10:40:51',0,9994.02,1698.98,11693,9994.02,1698.98,'YSFP-ZP-00002810、YSFP-ZP-00002811、YSFP-ZP-00002812、YSFP-ZP-00002813、YSFP-ZP-00002814、YSFP-ZP-00002815、YSFP-ZP-00002990、YSFP-ZP-00002992、YSFP-ZP-00003110',1),('20180606009','4403174130','11210708','FRM','梁婷招','2018-06-07 10:40:54',0,9976,1695.92,11671.9,9976,1695.92,'YSFP-ZP-00002810、YSFP-ZP-00002811、YSFP-ZP-00002812、YSFP-ZP-00002813、YSFP-ZP-00002814、YSFP-ZP-00002815、YSFP-ZP-00002990、YSFP-ZP-00002992、YSFP-ZP-00003110',1),('20180606009','4403174130','11210709','FRM','梁婷招','2018-06-07 10:40:56',0,9872.72,1678.36,11551.1,9872.72,1678.36,'YSFP-ZP-00002810、YSFP-ZP-00002811、YSFP-ZP-00002812、YSFP-ZP-00002813、YSFP-ZP-00002814、YSFP-ZP-00002815、YSFP-ZP-00002990、YSFP-ZP-00002992、YSFP-ZP-00003110',1),('20180606009','4403174130','11210710','FRM','梁婷招','2018-06-07 
10:40:57',0,9998.87,1699.81,11698.7,9998.87,1699.81,'YSFP-ZP-00002810、YSFP-ZP-00002811、YSFP-ZP-00002812、YSFP-ZP-00002813、YSFP-ZP-00002814、YSFP-ZP-00002815、YSFP-ZP-00002990、YSFP-ZP-00002992、YSFP-ZP-00003110',1),('20180606009','4403174130','11210711','FRM','梁婷招','2018-06-07 10:40:59',0,9998.27,1699.71,11698,9998.27,1699.71,'YSFP-ZP-00002810、YSFP-ZP-00002811、YSFP-ZP-00002812、YSFP-ZP-00002813、YSFP-ZP-00002814、YSFP-ZP-00002815、YSFP-ZP-00002990、YSFP-ZP-00002992、YSFP-ZP-00003110',1),('20180606009','4403174130','11210712','FRM','梁婷招','2018-06-07 10:41:00',0,9832.83,1671.58,11504.4,9832.83,1671.58,'YSFP-ZP-00002810、YSFP-ZP-00002811、YSFP-ZP-00002812、YSFP-ZP-00002813、YSFP-ZP-00002814、YSFP-ZP-00002815、YSFP-ZP-00002990、YSFP-ZP-00002992、YSFP-ZP-00003110',1),('20180606009','4403174130','11210713','FRM','梁婷招','2018-06-07 10:41:03',0,9998.71,1699.77,11698.5,9998.71,1699.77,'YSFP-ZP-00002810、YSFP-ZP-00002811、YSFP-ZP-00002812、YSFP-ZP-00002813、YSFP-ZP-00002814、YSFP-ZP-00002815、YSFP-ZP-00002990、YSFP-ZP-00002992、YSFP-ZP-00003110',1),('20180606009','4403174130','11210714','FRM','梁婷招','2018-06-07 10:41:05',0,9953.87,1692.15,11646,9953.87,1692.15,'YSFP-ZP-00002810、YSFP-ZP-00002811、YSFP-ZP-00002812、YSFP-ZP-00002813、YSFP-ZP-00002814、YSFP-ZP-00002815、YSFP-ZP-00002990、YSFP-ZP-00002992、YSFP-ZP-00003110',1),('20180606009','4403174130','11210715','FRM','梁婷招','2018-06-07 10:41:06',0,9985.33,1697.49,11682.8,9985.33,1697.49,'YSFP-ZP-00002810、YSFP-ZP-00002811、YSFP-ZP-00002812、YSFP-ZP-00002813、YSFP-ZP-00002814、YSFP-ZP-00002815、YSFP-ZP-00002990、YSFP-ZP-00002992、YSFP-ZP-00003110',1),('20180606009','4403174130','11210716','FRM','梁婷招','2018-06-07 10:41:08',0,9998.93,1699.83,11698.8,9998.93,1699.83,'YSFP-ZP-00002810、YSFP-ZP-00002811、YSFP-ZP-00002812、YSFP-ZP-00002813、YSFP-ZP-00002814、YSFP-ZP-00002815、YSFP-ZP-00002990、YSFP-ZP-00002992、YSFP-ZP-00003110',1),('20180606009','4403174130','11210717','FRM','梁婷招','2018-06-07 
10:41:10',0,9999.76,1699.94,11699.7,9999.76,1699.94,'YSFP-ZP-00002810、YSFP-ZP-00002811、YSFP-ZP-00002812、YSFP-ZP-00002813、YSFP-ZP-00002814、YSFP-ZP-00002815、YSFP-ZP-00002990、YSFP-ZP-00002992、YSFP-ZP-00003110',1),('20180606009','4403174130','11210718','FRM','梁婷招','2018-06-07 10:41:11',0,9996.5,1699.41,11695.9,9996.5,1699.41,'YSFP-ZP-00002810、YSFP-ZP-00002811、YSFP-ZP-00002812、YSFP-ZP-00002813、YSFP-ZP-00002814、YSFP-ZP-00002815、YSFP-ZP-00002990、YSFP-ZP-00002992、YSFP-ZP-00003110',1),('20180606009','4403174130','11210719','FRM','梁婷招','2018-06-07 10:41:13',0,9992.52,1698.71,11691.2,9992.52,1698.71,'YSFP-ZP-00002810、YSFP-ZP-00002811、YSFP-ZP-00002812、YSFP-ZP-00002813、YSFP-ZP-00002814、YSFP-ZP-00002815、YSFP-ZP-00002990、YSFP-ZP-00002992、YSFP-ZP-00003110',1),('20180606009','4403174130','11210720','FRM','梁婷招','2018-06-07 10:41:15',0,8506.45,1446.11,9952.56,8506.45,1446.11,'YSFP-ZP-00002810、YSFP-ZP-00002811、YSFP-ZP-00002812、YSFP-ZP-00002813、YSFP-ZP-00002814、YSFP-ZP-00002815、YSFP-ZP-00002990、YSFP-ZP-00002992、YSFP-ZP-00003110',1),('20180607020','4403174130','11210796','FRM','梁婷招','2018-06-07 18:05:28',0,1341.88,228.12,1570,1341.88,228.12,'YSFP-ZP-00002703',1),('20180607021','4403174130','11210797','FRM','梁婷招','2018-06-07 18:17:01',0,5696.12,911.38,6607.5,5696.12,911.38,'YSFP-ZP-00002930、YSFP-ZP-00003099',1),('20180607021','4403174130','11210798','FRM','梁婷招','2018-06-07 18:17:03',0,9998.57,1599.78,11598.3,9998.57,1599.78,'YSFP-ZP-00002930、YSFP-ZP-00003099',1),('20180607021','4403174130','11210799','FRM','梁婷招','2018-06-07 18:17:04',0,9999.7,1599.95,11599.7,9999.7,1599.95,'YSFP-ZP-00002930、YSFP-ZP-00003099',1),('20180607022','4403174130','11210800','FRM','梁婷招','2018-06-07 18:23:51',0,9994.66,1599.14,11593.8,9994.66,1599.14,'YSFP-ZP-00002850、YSFP-ZP-00002858',1),('20180607022','4403174130','11210801','FRM','梁婷招','2018-06-07 18:23:53',0,9998.31,1599.73,11598,9998.31,1599.73,'YSFP-ZP-00002850、YSFP-ZP-00002858',1),('20180607022','4403174130','11210802','FRM','梁婷招','2018-06-07 
18:23:54',0,9996.03,1599.37,11595.4,9996.03,1599.37,'YSFP-ZP-00002850、YSFP-ZP-00002858',1),('20180607022','4403174130','11210803','FRM','梁婷招','2018-06-07 18:23:56',0,9997.11,1599.54,11596.7,9997.11,1599.54,'YSFP-ZP-00002850、YSFP-ZP-00002858',1),('20180607022','4403174130','11210804','FRM','梁婷招','2018-06-07 18:23:58',0,9998.13,1599.7,11597.8,9998.13,1599.7,'YSFP-ZP-00002850、YSFP-ZP-00002858',1),('20180607022','4403174130','11210805','FRM','梁婷招','2018-06-07 18:23:59',0,6401.38,1024.22,7425.6,6401.38,1024.22,'YSFP-ZP-00002850、YSFP-ZP-00002858',1),('20180607022','4403174130','11210806','FRM','梁婷招','2018-06-07 18:24:01',0,9998.56,1599.76,11598.3,9998.56,1599.76,'YSFP-ZP-00002850、YSFP-ZP-00002858',1),('20180607022','4403174130','11210807','FRM','梁婷招','2018-06-07 18:24:04',0,9999.44,1599.92,11599.4,9999.44,1599.92,'YSFP-ZP-00002850、YSFP-ZP-00002858',1),('20180607023','4403174130','11210808','FRM','梁婷招','2018-06-07 18:43:09',0,4423.08,751.92,5175,4423.08,751.92,'YSFP-ZP-00002593',1),('20180607024','4403174130','11210809','FRM','梁婷招','2018-06-07 19:00:37',0,4780.86,764.94,5545.8,4780.86,764.94,'YSFP-ZP-00002624、YSFP-ZP-00002669、YSFP-ZP-00002917、YSFP-ZP-00003010',1),('20180607024','4403174130','11210810','FRM','梁婷招','2018-06-07 19:00:39',0,9995.53,1599.27,11594.8,9995.53,1599.27,'YSFP-ZP-00002624、YSFP-ZP-00002669、YSFP-ZP-00002917、YSFP-ZP-00003010',1),('2018060801','4403174130','11210814','FRM','梁婷招','2018-06-08 12:48:26',0,2833.43,453.35,3286.78,2833.43,453.35,'YSFP-ZP-00002639、YSFP-ZP-00002794',1),('2018060802','4403174130','11210812','FRM','梁婷招','2018-06-08 12:48:21',0,4704.27,799.73,5504,4704.27,799.73,'YSFP-ZP-00003138',1),('2018060803','4403174130','11210813','FRM','梁婷招','2018-06-08 12:48:25',0,3960.85,673.35,4634.2,3960.85,673.35,'YSFP-ZP-00002634',1),('2018060804','4403174130','11210815','FRM','梁婷招','2018-06-08 12:48:28',0,9999.26,1599.89,11599.2,9999.26,1599.89,'YSFP-ZP-00003140',1),('2018060804','4403174130','11210816','FRM','梁婷招','2018-06-08 
12:48:29',0,9836.94,1573.91,11410.8,9836.94,1573.91,'YSFP-ZP-00003140',1),('2018060807','4403174130','11210839','FRM','梁婷招','2018-06-08 15:43:46',0,2805.47,448.87,3254.34,2805.47,448.87,'YSFP-ZP-00002688',1),('2018060807','4403174130','11210841','FRM','梁婷招','2018-06-08 15:43:49',0,9995.74,1599.32,11595.1,9995.74,1599.32,'YSFP-ZP-00002688',1),('2018060811','4403174130','11210842','FRM','梁婷招','2018-06-08 15:43:50',0,5125.86,820.14,5946,5125.86,820.14,'YSFP-ZP-00002956',1),('2018060812','4403174130','11210840','FRM','梁婷招','2018-06-08 15:43:47',0,3736.31,635.17,4371.48,3736.31,635.17,'YSFP-ZP-00002879',1),('2018060813','4403174130','11210843','FRM','梁婷招','2018-06-08 15:43:52',0,3897.43,662.57,4560,3897.43,662.57,'',1),('2018060813','4403174130','11210844','FRM','梁婷招','2018-06-08 15:43:54',0,9991.45,1698.55,11690,9991.45,1698.55,'',1),('2018060814','4403174130','11210852','FRM','梁婷招','2018-06-08 17:06:25',0,9848.09,1575.69,11423.8,9848.09,1575.69,'YSFP-ZP-00002955',1),('2018060815','4403174130','11210845','FRM','梁婷招','2018-06-08 17:06:13',0,7940.33,1270.45,9210.78,7940.33,1270.45,'YSFP-ZP-00002932',1),('2018060815','4403174130','11210850','FRM','梁婷招','2018-06-08 17:06:22',0,9999.1,1599.86,11599,9999.1,1599.86,'YSFP-ZP-00002932',1),('2018060816','4403174130','11210846','FRM','梁婷招','2018-06-08 17:06:16',0,9997.59,1599.61,11597.2,9997.59,1599.61,'YSFP-ZP-00003094',1),('2018060816','4403174130','11210847','FRM','梁婷招','2018-06-08 17:06:18',0,9997.59,1599.61,11597.2,9997.59,1599.61,'YSFP-ZP-00003094',1),('2018060816','4403174130','11210848','FRM','梁婷招','2018-06-08 17:06:19',0,9999.22,1599.88,11599.1,9999.22,1599.88,'YSFP-ZP-00003094',1),('2018060816','4403174130','11210849','FRM','梁婷招','2018-06-08 17:06:20',0,2100.92,336.14,2437.06,2100.92,336.14,'YSFP-ZP-00003094',1),('2018060816','4403174130','11210851','FRM','梁婷招','2018-06-08 17:06:24',0,9994.68,1599.16,11593.8,9994.68,1599.16,'YSFP-ZP-00003094',1),('2018061103','4403174130','11210901','FRM','梁婷招','2018-06-11 
18:40:53',0,9994.66,1599.14,11593.8,9994.66,1599.14,'YSFP-ZP-00003144',1),('2018061103','4403174130','11210902','FRM','梁婷招','2018-06-11 18:40:56',0,9994.66,1599.14,11593.8,9994.66,1599.14,'YSFP-ZP-00003144',1),('2018061103','4403174130','11210903','FRM','梁婷招','2018-06-11 18:40:58',0,4799.14,767.86,5567,4799.14,767.86,'YSFP-ZP-00003144',1),('2018061103','4403174130','11210904','FRM','梁婷招','2018-06-11 18:41:00',0,9996.38,1599.42,11595.8,9996.38,1599.42,'YSFP-ZP-00003144',1),('2018061103','4403174130','11210905','FRM','梁婷招','2018-06-11 18:41:01',0,9999.96,1600,11600,9999.96,1600,'YSFP-ZP-00003144',1),('2018061103','4403174130','11210906','FRM','梁婷招','2018-06-11 18:41:03',0,9998.88,1599.82,11598.7,9998.88,1599.82,'YSFP-ZP-00003144',1),('2018061103','4403174130','11210907','FRM','梁婷招','2018-06-11 18:41:05',0,9998.92,1599.82,11598.7,9998.92,1599.82,'YSFP-ZP-00003144',1),('2018061103','4403174130','11210908','FRM','梁婷招','2018-06-11 18:41:07',0,9997.57,1599.63,11597.2,9997.57,1599.63,'YSFP-ZP-00003144',1),('2018061104','4403173320','04313019','FRM','梁婷招','2018-06-11 18:56:48',0,40.41,6.47,46.88,40.41,6.47,'YSFP-PP-00000102',1),('2018061201','4403174130','11210909','FRM','梁婷招','2018-06-12 10:45:54',0,5533.54,940.7,6474.24,5533.54,940.7,'YSFP-ZP-00002261、YSFP-ZP-00002465、YSFP-ZP-00002517',1),('2018061201','4403174130','11210910','FRM','梁婷招','2018-06-12 10:46:02',0,9999.37,1699.89,11699.3,9999.37,1699.89,'YSFP-ZP-00002261、YSFP-ZP-00002465、YSFP-ZP-00002517',1),('2018061202','4403174130','11210911','FRM','梁婷招','2018-06-12 10:54:10',0,9998.8,1599.8,11598.6,9998.8,1599.8,'YSFP-ZP-00002832、YSFP-ZP-00003156',1),('2018061202','4403174130','11210912','FRM','梁婷招','2018-06-12 10:54:11',0,9995.51,1599.29,11594.8,9995.51,1599.29,'YSFP-ZP-00002832、YSFP-ZP-00003156',1),('2018061202','4403174130','11210913','FRM','梁婷招','2018-06-12 10:54:13',0,4638.45,742.15,5380.6,4638.45,742.15,'YSFP-ZP-00002832、YSFP-ZP-00003156',1),('2018061203','4403174130','11210914','FRM','梁婷招','2018-06-12 
11:11:35',0,9999.87,1699.97,11699.8,9999.87,1699.97,'YSFP-ZP-00003109',1),('2018061203','4403174130','11210915','FRM','梁婷招','2018-06-12 11:11:37',0,9692.45,1647.71,11340.2,9692.45,1647.71,'YSFP-ZP-00003109',1),('2018061205','4403174130','11210916','FRM','梁婷招','2018-06-12 14:38:28',0,9984.62,1697.38,11682,9984.62,1697.38,'YSFP-ZP-00003169',1),('2018061205','4403174130','11210917','FRM','梁婷招','2018-06-12 14:38:32',0,3520,598.4,4118.4,3520,598.4,'YSFP-ZP-00003169',1),('2018061206','4403174130','11210918','FRM','梁婷招','2018-06-12 14:38:34',0,185.9,31.6,217.5,185.9,31.6,'YSFP-ZP-00002780',1),('2018061207','4403174130','11210919','FRM','梁婷招','2018-06-12 14:38:36',0,259.23,44.07,303.3,259.23,44.07,'YSFP-ZP-00003117',1),('2018061208','4403174130','11210924','FRM','梁婷招','2018-06-12 17:10:36',0,1511.54,256.96,1768.5,1511.54,256.96,'YSFP-ZP-00003120、YSFP-ZP-00003121',1),('2018061209','4403174130','11210920','FRM','梁婷招','2018-06-12 14:38:37',0,4723.08,802.92,5526,4723.08,802.92,'YSFP-ZP-00003170',1),('2018061210','4403174130','11210921','FRM','梁婷招','2018-06-12 14:38:39',0,3461.54,588.46,4050,3461.54,588.46,'YSFP-ZP-00002951',1),('2018061211','4403174130','11210922','FRM','梁婷招','2018-06-12 17:10:31',0,1880.34,319.66,2200,1880.34,319.66,'YSFP-ZP-00003123',1),('2018061212','4403174130','11210923','FRM','梁婷招','2018-06-12 17:10:34',0,1791.45,304.55,2096,1791.45,304.55,'YSFP-ZP-00003168',1),('2018061213','4403174130','11210925','FRM','梁婷招','2018-06-12 17:10:37',0,9993.67,1698.93,11692.6,9993.67,1698.93,'YSFP-ZP-00003169',1),('2018061214','4403174130','11210926','FRM','梁婷招','2018-06-12 18:00:43',0,9996.01,1599.36,11595.4,9996.01,1599.36,'YSFP-ZP-00003164、YSFP-ZP-00003163',1),('2018061214','4403174130','11210927','FRM','梁婷招','2018-06-12 18:00:47',0,5392.26,862.77,6255.03,5392.26,862.77,'YSFP-ZP-00003164、YSFP-ZP-00003163',1),('2018061215','4403174130','11210928','FRM','梁婷招','2018-06-12 
18:00:49',0,4406.9,705.1,5112,4406.9,705.1,'YSFP-ZP-00002822',1),('2018061216','4403174130','11210929','FRM','梁婷招','2018-06-12 18:00:50',0,279.49,47.51,327,279.49,47.51,'YSFP-ZP-00003084',1),('2018061219','4403174130','11210933','FRM','梁婷招','2018-06-12 18:18:10',0,9990,1598.4,11588.4,9990,1598.4,'YSFP-ZP-00003164',1),('2018061220','4403174130','11210930','FRM','梁婷招','2018-06-12 18:18:04',0,9996.38,1599.42,11595.8,9996.38,1599.42,'YSFP-ZP-00002870',1),('2018061220','4403174130','11210931','FRM','梁婷招','2018-06-12 18:18:07',0,9141.55,1462.65,10604.2,9141.55,1462.65,'YSFP-ZP-00002870',1),('2018061221','4403174130','11210932','FRM','梁婷招','2018-06-12 18:18:08',0,141.03,23.97,165,141.03,23.97,'YSFP-ZP-00002472',1),('20180620001','4403174130','18334664','FRM','梁婷招','2018-06-20 15:01:21',0,9993.1,1598.9,11592,9993.1,1598.9,'YSFP-ZP-00003175',1),('20180620001','4403174130','18334665','FRM','梁婷招','2018-06-20 15:01:24',0,9993.1,1598.9,11592,9993.1,1598.9,'YSFP-ZP-00003175',1),('20180620001','4403174130','18334666','FRM','梁婷招','2018-06-20 15:01:25',0,289.66,46.34,336,289.66,46.34,'YSFP-ZP-00003175',1),('20180620002','4403174130','18334667','FRM','梁婷招','2018-06-20 15:01:27',0,9998.53,1599.77,11598.3,9998.53,1599.77,'YSFP-ZP-00003029',1),('20180620002','4403174130','18334668','FRM','梁婷招','2018-06-20 15:01:28',0,863.54,138.16,1001.7,863.54,138.16,'YSFP-ZP-00003029',1),('20180620003','4403174130','18334669','FRM','梁婷招','2018-06-20 15:01:30',0,9984.91,1597.59,11582.5,9984.91,1597.59,'YSFP-ZP-00003223',1),('20180620003','4403174130','18334670','FRM','梁婷招','2018-06-20 15:01:31',0,9984.91,1597.59,11582.5,9984.91,1597.59,'YSFP-ZP-00003223',1),('20180620003','4403174130','18334671','FRM','梁婷招','2018-06-20 15:01:32',0,9984.91,1597.59,11582.5,9984.91,1597.59,'YSFP-ZP-00003223',1),('20180620003','4403174130','18334672','FRM','梁婷招','2018-06-20 15:01:34',0,4453.44,712.56,5166,4453.44,712.56,'YSFP-ZP-00003223',1),('20180620003','4403174130','18334673','FRM','梁婷招','2018-06-20 
15:01:35',0,9988.38,1598.12,11586.5,9988.38,1598.12,'YSFP-ZP-00003223',1),('20180620004','4403174130','18334674','FRM','梁婷招','2018-06-20 15:19:37',0,9993.1,1598.9,11592,9993.1,1598.9,'YSFP-ZP-00003269',1),('20180620004','4403174130','18334675','FRM','梁婷招','2018-06-20 15:19:40',0,9993.1,1598.9,11592,9993.1,1598.9,'YSFP-ZP-00003269',1),('20180620004','4403174130','18334676','FRM','梁婷招','2018-06-20 15:19:42',0,4151.73,664.27,4816,4151.73,664.27,'YSFP-ZP-00003269',1),('20180620005','4403174130','18334677','FRM','梁婷招','2018-06-20 15:19:44',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003190',1),('20180620005','4403174130','18334678','FRM','梁婷招','2018-06-20 15:19:45',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003190',1),('20180620005','4403174130','18334679','FRM','梁婷招','2018-06-20 15:19:46',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003190',1),('20180620005','4403174130','18334680','FRM','梁婷招','2018-06-20 15:19:48',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003190',1),('20180620005','4403174130','18334681','FRM','梁婷招','2018-06-20 15:19:49',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003190',1),('20180620005','4403174130','18334682','FRM','梁婷招','2018-06-20 15:19:51',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003190',1),('20180620005','4403174130','18334683','FRM','梁婷招','2018-06-20 15:19:52',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003190',1),('20180620005','4403174130','18334684','FRM','梁婷招','2018-06-20 15:19:54',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003190',1),('20180620005','4403174130','18334685','FRM','梁婷招','2018-06-20 15:19:55',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003190',1),('20180620005','4403174130','18334686','FRM','梁婷招','2018-06-20 15:19:56',0,4874.12,779.88,5654,4874.12,779.88,'YSFP-ZP-00003190',1),('20180620007','4403174130','18334688','FRM','梁婷招','2018-06-20 
16:04:39',0,324.61,51.94,376.55,324.61,51.94,'YSFP-ZP-00003248',1),('20180620008','4403174130','18334689','FRM','梁婷招','2018-06-20 16:04:42',0,6783.97,1085.43,7869.4,6783.97,1085.43,'YSFP-ZP-00003249',1),('20180620009','4403174130','18334690','FRM','梁婷招','2018-06-20 16:04:43',0,9997.45,1599.59,11597,9997.45,1599.59,'YSFP-ZP-00003182、YSFP-ZP-00003270',1),('20180620009','4403174130','18334691','FRM','梁婷招','2018-06-20 16:04:44',0,9997.45,1599.59,11597,9997.45,1599.59,'YSFP-ZP-00003182、YSFP-ZP-00003270',1),('20180620009','4403174130','18334692','FRM','梁婷招','2018-06-20 16:04:46',0,9997.45,1599.59,11597,9997.45,1599.59,'YSFP-ZP-00003182、YSFP-ZP-00003270',1),('20180620009','4403174130','18334693','FRM','梁婷招','2018-06-20 16:04:47',0,9997.45,1599.59,11597,9997.45,1599.59,'YSFP-ZP-00003182、YSFP-ZP-00003270',1),('20180620009','4403174130','18334694','FRM','梁婷招','2018-06-20 16:04:48',0,9997.45,1599.59,11597,9997.45,1599.59,'YSFP-ZP-00003182、YSFP-ZP-00003270',1),('20180620009','4403174130','18334695','FRM','梁婷招','2018-06-20 16:04:50',0,9997.45,1599.59,11597,9997.45,1599.59,'YSFP-ZP-00003182、YSFP-ZP-00003270',1),('20180620009','4403174130','18334696','FRM','梁婷招','2018-06-20 16:04:51',0,9997.45,1599.59,11597,9997.45,1599.59,'YSFP-ZP-00003182、YSFP-ZP-00003270',1),('20180620009','4403174130','18334697','FRM','梁婷招','2018-06-20 16:04:52',0,9997.45,1599.59,11597,9997.45,1599.59,'YSFP-ZP-00003182、YSFP-ZP-00003270',1),('20180620009','4403174130','18334698','FRM','梁婷招','2018-06-20 16:04:54',0,9997.45,1599.59,11597,9997.45,1599.59,'YSFP-ZP-00003182、YSFP-ZP-00003270',1),('20180620009','4403174130','18334699','FRM','梁婷招','2018-06-20 16:04:55',0,9268.97,1483.03,10752,9268.97,1483.03,'YSFP-ZP-00003182、YSFP-ZP-00003270',1),('20180620009','4403174130','18334700','FRM','梁婷招','2018-06-20 16:04:56',0,9268.97,1483.03,10752,9268.97,1483.03,'YSFP-ZP-00003182、YSFP-ZP-00003270',1),('20180620009','4403174130','18334701','FRM','梁婷招','2018-06-20 
16:04:58',0,9268.97,1483.03,10752,9268.97,1483.03,'YSFP-ZP-00003182、YSFP-ZP-00003270',1),('20180620009','4403174130','18334702','FRM','梁婷招','2018-06-20 16:04:59',0,6179.29,988.71,7168,6179.29,988.71,'YSFP-ZP-00003182、YSFP-ZP-00003270',1),('20180620009','4403174130','18334703','FRM','梁婷招','2018-06-20 16:05:00',0,8238.26,1318.14,9556.4,8238.26,1318.14,'YSFP-ZP-00003182、YSFP-ZP-00003270',1),('20180620009','4403174130','18334704','FRM','梁婷招','2018-06-20 16:05:02',0,9995.9,1599.34,11595.2,9995.9,1599.34,'YSFP-ZP-00003182、YSFP-ZP-00003270',1),('20180620010','4403174130','18334705','FRM','梁婷招','2018-06-20 16:40:28',0,9636.21,1541.79,11178,9636.21,1541.79,'YSFP-ZP-00003302',1),('20180620010','4403174130','18334706','FRM','梁婷招','2018-06-20 16:40:29',0,9636.21,1541.79,11178,9636.21,1541.79,'YSFP-ZP-00003302',1),('20180620010','4403174130','18334707','FRM','梁婷招','2018-06-20 16:40:31',0,9636.21,1541.79,11178,9636.21,1541.79,'YSFP-ZP-00003302',1),('20180620010','4403174130','18334708','FRM','梁婷招','2018-06-20 16:40:32',0,9636.21,1541.79,11178,9636.21,1541.79,'YSFP-ZP-00003302',1),('20180620010','4403174130','18334709','FRM','梁婷招','2018-06-20 16:40:33',0,9636.21,1541.79,11178,9636.21,1541.79,'YSFP-ZP-00003302',1),('20180620010','4403174130','18334710','FRM','梁婷招','2018-06-20 16:40:35',0,9636.21,1541.79,11178,9636.21,1541.79,'YSFP-ZP-00003302',1),('20180620010','4403174130','18334711','FRM','梁婷招','2018-06-20 16:40:36',0,9636.21,1541.79,11178,9636.21,1541.79,'YSFP-ZP-00003302',1),('20180620010','4403174130','18334712','FRM','梁婷招','2018-06-20 16:40:38',0,9636.21,1541.79,11178,9636.21,1541.79,'YSFP-ZP-00003302',1),('20180620010','4403174130','18334713','FRM','梁婷招','2018-06-20 16:40:39',0,9636.21,1541.79,11178,9636.21,1541.79,'YSFP-ZP-00003302',1),('20180620010','4403174130','18334714','FRM','梁婷招','2018-06-20 16:40:41',0,9636.21,1541.79,11178,9636.21,1541.79,'YSFP-ZP-00003302',1),('20180620011','4403174130','18334715','FRM','梁婷招','2018-06-20 
16:56:28',0,9636.21,1541.79,11178,9636.21,1541.79,'YSFP-ZP-00003302',1),('20180620011','4403174130','18334716','FRM','梁婷招','2018-06-20 16:56:31',0,9636.21,1541.79,11178,9636.21,1541.79,'YSFP-ZP-00003302',1),('20180620011','4403174130','18334717','FRM','梁婷招','2018-06-20 16:56:33',0,9636.21,1541.79,11178,9636.21,1541.79,'YSFP-ZP-00003302',1),('20180620011','4403174130','18334718','FRM','梁婷招','2018-06-20 16:56:34',0,9636.2,1541.8,11178,9636.2,1541.8,'YSFP-ZP-00003302',1),('20180620011','4403174130','18334719','FRM','梁婷招','2018-06-20 16:56:36',0,1769.48,283.12,2052.6,1769.48,283.12,'YSFP-ZP-00003302',1),('20180620011','4403174130','18334720','FRM','梁婷招','2018-06-20 16:56:37',0,9762.07,1561.93,11324,9762.07,1561.93,'YSFP-ZP-00003302',1),('20180620011','4403174130','18334721','FRM','梁婷招','2018-06-20 16:56:39',0,9995.68,1599.32,11595,9995.68,1599.32,'YSFP-ZP-00003302',1),('20180620012','4403174130','18334730','FRM','梁婷招','2018-06-20 17:59:21',0,7969.84,1275.16,9245,7969.84,1275.16,'YSFP-ZP-00003100、YSFP-ZP-00003308',1),('20180620013','4403174130','18334722','FRM','梁婷招','2018-06-20 17:59:08',0,2293.11,366.89,2660,2293.11,366.89,'YSFP-ZP-00003229',1),('20180620013','4403174130','18334728','FRM','梁婷招','2018-06-20 17:59:18',0,9991.38,1598.62,11590,9991.38,1598.62,'YSFP-ZP-00003229',1),('20180620014','4403174130','18334723','FRM','梁婷招','2018-06-20 17:59:11',0,1890.52,302.48,2193,1890.52,302.48,'YSFP-ZP-00002859',1),('20180620014','4403174130','18334729','FRM','梁婷招','2018-06-20 17:59:20',0,9964.66,1594.34,11559,9964.66,1594.34,'YSFP-ZP-00002859',1),('20180620015','4403174130','18334724','FRM','梁婷招','2018-06-20 17:59:12',0,9993.79,1599.01,11592.8,9993.79,1599.01,'YSFP-ZP-00003314',1),('20180620015','4403174130','18334725','FRM','梁婷招','2018-06-20 17:59:14',0,9993.79,1599.01,11592.8,9993.79,1599.01,'YSFP-ZP-00003314',1),('20180620015','4403174130','18334726','FRM','梁婷招','2018-06-20 
17:59:15',0,9993.79,1599.01,11592.8,9993.79,1599.01,'YSFP-ZP-00003314',1),('20180620015','4403174130','18334727','FRM','梁婷招','2018-06-20 17:59:17',0,4880.7,780.9,5661.6,4880.7,780.9,'YSFP-ZP-00003314',1),('20180620016','4403174130','18334731','FRM','梁婷招','2018-06-20 18:28:05',0,8568.97,1371.03,9940,8568.97,1371.03,'YSFP-ZP-00003083、YSFP-ZP-00003287',1),('20180620016','4403174130','18334732','FRM','梁婷招','2018-06-20 18:28:08',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003083、YSFP-ZP-00003287',1),('20180620016','4403174130','18334733','FRM','梁婷招','2018-06-20 18:28:09',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003083、YSFP-ZP-00003287',1),('20180620016','4403174130','18334734','FRM','梁婷招','2018-06-20 18:28:11',0,9056.89,1449.11,10506,9056.89,1449.11,'YSFP-ZP-00003083、YSFP-ZP-00003287',1),('20180620016','4403174130','18334735','FRM','梁婷招','2018-06-20 18:28:12',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003083、YSFP-ZP-00003287',1),('20180620016','4403174130','18334736','FRM','梁婷招','2018-06-20 18:28:14',0,2865.52,458.48,3324,2865.52,458.48,'YSFP-ZP-00003083、YSFP-ZP-00003287',1),('20180620016','4403174130','18334737','FRM','梁婷招','2018-06-20 18:28:15',0,9987.93,1598.07,11586,9987.93,1598.07,'YSFP-ZP-00003083、YSFP-ZP-00003287',1),('20180620017','4403174130','18334738','FRM','梁婷招','2018-06-21 11:21:43',0,9936.45,1589.83,11526.3,9936.45,1589.83,'YSFP-ZP-00003019',1),('20180620017','4403174130','18334739','FRM','梁婷招','2018-06-21 11:21:49',0,3536.7,565.87,4102.57,3536.7,565.87,'YSFP-ZP-00003019',1),('20180620018','4403174130','18334740','FRM','梁婷招','2018-06-21 11:25:47',0,9999.87,1599.98,11599.8,9999.87,1599.98,'YSFP-ZP-00002953',1),('20180620018','4403174130','18334741','FRM','梁婷招','2018-06-21 11:25:49',0,9999.87,1599.98,11599.8,9999.87,1599.98,'YSFP-ZP-00002953',1),('20180620018','4403174130','18334742','FRM','梁婷招','2018-06-21 
11:25:50',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00002953',1),('20180620018','4403174130','18334743','FRM','梁婷招','2018-06-21 11:25:52',0,9937.5,1590,11527.5,9937.5,1590,'YSFP-ZP-00002953',1),('20180620018','4403174130','18334744','FRM','梁婷招','2018-06-21 11:25:53',0,375,60,435,375,60,'YSFP-ZP-00002953',1),('20180620018','4403174130','18334752','FRM','梁婷招','2018-06-21 11:26:05',0,9997.93,1599.67,11597.6,9997.93,1599.67,'YSFP-ZP-00002953',1),('20180620018','4403174130','18334755','FRM','梁婷招','2018-06-21 11:26:09',0,9999.91,1599.99,11599.9,9999.91,1599.99,'YSFP-ZP-00002953',1),('20180620018','4403174130','18334756','FRM','梁婷招','2018-06-21 11:26:10',0,9984.05,1597.45,11581.5,9984.05,1597.45,'YSFP-ZP-00002953',1),('20180620019','4403174130','18334745','FRM','梁婷招','2018-06-21 11:25:55',0,452.58,72.42,525,452.58,72.42,'YSFP-ZP-00003001',1),('20180620019','4403174130','18334753','FRM','梁婷招','2018-06-21 11:26:06',0,9990.52,1598.48,11589,9990.52,1598.48,'YSFP-ZP-00003001',1),('20180620020','4403174130','18334746','FRM','梁婷招','2018-06-21 11:25:56',0,9997.5,1599.6,11597.1,9997.5,1599.6,'YSFP-ZP-00003011',1),('20180620020','4403174130','18334747','FRM','梁婷招','2018-06-21 11:25:58',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003011',1),('20180620020','4403174130','18334748','FRM','梁婷招','2018-06-21 11:25:59',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003011',1),('20180620020','4403174130','18334749','FRM','梁婷招','2018-06-21 11:26:01',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003011',1),('20180620020','4403174130','18334750','FRM','梁婷招','2018-06-21 11:26:02',0,9994.83,1599.17,11594,9994.83,1599.17,'YSFP-ZP-00003011',1),('20180620020','4403174130','18334751','FRM','梁婷招','2018-06-21 11:26:03',0,3489.64,558.36,4048,3489.64,558.36,'YSFP-ZP-00003011',1),('20180620020','4403174130','18334754','FRM','梁婷招','2018-06-21 11:26:07',0,9981.81,1597.09,11578.9,9981.81,1597.09,'YSFP-ZP-00003011',1),('20180620021','4403174130','18334757','FRM','梁婷招','2018-06-21 
11:26:12',0,6194.67,991.15,7185.82,6194.67,991.15,'YSFP-ZP-00003014、YSFP-ZP-00003018',1),('20180620022','4403174130','18334768','FRM','梁婷招','2018-06-21 12:10:03',0,9997.76,1599.64,11597.4,9997.76,1599.64,'YSFP-ZP-00003021',1),('20180620022','4403174130','18334769','FRM','梁婷招','2018-06-21 12:10:04',0,7460.17,1193.63,8653.8,7460.17,1193.63,'YSFP-ZP-00003021',1),('20180620022','4403174130','18334773','FRM','梁婷招','2018-06-21 12:10:10',0,9994.69,1599.16,11593.8,9994.69,1599.16,'YSFP-ZP-00003021',1),('20180620023','4403174130','18334774','FRM','梁婷招','2018-06-21 12:10:11',0,7786.64,1245.86,9032.5,7786.64,1245.86,'YSFP-ZP-00003022、YSFP-ZP-00003268',1),('20180620024','4403174130','18334758','FRM','梁婷招','2018-06-21 12:09:46',0,7945.22,1271.24,9216.46,7945.22,1271.24,'YSFP-ZP-00003000',1),('20180620024','4403174130','18334759','FRM','梁婷招','2018-06-21 12:09:49',0,7983.02,1277.28,9260.3,7983.02,1277.28,'YSFP-ZP-00003000',1),('20180620024','4403174130','18334760','FRM','梁婷招','2018-06-21 12:09:51',0,7983.02,1277.28,9260.3,7983.02,1277.28,'YSFP-ZP-00003000',1),('20180620024','4403174130','18334761','FRM','梁婷招','2018-06-21 12:09:52',0,7983.02,1277.28,9260.3,7983.02,1277.28,'YSFP-ZP-00003000',1),('20180620024','4403174130','18334762','FRM','梁婷招','2018-06-21 12:09:54',0,7983.02,1277.28,9260.3,7983.02,1277.28,'YSFP-ZP-00003000',1),('20180620024','4403174130','18334763','FRM','梁婷招','2018-06-21 12:09:55',0,7983.02,1277.28,9260.3,7983.02,1277.28,'YSFP-ZP-00003000',1),('20180620024','4403174130','18334764','FRM','梁婷招','2018-06-21 12:09:57',0,7983.02,1277.28,9260.3,7983.02,1277.28,'YSFP-ZP-00003000',1),('20180620024','4403174130','18334765','FRM','梁婷招','2018-06-21 12:09:58',0,3991.49,638.66,4630.15,3991.49,638.66,'YSFP-ZP-00003000',1),('20180620025','4403174130','18334766','FRM','梁婷招','2018-06-21 12:10:00',0,9996.72,1599.48,11596.2,9996.72,1599.48,'YSFP-ZP-00003041',1),('20180620025','4403174130','18334767','FRM','梁婷招','2018-06-21 
12:10:01',0,9996.72,1599.48,11596.2,9996.72,1599.48,'YSFP-ZP-00003041',1),('20180620025','4403174130','18334770','FRM','梁婷招','2018-06-21 12:10:06',0,9998.97,1599.83,11598.8,9998.97,1599.83,'YSFP-ZP-00003041',1),('20180620025','4403174130','18334771','FRM','梁婷招','2018-06-21 12:10:07',0,9991.91,1598.71,11590.6,9991.91,1598.71,'YSFP-ZP-00003041',1),('20180620025','4403174130','18334772','FRM','梁婷招','2018-06-21 12:10:08',0,4562.58,730.02,5292.6,4562.58,730.02,'YSFP-ZP-00003041',1),('20180620025','4403174130','18334775','FRM','梁婷招','2018-06-21 12:10:13',0,9935.34,1589.66,11525,9935.34,1589.66,'YSFP-ZP-00003041',1),('20180620025','4403174130','18334776','FRM','梁婷招','2018-06-21 12:10:14',0,9997.32,1599.58,11596.9,9997.32,1599.58,'YSFP-ZP-00003041',1),('20180620025','4403174130','18334777','FRM','梁婷招','2018-06-21 12:10:16',0,9973.55,1595.77,11569.3,9973.55,1595.77,'YSFP-ZP-00003041',1),('20180620025','4403174130','18334778','FRM','梁婷招','2018-06-21 12:10:17',0,9992.4,1598.78,11591.2,9992.4,1598.78,'YSFP-ZP-00003041',1),('20180620025','4403174130','18334779','FRM','梁婷招','2018-06-21 12:10:19',0,9992.58,1598.82,11591.4,9992.58,1598.82,'YSFP-ZP-00003041',1),('20180621001','4403173320','12927189','FRM','梁婷招','2018-06-21 16:12:23',0,664.91,106.39,771.3,664.91,106.39,'YSFP-PP-00000172',1),('20180621002','4403173320','12927178','FRM','梁婷招','2018-06-21 16:12:00',0,404.05,64.65,468.7,404.05,64.65,'YSFP-ZP-00003333',1),('20180621003','4403173320','12927179','FRM','梁婷招','2018-06-21 16:12:08',0,523.24,83.72,606.96,523.24,83.72,'YSFP-PP-00000149',1),('20180621004','4403173320','12927180','FRM','梁婷招','2018-06-21 16:12:09',0,130.78,20.93,151.71,130.78,20.93,'YSFP-PP-00000173',1),('20180621005','4403173320','12927190','FRM','梁婷招','2018-06-21 16:12:25',0,1212.93,194.07,1407,1212.93,194.07,'YSFP-PP-00000148',1),('20180621006','4403173320','12927181','FRM','梁婷招','2018-06-21 
16:12:11',0,1376.97,220.31,1597.28,1376.97,220.31,'YSFP-PP-00000162',1),('20180621007','4403173320','12927191','FRM','梁婷招','2018-06-21 16:12:27',0,661.42,105.83,767.25,661.42,105.83,'YSFP-PP-00000167、YSFP-PP-00000168',1),('20180621008','4403173320','12927182','FRM','梁婷招','2018-06-21 16:12:13',0,795.41,127.27,922.68,795.41,127.27,'YSFP-PP-00000123',1),('20180621009','4403173320','12927183','FRM','梁婷招','2018-06-21 16:12:15',0,161.03,25.77,186.8,161.03,25.77,'YSFP-PP-00000179',1),('20180621010','4403173320','12927184','FRM','梁婷招','2018-06-21 16:12:16',0,6520.69,1043.31,7564,6520.69,1043.31,'YSFP-PP-00000171',1),('20180621011','4403173320','12927185','FRM','梁婷招','2018-06-21 16:12:18',0,73.91,11.82,85.73,73.91,11.82,'YSFP-PP-00000107',1),('20180621012','4403173320','12927192','FRM','梁婷招','2018-06-21 16:12:28',0,79.01,12.65,91.66,79.01,12.65,'YSFP-PP-00000118、YSFP-PP-00000112',1),('20180621013','4403173320','12927186','FRM','梁婷招','2018-06-21 16:12:19',0,154.21,24.67,178.88,154.21,24.67,'YSFP-PP-00000114',1),('20180621014','4403173320','12927187','FRM','梁婷招','2018-06-21 16:12:21',0,317.43,50.79,368.22,317.43,50.79,'YSFP-PP-00000116',1),('20180621015','4403173320','12927188','FRM','梁婷招','2018-06-21 16:12:22',0,308.88,49.42,358.3,308.88,49.42,'YSFP-PP-00000120',1),('20180621016','4403173320','12927195','FRM','梁婷招','2018-06-21 17:03:00',0,500.69,80.11,580.8,500.69,80.11,'YSFP-PP-00000172',1),('20180621017','4403173320','12927196','FRM','梁婷招','2018-06-21 17:03:03',0,164.55,26.33,190.88,164.55,26.33,'YSFP-PP-00000178',1),('20180621018','4403173320','12927200','FRM','梁婷招','2018-06-21 17:03:09',0,349.16,55.86,405.02,349.16,55.86,'YSFP-PP-00000165',1),('20180621019','4403173320','12927197','FRM','梁婷招','2018-06-21 17:03:05',0,565,90.4,655.4,565,90.4,'YSFP-PP-00000176',1),('20180621020','4403173320','12927198','FRM','梁婷招','2018-06-21 17:03:06',0,590.52,94.48,685,590.52,94.48,'YSFP-PP-00000121',1),('20180621021','4403173320','12927199','FRM','梁婷招','2018-06-21 
17:03:08',0,2931.04,468.96,3400,2931.04,468.96,'YSFP-ZP-00003157',1),('20180625001','4403174130','18334843','FRM','梁婷招','2018-06-25 12:36:57',0,9962.76,1594.04,11556.8,9962.76,1594.04,'YSFP-ZP-00003092',1),('20180625001','4403174130','18334844','FRM','梁婷招','2018-06-25 12:37:01',0,5323.28,851.72,6175,5323.28,851.72,'YSFP-ZP-00003092',1),('20180625001','4403174130','18334849','FRM','梁婷招','2018-06-25 12:37:10',0,9978.02,1596.48,11574.5,9978.02,1596.48,'YSFP-ZP-00003092',1),('20180625001','4403174130','18334850','FRM','梁婷招','2018-06-25 12:37:11',0,9719.82,1555.18,11275,9719.82,1555.18,'YSFP-ZP-00003092',1),('20180625001','4403174130','18334856','FRM','梁婷招','2018-06-25 12:37:22',0,9963.7,1594.2,11557.9,9963.7,1594.2,'YSFP-ZP-00003092',1),('20180625001','4403174130','18334858','FRM','梁婷招','2018-06-25 12:37:25',0,9953.28,1592.52,11545.8,9953.28,1592.52,'YSFP-ZP-00003092',1),('20180625002','4403174130','18334845','FRM','梁婷招','2018-06-25 12:37:03',0,9811.97,1668.03,11480,9811.97,1668.03,'YSFP-ZP-00001937',1),('20180625002','4403174130','18334846','FRM','梁婷招','2018-06-25 12:37:05',0,4393.16,746.84,5140,4393.16,746.84,'YSFP-ZP-00001937',1),('20180625002','4403174130','18334847','FRM','梁婷招','2018-06-25 12:37:07',0,9990.67,1698.41,11689.1,9990.67,1698.41,'YSFP-ZP-00001937',1),('20180625002','4403174130','18334848','FRM','梁婷招','2018-06-25 12:37:08',0,610.26,103.74,714,610.26,103.74,'YSFP-ZP-00001937',1),('20180625002','4403174130','18334851','FRM','梁婷招','2018-06-25 12:37:13',0,9989.74,1698.26,11688,9989.74,1698.26,'YSFP-ZP-00001937',1),('20180625002','4403174130','18334852','FRM','梁婷招','2018-06-25 12:37:14',0,9805.13,1666.87,11472,9805.13,1666.87,'YSFP-ZP-00001937',1),('20180625002','4403174130','18334853','FRM','梁婷招','2018-06-25 12:37:16',0,7419.66,1261.34,8681,7419.66,1261.34,'YSFP-ZP-00001937',1),('20180625002','4403174130','18334854','FRM','梁婷招','2018-06-25 
12:37:17',0,9670.59,1644.01,11314.6,9670.59,1644.01,'YSFP-ZP-00001937',1),('20180625002','4403174130','18334855','FRM','梁婷招','2018-06-25 12:37:19',0,9911.11,1684.89,11596,9911.11,1684.89,'YSFP-ZP-00001937',1),('20180625002','4403174130','18334857','FRM','梁婷招','2018-06-25 12:37:24',0,9964.78,1694.02,11658.8,9964.78,1694.02,'YSFP-ZP-00001937',1),('20180625002','4403174130','18334859','FRM','梁婷招','2018-06-25 12:37:27',0,9993.04,1698.82,11691.9,9993.04,1698.82,'YSFP-ZP-00001937',1),('20180625002','4403174130','18334860','FRM','梁婷招','2018-06-25 12:37:28',0,9989.75,1698.25,11688,9989.75,1698.25,'YSFP-ZP-00001937',1),('20180625002','4403174130','18334861','FRM','梁婷招','2018-06-25 12:37:30',0,9977.78,1696.22,11674,9977.78,1696.22,'YSFP-ZP-00001937',1),('20180625002','4403174130','18334862','FRM','梁婷招','2018-06-25 12:37:32',0,9996.74,1699.44,11696.2,9996.74,1699.44,'YSFP-ZP-00001937',1),('20180626050','4403174130','18334960','FRM','梁婷招','2018-06-26 18:15:15',0,9996.44,1599.43,11595.9,9996.44,1599.43,'YSFP-ZP-00003393',1),('20180626050','4403174130','18334961','FRM','梁婷招','2018-06-26 18:15:18',0,694.94,111.19,806.13,694.94,111.19,'YSFP-ZP-00003393',1),('2018072001','4403181130','09670027','FRM','梁婷招','2018-07-20 15:47:53',0,9006.21,1440.99,10447.2,9006.21,1440.99,'YSFP-ZP-00002681',1),('2018072002','4403181130','09670028','FRM','梁婷招','2018-07-20 15:48:01',0,849.93,135.98,985.91,849.93,135.98,'YSFP-ZP-00002749',1),('2018072003','4403181130','09670029','FRM','梁婷招','2018-07-20 15:48:04',0,8203.56,1312.57,9516.13,8203.56,1312.57,'YSFP-ZP-00002712、YSFP-ZP-00002795、YSFP-ZP-00002972、YSFP-ZP-00003075、YSFP-ZP-00003295、YSFP-ZP-00003430、YSFP-ZP-00003509、YSFP-ZP-00003676',1),('51','4403182130','04950327','FRM','梁婷招','2018-10-12 17:00:52',0,816.11,130.59,946.7,816.11,130.59,'',1),('53','4403182130','04950403','FRM','梁婷招','2018-10-15 17:00:07',0,400.52,64.08,464.6,400.52,64.08,'',1),('54','4403182130','04950457','FRM','梁婷招','2018-10-16 
11:33:38',0,1187.07,189.93,1377,1187.07,189.93,'',1),('55','4403182130','04950473','FRM','梁婷招','2018-10-16 12:44:33',0,172.94,27.67,200.61,172.94,27.67,'',1),('56','4403182130','04950474','FRM','梁婷招','2018-10-16 12:44:36',0,267.03,42.71,309.74,267.03,42.71,'',1),('57','4403182130','04950475','FRM','梁婷招','2018-10-16 12:44:38',0,297.93,47.67,345.6,297.93,47.67,'',1),('58','4403182130','04950525','FRM','梁婷招','2018-10-16 17:03:36',0,557.64,89.23,646.87,557.64,89.23,'',1),('62','4403182130','04950726','FRM','梁婷招','2018-10-18 16:52:30',0,155.72,24.91,180.63,155.72,24.91,'',1),('65','4403182130','04950918','FRM','梁婷招','2018-10-23 17:06:23',0,2242,358.7,2600.7,2242,358.7,'',1),('66','4403182130','04950919','FRM','梁婷招','2018-10-23 17:06:26',0,1124.14,179.86,1304,1124.14,179.86,'',1),('67','4403182130','04950922','FRM','梁婷招','2018-10-23 17:27:44',0,379.31,60.69,440,379.31,60.69,'',1),('68','4403182130','04950921','FRM','梁婷招','2018-10-23 17:27:41',0,322.41,51.59,374,322.41,51.59,'',1),('72','4403182130','04950982','FRM','梁婷招','2018-10-24 17:37:12',0,253.5,40.55,294.05,253.5,40.55,'',1),('73','4403182130','04950983','FRM','梁婷招','2018-10-24 17:37:15',0,211.2,33.8,245,211.2,33.8,'',1),('74','4403182130','04950988','FRM','梁婷招','2018-10-24 17:54:56',0,1725.86,276.14,2002,1725.86,276.14,'',1),('76','4403182130','04950989','FRM','梁婷招','2018-10-24 18:00:09',0,280.61,44.9,325.51,280.61,44.9,'',1),('88','4403182130','04951086','FRM','梁婷招','2018-10-29 18:53:35',0,1896.55,303.45,2200,1896.55,303.45,'',1),('89','4403182130','04951085','FRM','梁婷招','2018-10-29 18:53:31',0,310.34,49.66,360,310.34,49.66,'',1),('90','4403182130','04951146','FRM','梁婷招','2018-10-30 17:01:49',0,949.14,151.86,1101,949.14,151.86,'',1),('94','4403182130','04951205','FRM','梁婷招','2018-10-31 11:22:04',0,3446.33,551.39,3997.72,3446.33,551.39,'',1),('95','4403182130','04951206','FRM','梁婷招','2018-10-31 11:22:11',0,1342.76,214.84,1557.6,1342.76,214.84,'',1),('96','4403182130','04951207','FRM','梁婷招','2018-10-31 
11:22:19',0,599.66,95.94,695.6,599.66,95.94,'',1),('IVC2018090733588','4403174320','05290096','FRM','管理员','2018-09-07 20:09:47',0,503.17,80.51,583.68,503.17,80.51,'',1),('IVC2018091759103','4403174320','05290103','FRM','梁婷招','2018-09-18 17:49:42',0,100.34,16.06,116.4,100.34,16.06,'',1),('IVC2018091789197','4403174320','05290104','FRM','梁婷招','2018-09-18 17:49:44',0,120.26,19.24,139.5,120.26,19.24,'',1),('IVC2018092893237','0440318001','05713226','FRM','邹育林','2018-09-29 10:46:58',0,14.63,2.34,16.97,14.63,2.34,'',1),('IVC2018092989849','0440318001','05713227','FRM','邹育林','2018-09-29 10:59:45',0,8.36,1.34,9.7,8.36,1.34,'',1),('IVC2018093082425','0440318001','05713228','FRM','邹育林','2018-09-30 11:46:14',0,711.12,113.78,824.9,711.12,113.78,'',1),('IVC2018100929726','0440318001','05713229','FRM','邹育林','2018-10-09 11:11:42',0,662.76,106.04,768.8,662.76,106.04,'',1),('IVC2018101090764','0440318001','05713230','FRM','邹育林','2018-10-10 18:30:28',0,180.47,28.88,209.35,180.47,28.88,'',1),('IVC2018101179266','0440318001','05713231','FRM','邹育林','2018-10-11 14:37:20',1,955.86,152.94,1108.8,955.86,152.94,'',1),('IVC2018101502454','0440318001','05713232','FRM','邹育林','2018-10-15 16:46:24',0,11.97,1.91,13.88,11.97,1.91,'',1),('IVC2018101727614','0440318001','05713233','FRM','邹育林','2018-10-17 19:27:15',0,210.34,33.66,244,210.34,33.66,'',1),('IVC2018101827069','0440318001','05713234','FRM','邹育林','2018-10-18 15:13:16',0,1742.62,278.84,2021.46,1742.62,278.84,'',1),('IVC2018101937023','0440318001','05713235','FRM','邹育林','2018-10-19 12:55:13',0,5997.33,959.57,6956.9,5997.33,959.57,'',1),('IVC2018101947248','0440318001','05713236','FRM','邹育林','2018-10-19 17:05:01',0,40.34,6.46,46.8,40.34,6.46,'',1),('IVC2018102300505','0440318001','05713238','FRM','邹育林','2018-10-23 18:08:28',0,11.67,1.86,13.53,11.67,1.86,'',1),('IVC2018102351684','0440318001','05713239','FRM','邹育林','2018-10-23 18:11:16',0,7.86,1.27,9.13,7.86,1.27,'',1),('IVC2018102390894','0440318001','05713237','FRM','邹育林','2018-10-23 
18:03:55',0,7.88,1.27,9.15,7.88,1.27,'',1),('IVC2018102510556','0440318001','05713287','FRM','邹育林','2018-10-25 12:09:08',0,8.45,1.35,9.8,8.45,1.35,'',1),('IVC2018102537609','0440318001','05713288','FRM','邹育林','2018-10-25 12:10:16',0,8.83,1.41,10.24,8.83,1.41,'',1),('IVC2018102552170','0440318001','05713290','FRM','邹育林','2018-10-25 18:57:06',0,98.27,15.73,114,98.27,15.73,'',1),('IVC2018102555504','0440318001','05713289','FRM','邹育林','2018-10-25 14:56:49',0,88.53,14.16,102.69,88.53,14
\ No newline at end of file
No preview for this file type
No preview for this file type
from utils.date_handler import DateHandler
from utils.msg_handler import MsgHandler
from utils.log_handler import LogHandler
import subprocess
class LoadHbase:
    """Bulk-load an HDFS file into an HBase table via the ImportTsv MapReduce job.

    sep: field separator used in the source file
    """
    @staticmethod
    def cmd_load(row, table, hdfs_file, message, sep):
        """Run HBase ImportTsv for *hdfs_file*, log its output to ELK, notify DingTalk.

        row:       comma-joined "cf:qualifier" column spec (HBASE_ROW_KEY is prepended here)
        table:     target HBase table name, e.g. "sku:ickey_index"
        hdfs_file: HDFS path of the TSV file to import
        message:   DingTalk notification text (sent unconditionally, even on failure)
        sep:       single-character field separator of the TSV file
        """
        # Execute the shell command
        msg = ""
        # NOTE(review): shell=True with interpolated arguments is a command-injection
        # risk if any parameter ever comes from untrusted input — verify callers.
        cmd = "/data2/hbase/hbase-2.0.1/bin/hbase org.apache.hadoop.hbase.mapreduce.ImportTsv \
            -Dimporttsv.columns=HBASE_ROW_KEY,%s '-Dimporttsv.separator=%s' %s %s" % (row, sep, table, hdfs_file)
        # cmd = "/data2/hbase/hbase-2.0.1/bin/hbase org.apache.hadoop.hbase.mapreduce.ImportTsv " \
        #       "-Dimporttsv.columns=HBASE_ROW_KEY," + row + \
        #       " '-Dimporttsv.separator=|' " + table + " " + hdfs_file
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        # communicate() returns (stdout, stderr); both are folded into the log body
        for info in p.communicate():
            msg += str(info) + "\n"
        # Write captured output to ELK
        # NOTE(review): the log path concatenates directory and date with no separator,
        # yielding e.g. '/data3/dataLog/ickey_index20190101.log' — confirm this is intended.
        LogHandler.elk_log(msg, 101, '/data3/dataLog/ickey_index' + DateHandler.now_date(0, 2) + '.log',
                           'ickey_index')
        # Send notification (regardless of whether the import job succeeded)
        MsgHandler.send_dd_msg(message)
from extract.ex_base import Base
from utils.date_handler import DateHandler
from utils.db_handler import DBHandler
class LoadMysql:
    """Helpers that insert extracted records into MySQL tables row by row."""

    @staticmethod
    def sample_load(col, table, data, db, cal_time=True):
        """Insert every record of *data* (mapping: key -> row dict) into *table*.

        col:      list of column names
        table:    target table name
        db:       open DB connection; closed when done
        cal_time: when True, stamp each record with insert_time / cal_ts
        """
        columns = Base.col_to_str(col)
        for key in data:
            record = data[key]
            if cal_time:
                # Stamp bookkeeping fields on the record itself (mutates caller's dict)
                record['insert_time'] = DateHandler.now_datetime()
                record['cal_ts'] = DateHandler.now_date(1, 1)
            values = Base.value_to_str(col, record)
            insert_sql = "INSERT INTO %s (%s) VALUES (%s)" % (table, columns, values)
            DBHandler.insert(db, insert_sql)
        db.close()

    @staticmethod
    def simple_dict_load(col, table, data, db, cal_time=True):
        """Insert every row dict in the iterable *data* into *table*.

        col:      list of column names
        table:    target table name
        db:       open DB connection; closed when done
        cal_time: when True, stamp each record with cal_ts
        """
        columns = Base.col_to_str(col)
        for record in data:
            if cal_time:
                record['cal_ts'] = DateHandler.date_time(1)
            values = Base.value_to_str(col, record)
            insert_sql = "INSERT INTO %s (%s) VALUES (%s)" % (table, columns, values)
            DBHandler.insert(db, insert_sql)
        db.close()

    @staticmethod
    def keyword_load(col, table, value, db):
        """Insert a single row whose values are already stringified (*value* mirrors *col*)."""
        columns = Base.col_to_str(col)
        values = Base.col_to_str(value)
        insert_sql = "INSERT INTO %s (%s) VALUES (%s)" % (table, columns, values)
        DBHandler.insert(db, insert_sql)
        db.close()
from utils.excel_handler import ExcelHandler
from utils.db_handler import DBHandler
from utils.date_handler import DateHandler
from config.conn_list import ConnList
import datetime
# One-off backfill: re-assign "pending" customers listed in an Excel sheet to salesmen.
# For each row: resolve the platform user by mobile/email, resolve the salesman's id
# by name, then point the CRM lie_salesman record at the internal user id.
data = ExcelHandler.read_to_excel('待分配客户-重新分配.xls', 'Sheet1', 1)
data.pop(0)  # drop the header row
db = ConnList.Order()
ic_db = ConnList.IcData()
ds_db = ConnList.WrCrm()
now_ts = DateHandler.now_datetime()  # NOTE(review): unused below
# Label -> code maps (NOTE(review): unused below — confirm before deleting)
com_types = {'默认': 0, '代工厂': 1, '终端': 2, '代理商': 3, '贸易商': 4, '其他': 5}
cus_types = {'默认': 0, '老板': 1, '采购': 2, '工程师': 3, '学生': 4, '其他': 5}
print(len(data))
for row in data:
    email = str(row[0]).strip()
    mobile = str(row[1]).strip()
    company = str(row[2]).strip()
    sale_man = str(row[3]).strip()
    # Prefer mobile as the lookup key; fall back to email
    # NOTE(review): values are interpolated into SQL — safe only for trusted spreadsheets
    if mobile != '':
        sql = "SELECT user_id FROM lie_user_main WHERE mobile = \'%s\'" % mobile
    else:
        sql = "SELECT user_id FROM lie_user_main WHERE email = \'%s\'" % email
    user = DBHandler.read(db, sql)
    if len(user) > 0:
        outter_uid = user[0][0]
        # Resolve internal salesman id by display name
        sql = "select userId from user_info where name = \'%s\'" % sale_man
        sale = DBHandler.read(db=ic_db, sql=sql)
        if len(sale) > 0:
            sale_id = sale[0][0]
            # CRM user whose outter_uid links back to the platform user
            sql = "SELECT user_id FROM lie_user WHERE outter_uid = %d AND source IN (1,2)" % outter_uid
            is_exist = DBHandler.read(ds_db, sql)
            if len(is_exist) > 0:
                user_id = is_exist[0][0]
                # NOTE(review): updates rows matched by the *platform* uid and rewrites
                # user_id to the CRM uid — confirm lie_salesman.user_id semantics.
                sql = "UPDATE lie_salesman SET user_id = %d,sale_id = %d WHERE user_id = %d" % (user_id, sale_id, outter_uid)
                DBHandler.update(ds_db, sql)
# print(len(data))
from config.conn_list import ConnList
from utils.db_handler import DBHandler
# One-off report: count paid self-operated order items (goods_type = 2) per brand,
# most popular first. The Redis write-back is currently disabled.
db = ConnList.Order()
r = ConnList.WriteRedis23()  # Redis handle for the (commented-out) hset below
sql = "SELECT i.brand_id, COUNT(i.brand_id) FROM lie_order_items i \
    LEFT JOIN lie_order o ON i.order_id = o.order_id \
    WHERE i.goods_type = 2 AND i.status = 1 AND i.brand_id != 0 \
    GROUP BY i.brand_id ORDER BY COUNT(i.brand_id) DESC"
result = DBHandler.read(db, sql)
print(len(result))
for row in result:
    print(row)
    brand_id = row[0]     # lie_order_items.brand_id
    brand_count = row[1]  # number of matching items for the brand
    # r.hset("Top_Seller_Brand", brand_id, brand_count)
from config.conn_list import ConnList
from utils.db_handler import DBHandler
# One-off backfill: fill lie_com_credits.erp_company_code from the supply-chain
# customer table, matching on the exact company name.
chain_db = ConnList.Chain()
credit_db = ConnList.Credit()
sql = "SELECT id,company_name FROM lie_com_credits WHERE erp_company_code = \'\'"
result = DBHandler.read(credit_db, sql)
for row in result:
    com_id = row[0]
    company_name = row[1]
    # Look up the ERP customer code by full company name (first match wins)
    # NOTE(review): company_name is interpolated into SQL — a name containing a quote
    # breaks the statement; acceptable only for trusted internal data.
    sql = "SELECT customer_code \
        FROM lie_company c \
        LEFT JOIN lie_customer u ON c.company_id = u.company_id\
        WHERE company_full_name = \'%s\' LIMIT 1" % company_name
    exist = DBHandler.read(chain_db, sql)
    if len(exist) > 0:
        company_code = exist[0][0]
        if company_code is not None:
            sql = "UPDATE lie_com_credits SET erp_company_code = \'%s\' WHERE id = %d" % (company_code, com_id)
            DBHandler.update(credit_db, sql)
\ No newline at end of file
from pipeline.pi_behavior import PiBehavior
from pipeline.pi_tag import PiTag
from pipeline.pi_goods import PiGoods
from pipeline.pi_page import PiPage
from pipeline.pi_activity import PiActivity
from pipeline.pi_daily import PiDaily
from pipeline.pi_weekly import PiWeekly
from pipeline.pi_lx_log import PiLxLog
from pipeline.pi_lx_log_url import PiLxLogUrl
from pipeline.pi_rank import PiRank
from utils.date_handler import DateHandler
import sys
if __name__ == '__main__':
    # Dispatch exactly one pipeline task, selected by the first CLI argument.
    # A given argv[1] can match at most one branch, so an elif chain is equivalent
    # to the repeated standalone checks it replaces.
    if len(sys.argv) > 1:
        task = sys.argv[1]
        if task == 'sum_behavior':                    # behavior aggregation
            PiBehavior.pipeline_sum_behavior()
        elif task == 'zy_adtag':                      # self-operated adtag
            PiTag().pipeline_zy_adtag()
        elif task == 'all_adtag':                     # all adtag
            PiTag().pipeline_all_adtag()
        elif task == 'erp_order':                     # consignment offline orders
            PiGoods.pipeline_special_canal()
        elif task == 'page_trans':                    # page conversion
            PiPage().pipeline_pageTag()
        elif task == 'ac_cal':                        # daily activity calculation
            PiActivity.pipeline_daily_activity()
        elif task == 'share_friend':                  # invite-a-friend wallet activity
            PiActivity.pipeline_wallet_activity()
        elif task == 'week_hot_goods':                # weekly hot goods
            PiWeekly.pipeline_week_hot_goods()
        elif task == 'week_classify_hot_goods':       # weekly hot goods by level-1/2 class
            PiWeekly.pipeline_week_classify_hot_goods()
        elif task == 'lx_brand':                      # weekly brands
            PiWeekly.pipeline_lx_brand()
        elif task == 'lx_order':                      # weekly orders
            PiWeekly.pipeline_lx_order()
        elif task == 'crm_user':                      # refresh CRM user info
            PiDaily.pipeline_crm_user()
        elif task == 'lx_log':                        # log parsing
            PiLxLog().pipeline_lx_log()
        elif task == 'lx_log_url':
            PiLxLogUrl().pipeline_lx_log_url()
        elif task == 'ly_log':
            PiLxLog().pipeline_ly_log()
        elif task == 'lx_rank1':
            # Rank over yesterday's window [date_time(1), date_time(0))
            rank_start = DateHandler.date_time(1)
            rank_end = DateHandler.date_time(0)
            PiRank().rank_one(rank_start, rank_end)
        elif task == 'lx_rank3':
            PiRank().rank_three()
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
from extract.ex_dashboard import ExDashboard
from extract.ex_user import ExUser
from extract.ex_activity import ExActivity
from extract.ex_order import ExOrder
from translate.ts_activity import TsActivity
from translate.ts_wallet import TsWallet
from load.load_mysql import LoadMysql
from extract.ex_erp import ExERP
from config.conn_list import ConnList
from utils.date_handler import DateHandler
from utils.db_handler import DBHandler
from utils.excel_handler import ExcelHandler
import time
class PiActivity:
"""
流失召回数据
"""
@staticmethod
def pipeline_recall_user():
rd = []
db = ConnList.Order()
ic_db = ConnList.IcData()
ac_start_time = 1562515200
ac_end_time = 1564156800
# ac_end_time = 1563764597
data = ExcelHandler.read_to_excel('流失召回用户.xlsx', '旧同事四个月未跟进重新分配激活名单', 1)
data.pop(0)
for row in data:
company = row[0]
account = row[1] if type(row[1]) == str else int(row[1])
sale_man = ''
if '@' in str(account):
sql_user = "SELECT user_id FROM lie_user_main WHERE email = \'%s\'" % account
else:
sql_user = "SELECT user_id FROM lie_user_main WHERE mobile = \'%s\'" % account
rs_user = DBHandler.read(db, sql_user)
user_id = rs_user[0][0] if len(rs_user) > 0 else 99999999
if len(rs_user) > 0:
sql_sale = "SELECT sale_id FROM lie_order WHERE user_id = %d ORDER BY order_id DESC LIMIT 1" % user_id
sale = DBHandler.read(db, sql_sale)
if len(sale) > 0:
sale_id = sale[0][0]
sql = 'select name from user_info where userId = %d' % sale_id
results = DBHandler.read(db=ic_db, sql=sql)
sale_man = results[0][0] if len(results) > 0 else ''
print(account, user_id, sale_man)
pay_amount = 0
max_time = 0
min_time = 9999999999
sql_paid = "SELECT p.pay_amount,p.pay_time,o.currency \
FROM lie_pay_log p \
LEFT JOIN lie_order o ON p.order_id = o.order_id \
LEFT JOIN lie_user_main u ON p.user_id = u.user_id \
WHERE \
o.is_type = 0 \
AND o.order_pay_type != 3 \
AND p.user_id = %d" % (user_id)
rs_paid = DBHandler.read(db, sql_paid)
for rp in rs_paid:
pay_time = rp[1]
amount = float(rp[0]) if rp[2] == 1 else float(rp[0]) * 6.85
if max_time < pay_time < ac_start_time:
max_time = pay_time
if ac_start_time <= pay_time <= ac_end_time and pay_time < min_time:
min_time = pay_time
pay_amount += amount
ac_between = round((min_time - max_time) / 86400)
rd.append({'company': company,
'account': account,
'sale_man': sale_man,
'pay_amount': pay_amount if pay_amount != 0 else '',
'max_time': DateHandler.unix_to_date(max_time, "%Y-%m-%d") if max_time != 0 else '',
'min_time': DateHandler.unix_to_date(min_time, "%Y-%m-%d") if min_time != 9999999999 else '',
'ac_between': str(ac_between) + '天' if ac_between < 365 * 10 else ''})
# Excel标题
title = ['公司', '账号', '跟进业务员', '付款金额', '活动前最后一次付款时间', '活动中第一次付款时间', '付款时间间隔']
# Excel内容
content = ['company', 'account', 'sale_man', 'pay_amount', 'max_time', 'min_time', 'ac_between']
ExcelHandler.write_to_excel(title, content, rd, "result", result_type=2)
"""
历史数据
"""
@staticmethod
def pipeline_history():
# time
db = ConnList.Order()
ul_1 = []
ul_2 = []
# 6.10
day_1 = 1560096000
start_time_1 = (day_1 - 86400 * 30)
end_time_1 = day_1 + 86400
start_time_2 = 1560700800
end_time_2 = 1561737600
# 7.10
# day_1 = 1562688000
# start_time_1 = (day_1 - 86400 * 30)
# end_time_1 = day_1 + 86400
#
# start_time_2 = 1563120000
# end_time_2 = 1564156800
sql_his = "SELECT o.user_id \
FROM lie_order o \
LEFT JOIN lie_user_main u ON o.user_id = u.user_id \
WHERE u.is_type = 0 \
AND u.is_test = 0 \
AND o.is_type = 0 \
AND o.status = 10 \
AND o.order_type = 1 \
AND o.order_goods_type IN (1,2) \
AND o.create_time <= %d \
GROUP BY o.user_id \
HAVING COUNT(o.user_id) > 1" % start_time_1
his_user = DBHandler.read(db, sql_his)
sql_paid = "SELECT p.user_id \
FROM lie_pay_log p \
LEFT JOIN lie_order o ON p.order_id = o.order_id \
LEFT JOIN lie_user_main u ON p.user_id = u.user_id \
WHERE \
o.is_type = 0 \
AND u.is_test = 0 \
AND u.is_type = 0 \
AND p.pay_time BETWEEN %d AND %d \
GROUP BY p.user_id" % (start_time_2, end_time_2)
paid = DBHandler.read(db, sql_paid)
for row in paid:
ul_1.append(row[0])
for row in his_user:
user_id = row[0]
sql_exist = "SELECT 1 FROM lie_order WHERE create_time BETWEEN %d AND %d AND user_id = %d" % (start_time_1, end_time_1, user_id)
exist = DBHandler.read(db, sql_exist)
if len(exist) == 0:
ul_2.append(user_id)
print(len(ul_1), len(ul_2), list(set(ul_1).intersection(set(ul_2))))
print('占比: ', len(list(set(ul_1).intersection(set(ul_2)))) / len(ul_1) * 100)
"""
所有用户明细
"""
@staticmethod
def pipeline_all_user_detail():
ud = {}
rd = []
db = ConnList.Order()
ic_db = ConnList.IcData()
exchange = ExERP(DateHandler.now_date(0, 1)).get_erp_exchange()
ac_start_time = 1562515200
ac_end_time = 1564156800
ac_start_time_2 = 1564329600
ac_end_time_2 = 1572537600
sql = "SELECT p.user_id, p.pay_amount, o.currency, o.sale_id, p.order_id, p.pay_time, u.mobile, u.email, v.tax_title \
FROM lie_pay_log p \
LEFT JOIN lie_order o ON p.order_id = o.order_id \
LEFT JOIN lie_user_main u ON p.user_id = u.user_id \
LEFT JOIN lie_order_invoice v ON p.order_id = v.order_id \
WHERE \
o.is_type = 0 \
AND u.is_test = 0 \
AND u.is_type = 0 \
AND o.order_pay_type != 3 \
AND p.pay_time BETWEEN %d AND %d" % (ac_start_time_2, ac_end_time_2)
result = DBHandler.read(db, sql)
for row in result:
user_id = row[0]
pay_amount = float(row[1])
currency = row[2]
sale_id = row[3]
order_id = row[4]
pay_time = row[5]
mobile = row[6]
email = row[7]
tax_title = row[8]
sql = 'select name from user_info where userId = %d' % sale_id
results = DBHandler.read(db=ic_db, sql=sql)
sale_man = results[0][0] if len(results) > 0 else ''
pay_amount = pay_amount if currency == 1 else pay_amount * exchange
if user_id not in ud:
ud[user_id] = {'account': mobile if mobile != '' else email,
'tax_title': tax_title,
'sale_man': sale_man,
'pay_amount': pay_amount,
'order': [order_id]}
else:
ud[user_id]['pay_amount'] += pay_amount
ud[user_id]['order'].append(order_id)
for user_id in ud:
order = ud[user_id]['order']
order_str = "("
for order_id in order:
order_str += str(order_id) + ","
order_str = order_str[:-1] + ")"
sql = "SELECT brand_name,supplier_name FROM lie_order_items WHERE order_id IN %s" % order_str
bs = DBHandler.read(db, sql)
brand_name = ""
supplier_name = ""
for row in bs:
bn = row[0]
sn = row[1]
brand_name += bn + ", " if bn not in brand_name else ""
supplier_name += sn + ", " if sn not in supplier_name else ""
rd.append({'account': ud[user_id]['account'],
'tax_title': ud[user_id]['tax_title'],
'sale_man': ud[user_id]['sale_man'],
'pay_amount': ud[user_id]['pay_amount'],
'supplier_name': supplier_name,
'brand_name': brand_name})
# Excel标题
title = ['账号', '下单公司名', '对应交易员', '累计实付金额', '供应商', '品牌']
# Excel内容
content = ['account', 'tax_title', 'sale_man', 'pay_amount', 'supplier_name', 'brand_name']
ExcelHandler.write_to_excel(title, content, rd, "result", result_type=2)
"""
新用户明细
"""
@staticmethod
def pipeline_new_user_detail():
ud = {}
rd = []
db = ConnList.Order()
ic_db = ConnList.IcData()
exchange = ExERP(DateHandler.now_date(0, 1)).get_erp_exchange()
ac_start_time = 1562515200
ac_end_time = 1564156800
ac_start_time_2 = 1560096000
ac_end_time_2 = 1561737600
sql = "SELECT p.user_id, p.pay_amount, o.currency, o.sale_id, p.order_id, p.pay_time, u.mobile, u.email, v.tax_title \
FROM lie_pay_log p \
LEFT JOIN lie_order o ON p.order_id = o.order_id \
LEFT JOIN lie_user_main u ON p.user_id = u.user_id \
LEFT JOIN lie_order_invoice v ON p.order_id = v.order_id \
WHERE p.user_id NOT IN \
(SELECT p.user_id FROM lie_pay_log p LEFT JOIN lie_order o ON p.order_id = o.order_id WHERE p.pay_time < %d AND o.order_type = 1 AND o.order_goods_type IN (1,2) GROUP BY p.user_id) \
AND p.pay_time \
BETWEEN %d AND %d \
AND o.order_type = 1 \
AND o.order_goods_type IN (1,2) \
AND o.is_type = 0 \
AND o.order_pay_type != 3 \
AND u.is_test = 0 \
AND o.status > 2 \
AND u.is_type = 0" \
% (ac_start_time_2, ac_start_time_2, ac_end_time_2)
result = DBHandler.read(db, sql)
for row in result:
user_id = row[0]
pay_amount = float(row[1])
currency = row[2]
sale_id = row[3]
order_id = row[4]
pay_time = DateHandler.unix_to_date(row[5], "%Y-%m-%d")
mobile = row[6]
email = row[7]
tax_title = row[8]
sql = 'select name from user_info where userId = %d' % sale_id
results = DBHandler.read(db=ic_db, sql=sql)
sale_man = results[0][0] if len(results) > 0 else ''
pay_amount = pay_amount if currency == 1 else pay_amount * exchange
if user_id not in ud:
ud[user_id] = {'account': mobile if mobile != '' else email,
'tax_title': tax_title,
'sale_man': sale_man,
'pay_amount': pay_amount,
'order': [order_id],
'pay_time': [pay_time]
}
else:
ud[user_id]['pay_amount'] += pay_amount
ud[user_id]['order'].append(order_id)
if pay_time not in ud[user_id]['pay_time']:
ud[user_id]['pay_time'].append(pay_time)
for user_id in ud:
order = ud[user_id]['order']
order_str = "("
for order_id in order:
order_str += str(order_id) + ","
order_str = order_str[:-1] + ")"
sql = "SELECT brand_name,supplier_name FROM lie_order_items WHERE order_id IN %s" % order_str
bs = DBHandler.read(db, sql)
brand_name = ""
supplier_name = ""
for row in bs:
bn = row[0]
sn = row[1]
brand_name += bn + ", " if bn not in brand_name else ""
supplier_name += sn + ", " if sn not in supplier_name else ""
rd.append({'account': ud[user_id]['account'],
'tax_title': ud[user_id]['tax_title'],
'sale_man': ud[user_id]['sale_man'],
'pay_times': len(ud[user_id]['pay_time']),
'pay_amount': ud[user_id]['pay_amount'],
'supplier_name': supplier_name,
'brand_name': brand_name})
# Excel标题
title = ['账号', '下单公司名', '对应交易员', '付款笔数', '累计实付金额', '供应商', '品牌']
# Excel内容
content = ['account', 'tax_title', 'sale_man', 'pay_times', 'pay_amount', 'supplier_name', 'brand_name']
ExcelHandler.write_to_excel(title, content, rd, "result", result_type=2)
"""
活动新用户明细
"""
@staticmethod
def pipeline_new_user_detail_2():
    """Export a one-row-per-user Excel report of first-purchase users
    inside the activity window.

    Each row carries the account (mobile or e-mail), invoice company,
    sales rep, first-purchase amount/date, total paid amount, count of
    distinct pay days, and the distinct suppliers / brands across all of
    the user's orders.  Non-CNY amounts are normalized with the daily ERP
    exchange rate.  Side effects only (DB reads + Excel file); no return.
    """
    ud = {}   # user_id -> aggregated per-user data
    rd = []   # rows handed to the Excel writer
    db = ConnList.Order()
    ic_db = ConnList.IcData()
    exchange = ExERP(DateHandler.now_date(0, 1)).get_erp_exchange()
    # Activity window, epoch seconds (presumably 2019-10-08 .. 2019-11-01
    # local time — TODO confirm).
    ac_start_time = 1570464000
    ac_end_time = 1572537600
    # First-purchase pays inside the window: the subquery removes any user
    # who already paid for a matching order before the window start.
    sql = "SELECT p.user_id, p.pay_amount, o.currency, o.sale_id, p.order_id, p.pay_time, u.mobile, u.email, v.tax_title, o.create_time \
           FROM lie_pay_log p \
           LEFT JOIN lie_order o ON p.order_id = o.order_id \
           LEFT JOIN lie_user_main u ON p.user_id = u.user_id \
           LEFT JOIN lie_order_invoice v ON p.order_id = v.order_id \
           WHERE p.user_id NOT IN \
           (SELECT p.user_id FROM lie_pay_log p LEFT JOIN lie_order o ON p.order_id = o.order_id WHERE p.pay_time < %d AND o.order_type = 1 AND o.order_goods_type IN (1,2) GROUP BY p.user_id) \
           AND p.pay_time BETWEEN %d AND %d \
           AND o.create_time BETWEEN %d AND %d \
           AND o.order_type = 1 \
           AND o.order_goods_type IN (1,2) \
           AND o.is_type = 0 \
           AND o.order_pay_type != 3 \
           AND o.status > 2 \
           AND u.is_test = 0 \
           AND u.is_type = 0 \
           ORDER BY o.create_time" \
          % (ac_start_time, ac_start_time, ac_end_time, ac_start_time, ac_end_time)
    result = DBHandler.read(db, sql)
    for row in result:
        user_id = row[0]
        pay_amount = float(row[1])
        currency = row[2]
        sale_id = row[3]
        order_id = row[4]
        mobile = row[6]
        email = row[7]
        tax_title = row[8]
        order_time = DateHandler.unix_to_date(row[9], "%Y-%m-%d")
        # Sales rep name comes from the IC database.
        sql = 'select name from user_info where userId = %d' % sale_id
        results = DBHandler.read(db=ic_db, sql=sql)
        sale_man = results[0][0] if len(results) > 0 else ''
        # Normalize to CNY (currency 1 = CNY, anything else is converted).
        pay_amount = pay_amount if currency == 1 else pay_amount * exchange
        if user_id not in ud:
            ud[user_id] = {
                'account': mobile if mobile != '' else email,
                'tax_title': tax_title,
                'sale_man': sale_man,
                'order_list': {order_time: pay_amount},  # pay day -> amount
                'order_ids': [order_id]
            }
        else:
            user = ud[user_id]
            user['order_list'][order_time] = user['order_list'].get(order_time, 0) + pay_amount
            if order_id not in user['order_ids']:
                user['order_ids'].append(order_id)
    for user_id in ud:
        user = ud[user_id]
        order_str = "(" + ",".join(str(o) for o in user['order_ids']) + ")"
        sql = "SELECT brand_name,supplier_name FROM lie_order_items WHERE order_id IN %s" % order_str
        bs = DBHandler.read(db, sql)
        # Distinct brand / supplier names in first-seen order.  FIX: the
        # previous substring test (`bn not in brand_name`) wrongly dropped
        # any name contained in an earlier, longer one.
        brands = []
        suppliers = []
        for item in bs:
            if item[0] and item[0] not in brands:
                brands.append(item[0])
            if item[1] and item[1] not in suppliers:
                suppliers.append(item[1])
        brand_name = "".join(b + ", " for b in brands)
        supplier_name = "".join(s + ", " for s in suppliers)
        # First purchase = earliest pay day: the query is ordered by
        # create_time, so the first dict key is the first purchase day.
        fs_amount = 0
        fs_time = 0
        pay_times = 0
        pay_amount = 0
        for order_time in user['order_list']:
            amount = user['order_list'][order_time]
            if fs_amount == 0:
                fs_amount = amount
                fs_time = order_time
            pay_times += 1
            pay_amount += amount
        rd.append({
            'user_id': user_id,
            'account': user['account'],
            'tax_title': user['tax_title'],
            'sale_man': user['sale_man'],
            'pay_times': pay_times,
            'fs_amount': fs_amount,
            'pay_amount': pay_amount,
            'fs_time': fs_time,
            'supplier_name': supplier_name,
            'brand_name': brand_name
        })
    # Excel header titles (Chinese, consumed by the report template)
    title = ['会员ID', '账号', '下单公司名', '对应交易员', '首购付款金额', '累计实付金额', '累计付款笔数', '首购付款时间', '供应商', '品牌']
    # Keys of each `rd` row, in header order
    content = ['user_id', 'account', 'tax_title', 'sale_man', 'fs_amount', 'pay_amount', 'pay_times', 'fs_time', 'supplier_name', 'brand_name']
    ExcelHandler.write_to_excel(title, content, rd, "result", result_type=2)
"""
下单并付款用户
"""
@staticmethod
def pipeline_new_user_detail_3():
    """Export an Excel report for a fixed list of paid users, flagging
    whether each was a new user (no order before the window start).

    One row per user: account, invoice company, new-user flag, first pay
    date, accumulated paid amount (CNY-normalized), distinct order count,
    sales rep, and distinct suppliers / brands.  Side effects only.
    """
    ud = {}   # user_id -> aggregated per-user data
    rd = []   # rows handed to the Excel writer
    db = ConnList.Order()
    ic_db = ConnList.IcData()
    exchange = ExERP(DateHandler.now_date(0, 1)).get_erp_exchange()
    # NOTE(review): with ac_start_time = 0 the "is new" check below can
    # never find an earlier order, so every user is flagged new — confirm
    # this is the intended window.
    ac_start_time = 0
    ac_end_time = 1572537600
    # (order_pay_type filter intentionally disabled here, unlike detail_2)
    sql = "SELECT p.user_id, p.pay_amount, o.currency, o.sale_id, p.order_id, p.pay_time, u.mobile, u.email, v.tax_title, o.create_time \
           FROM lie_pay_log p \
           LEFT JOIN lie_order o ON p.order_id = o.order_id \
           LEFT JOIN lie_user_main u ON p.user_id = u.user_id \
           LEFT JOIN lie_order_invoice v ON p.order_id = v.order_id \
           WHERE o.create_time BETWEEN %d AND %d \
           AND p.pay_time BETWEEN %d AND %d \
           AND o.order_type = 1 \
           AND o.order_goods_type IN (1,2) \
           AND o.is_type = 0 \
           AND o.status > 2 \
           AND u.is_test = 0 \
           AND u.is_type = 0 \
           AND u.user_id IN (159460,4049,136511,137003,144156,159702,160313,160374,160546) \
           ORDER BY o.create_time" \
          % (ac_start_time, ac_end_time, ac_start_time, ac_end_time)
    result = DBHandler.read(db, sql)
    for row in result:
        user_id = row[0]
        pay_amount = float(row[1])
        currency = row[2]
        sale_id = row[3]
        order_id = row[4]
        pay_time = DateHandler.unix_to_date(row[5], "%Y-%m-%d")
        mobile = row[6]
        email = row[7]
        tax_title = row[8]
        # Sales rep name from the IC database.
        sql = 'select name from user_info where userId = %d' % sale_id
        results = DBHandler.read(db=ic_db, sql=sql)
        sale_man = results[0][0] if len(results) > 0 else ''
        # Normalize to CNY (currency 1 = CNY, anything else is converted).
        pay_amount = pay_amount if currency == 1 else pay_amount * exchange
        if user_id not in ud:
            # New-user check, done once per user (hoisted out of the
            # per-row path — the flag is only consumed on first insert).
            sql = "SELECT 1 FROM lie_order WHERE create_time < %d AND user_id = %d" % (ac_start_time, user_id)
            is_new = DBHandler.read(db, sql)
            flag = '是' if not is_new else '否'
            ud[user_id] = {
                'tax_title': tax_title,
                'account': mobile if mobile != '' else email,
                'is_new': flag,
                'pay_time': pay_time,     # first pay date (rows ordered by create_time)
                'pay_amount': pay_amount,
                'pay_times': [order_id],  # distinct paid order ids
                'sale_man': sale_man,
            }
        else:
            ud[user_id]['pay_amount'] += pay_amount
            if order_id not in ud[user_id]['pay_times']:
                ud[user_id]['pay_times'].append(order_id)
    for user_id in ud:
        user = ud[user_id]
        order_str = "(" + ",".join(str(o) for o in user['pay_times']) + ")"
        sql = "SELECT brand_name,supplier_name FROM lie_order_items WHERE order_id IN %s" % order_str
        bs = DBHandler.read(db, sql)
        # Distinct brand / supplier names in first-seen order.  FIX: the
        # previous substring test (`bn not in brand_name`) wrongly dropped
        # any name contained in an earlier, longer one.
        brands = []
        suppliers = []
        for item in bs:
            if item[0] and item[0] not in brands:
                brands.append(item[0])
            if item[1] and item[1] not in suppliers:
                suppliers.append(item[1])
        brand_name = "".join(b + ", " for b in brands)
        supplier_name = "".join(s + ", " for s in suppliers)
        rd.append({
            'user_id': user_id,
            'tax_title': user['tax_title'],
            'account': user['account'],
            'is_new': user['is_new'],
            'pay_time': user['pay_time'],
            'pay_amount': user['pay_amount'],
            'pay_times': len(user['pay_times']),
            'sale_man': user['sale_man'],
            'supplier_name': supplier_name,
            'brand_name': brand_name
        })
    # Excel header titles (Chinese, consumed by the report template)
    title = ['会员ID', '会员账号', '下单公司名', '是否新用户', '付款时间', '累计付款金额(元)', '累计付款笔数', '对应交易员', '供应商', '品牌']
    # Keys of each `rd` row, in header order
    content = ['user_id', 'account', 'tax_title', 'is_new', 'pay_time', 'pay_amount', 'pay_times', 'sale_man', 'supplier_name', 'brand_name']
    ExcelHandler.write_to_excel(title, content, rd, "result", result_type=2)
from utils.excel_handler import ExcelHandler
from translate.ts_behavior import TsBehavior
from extract.ex_user import ExUser
from extract.ex_behavior import ExBehavior
from extract.ex_dashboard import ExDashboard
from extract.ex_order import ExOrder
from utils.date_handler import DateHandler
from load.load_mysql import LoadMysql
from config.conn_list import ConnList
from utils.db_handler import DBHandler
import time
class PiBehavior:
@staticmethod
def pipeline_user_behavior():
    """Run the behavior-conversion translation for users registered in a
    fixed window (epoch seconds; presumably Dec 2018 — confirm timezone).
    """
    window = {'start_time': 1543593600, 'end_time': 1546272000, 'condition': []}
    extractor = ExUser('新用户购买')
    # Other candidate sources exist on the extractor (first-purchase
    # users via new_user_order, file-based users via file_user); the
    # registered-user set is the one currently analysed.
    reg_users = extractor.reg_user(window)
    TsBehavior(name='行为转化', data=reg_users).trans_user_behavior(window)
@staticmethod
def pipeline_sum_behavior():
    """Aggregate yesterday's behavior funnel per platform (PC / H5),
    merge in order counters from the dashboard tables, and persist one
    row per platform into `lie_sum_behavior_cal`.

    FIX: the PC and H5 merge/persist sections were duplicated verbatim;
    they are now factored into inner helpers (behavior unchanged).
    """
    # Test IPs are excluded from the behavior aggregation.
    test_ip = [d['ip'] for d in ExUser('测试ip').test_ip()]
    # Yesterday's behavior log, translated into per-platform counters.
    where = {'start_time': DateHandler.date_time(1), 'end_time': DateHandler.date_time(0),
             'condition': ['behavior_id > 0']}
    ex_be = ExBehavior('读取行为日志')
    be_data = ex_be.behavior_log(where)
    ts_be = TsBehavior(name='行为日志计算', data=be_data)
    be_data_ts = ts_be.trans_sum_behavior(test_ip=test_ip)

    def _merge_orders(order_type, pf):
        # Pull yesterday's order counters (order_type 1 = PC, 2 = H5)
        # and graft them onto the behavior aggregate.
        where = {'condition': ['cal_ts = \'' + DateHandler.now_date(1, 1) + '\'',
                               'order_type = %d' % order_type]}
        od_data = ExDashboard('读取订单').sum_order(where)
        be_data_ts['new_order_count'][pf] = int(od_data[0]['sum(order_count)'])
        be_data_ts['pay_order_count'][pf] = int(od_data[0]['sum(order_paid_count)'])

    _merge_orders(1, 'pc')
    _merge_orders(2, 'h5')

    metric_cols = ['search_count', 'custom_count', 'addcart_count', 'buy_count', 'confirm_count',
                   'pay_count', 'new_order_count', 'pay_order_count']

    def _persist(pf, platform_code):
        # One row per platform; `value` order must match `col` order.
        col = list(metric_cols)
        value = [str(be_data_ts[c][pf]) for c in metric_cols]
        col += ['insert_time', 'cal_ts', 'platform']
        value += [str(DateHandler.now_datetime()),
                  '\'' + DateHandler.now_date(1, 1) + '\'',
                  platform_code]
        LoadMysql.keyword_load(col=col, table='lie_sum_behavior_cal', value=value, db=ConnList.Dashboard())

    _persist('pc', '1')
    _persist('h5', '2')
@staticmethod
def pipeline_all_behavior():
    """Translate the full behavior log of a fixed window into the output
    behavior report (window in epoch seconds)."""
    window = {'start_time': 1533052800, 'end_time': 1548950400, 'condition': ['behavior_id > 0']}
    log_rows = ExBehavior('行为转化').behavior_log(window)
    print(len(log_rows))
    translator = TsBehavior(name="行为转化", data=log_rows)
    translator.trans_output_behavior()
@staticmethod
def pipeline_order_behavior():
    """Translate all orders of a fixed window (epoch seconds) into the
    order behavior report."""
    window = {'start_time': 1541001600, 'end_time': 1543593600, 'condition': ['order_id > 0']}
    order_rows = ExOrder('订单详情').all_order(window)
    print(len(order_rows))
    translator = TsBehavior(name="行为转化", data=order_rows)
    translator.trans_order_behavior()
@staticmethod
def pipeline_reg_without_login():
    """Report exit behavior for users who registered but never logged in
    during the reference period (timestamps in epoch seconds)."""
    # Users with no login between 2018-10-03 and 2019-01-03 (approx.).
    no_login_filter = ('AND user_id NOT IN (SELECT user_id FROM lie_user_login_log '
                       'WHERE last_login_time BETWEEN 1538496000 AND 1546444800 GROUP BY user_id)')
    window = {'start_time': 1, 'end_time': 1546444800, 'condition': [no_login_filter]}
    reg_users = ExUser('注册未登录用户').reg_user(window)
    print(len(reg_users))
    translator = TsBehavior(name="会员行为", data=reg_users)
    translator.trans_user_exit_behavior('(1,2,3)')
@staticmethod
def pipeline_no_login():
    """Export accounts inactive for more than 60 days to an Excel file.

    Candidates come from the behavior log (behaviors 4/11) and from
    offline orders, skipping test accounts; a candidate is kept when its
    latest login is more than 60 days before the hard-coded reference
    timestamp.  FIX: membership tests now use sets (the old list scans
    were O(n) per candidate, O(n²) overall); iteration order preserved.
    """
    candidates = []        # ordered candidate user ids
    candidate_set = set()  # O(1) membership mirror of `candidates`
    rows = {}              # running index -> excel row
    title = ['账号', '时间']
    content = ['mobile', 'create_time']
    # Reference "now" (hard-coded epoch seconds, ~2019-04 — TODO confirm).
    ref_ts = 1554185690
    # Test accounts to exclude.
    sql = "SELECT user_id FROM lie_user_main WHERE is_test = 1"
    db = ConnList.Order()
    test_ids = {row[0] for row in DBHandler.read(db, sql)}
    db.close()
    # Candidates from the behavior log.
    sql = "SELECT user_id FROM lie_behavior_log WHERE create_time BETWEEN %d AND %d AND behavior in (4, 11) GROUP BY user_id" % (0, ref_ts)
    db = ConnList.Behavior()
    for row in DBHandler.read(db, sql):
        uid = row[0]
        if uid not in test_ids and uid not in candidate_set:
            candidates.append(uid)
            candidate_set.add(uid)
    db.close()
    # Candidates from offline orders.
    sql = "SELECT user_id FROM lie_order WHERE order_type = 1 AND is_type != 0 GROUP BY user_id"
    db = ConnList.Order()
    for row in DBHandler.read(db, sql):
        uid = row[0]
        if uid not in test_ids and uid not in candidate_set:
            candidates.append(uid)
            candidate_set.add(uid)
    index = 0
    for user_id in candidates:
        sql = "SELECT max(last_login_time),mobile,email,create_time FROM lie_user_login_log l LEFT JOIN lie_user_main u ON u.user_id = l.user_id WHERE l.user_id = %d" % user_id
        login = DBHandler.read(db, sql)
        if len(login) > 0 and login[0][0] is not None:
            # Keep users inactive for more than 60 days.
            if ref_ts - login[0][0] > 86400 * 60:
                rows[index] = {'mobile': login[0][1] if login[0][1] != '' else login[0][2],
                               'create_time': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(login[0][3]))}
                index += 1
                print(index)
        else:
            # NOTE(review): aborts at the first user with no login record
            # at all — looks like a debugging leftover, kept as-is to
            # preserve the existing behavior.
            print('None')
            print(user_id)
            break
    ExcelHandler.write_to_excel_with_openpyxl(title, content, rows, "result.xls")
from extract.ex_user import ExUser
from extract.ex_purchase import ExPurchase
from extract.ex_order import ExOrder
from extract.ex_shence import ExShenCe
from extract.ex_crm import ExCrm
from extract.ex_dashboard import ExDashboard
from extract.ex_erp import ExERP
from translate.ts_user import TsUser
from translate.ts_order import TsOrder
from utils.date_handler import DateHandler
from utils.db_handler import DBHandler
from load.load_mysql import LoadMysql
from config.conn_list import ConnList
from pipeline.pi_email import PiEmail
from translate.ts_daily import TsDaily
from translate.ts_crm import TsCrm
from utils.msg_handler import MsgHandler
import traceback
import time
class PiDaily:
@staticmethod
def pipeline_core_daily():
    """Build and send the core daily report.

    Aggregates yesterday's logins, registrations, coupons, SKU counts,
    purchasing, orders and (best-effort) ShenCe traffic, persists each
    section into its dashboard table, then e-mails the combined report.
    On any failure a DingTalk alert with the traceback is sent instead.

    FIX: `sc_data` is now pre-initialized before the inner ShenCe try;
    previously a ShenCe failure left it unbound, so the later
    `send_data.update(sc_data)` raised NameError and the whole report
    (already fully persisted) aborted with a false alarm.
    """
    try:
        # Yesterday's window.
        start_time = DateHandler.date_time(1)
        end_time = DateHandler.date_time(0)
        # --- Logins ---------------------------------------------------
        u_con = {'start_time': start_time, 'end_time': end_time, 'condition': []}
        user_login = TsUser('登录', ExUser('').login_user(u_con)).trans_pf_user('platform')
        col = ['login_sum_count', 'login_pc_count', 'login_h5_count', 'insert_time', 'cal_ts']
        log_data = {'login_sum_count': user_login['sum'], 'login_pc_count': user_login['pc'],
                    'login_h5_count': user_login['h5'], 'insert_time': DateHandler.now_datetime(),
                    'cal_ts': DateHandler.now_date(1, 1)}
        LoadMysql.simple_dict_load(col=col, table='lie_user_login_cal',
                                   data=[log_data],
                                   db=ConnList.Dashboard(), cal_time=False)
        # --- Registrations --------------------------------------------
        user_reg = TsUser('注册', ExUser('').reg_user(u_con)).trans_pf_user('create_device')
        col = ['reg_sum_count', 'reg_pc_count', 'reg_h5_count', 'insert_time', 'cal_ts']
        reg_data = {'reg_sum_count': user_reg['sum'],
                    'reg_pc_count': user_reg['pc'],
                    'reg_h5_count': user_reg['h5'],
                    'insert_time': DateHandler.now_datetime(),
                    'cal_ts': DateHandler.now_date(1, 1)}
        LoadMysql.simple_dict_load(col=col,
                                   table='lie_user_reg_cal',
                                   data=[reg_data],
                                   db=ConnList.Dashboard(), cal_time=False)
        # --- Coupons --------------------------------------------------
        user_coupon = TsUser('优惠券', ExUser('').user_coupon(u_con)).trans_pf_regex_user('source')
        col = ['coupon_sum_count', 'coupon_pc_count', 'coupon_h5_count', 'insert_time', 'cal_ts']
        coupon_data = {'coupon_sum_count': user_coupon['sum'], 'coupon_pc_count': user_coupon['pc'],
                       'coupon_h5_count': user_coupon['h5'], 'insert_time': DateHandler.now_datetime(),
                       'cal_ts': DateHandler.now_date(1, 1)}
        LoadMysql.simple_dict_load(col=col, table='lie_coupon_cal',
                                   data=[coupon_data],
                                   db=ConnList.Dashboard(), cal_time=False)
        # --- SKU overview (fetched from the search service) -----------
        key = 'djdj93ichuntj56dksisearchdj45eieapi'
        url = 'http://so12.ichunt.com/search/ServerApi/index'
        sku = DBHandler.esEncryptData(key, url)
        col = ['goods_type', 'goods_self_up_count', 'goods_self_down_count', 'goods_upload_count',
               'goods_sum_count', 'cal_ts', 'insert_time']
        # goods_type codes: 1 = self-operated, 2 = joint-venture, 3 = exclusive
        col_dict = {'self': 1, 'ly': 2, 'zm': 3}
        rd = []
        for c in col_dict:
            rd.append({'goods_type': col_dict[c], 'goods_self_up_count': sku[c+'_up'],
                       'goods_self_down_count': sku[c+'_down'],
                       'goods_upload_count': sku[c+'_inc'] if c+'_inc' in sku else 0,
                       'goods_sum_count': sku[c+'_total'], 'insert_time': DateHandler.now_datetime(),
                       'cal_ts': DateHandler.now_date(1, 1)})
        LoadMysql.simple_dict_load(col=col, table='lie_lx_goods_cal', data=rd,
                                   db=ConnList.Dashboard(), cal_time=False)
        # --- Purchasing -----------------------------------------------
        pc_yes_con = {'start_time': start_time, 'end_time': end_time,
                      'condition': ['i.status = 1', 'p.status > 1']}
        pc_yes_self_con = {'start_time': start_time, 'end_time': end_time,
                           'condition': ['i.status = 1', 'p.status > 1', 'p.picking_type = 1']}
        pc_yes_consign_con = {'start_time': start_time, 'end_time': end_time,
                              'condition': ['i.status = 1', 'p.status > 1', 'p.picking_type != 1']}
        pc_all_con = {'start_time': 0, 'end_time': end_time,
                      'condition': ['i.status = 1', 'p.status > 1']}
        in_stock_con = {'start_time': start_time, 'end_time': end_time}
        pc_yes_count = ExPurchase('').purchase_count(pc_yes_con)
        pc_yes_self_count = ExPurchase('').purchase_count(pc_yes_self_con)
        pc_yes_consign_count = ExPurchase('').purchase_count(pc_yes_consign_con)
        pc_all_count = ExPurchase('').purchase_count(pc_all_con)
        in_stock = ExPurchase('').in_stock(in_stock_con)
        wait_stock = ExPurchase('').wait_stock()
        wait_examine = ExPurchase('').wait_examine()
        col = ['putaway_yes_count',
               'wait_putaway_sum_count',
               'wait_examine_sum_count',
               'purchase_yes_count',
               'purchase_yes_self_count',
               'purchase_yes_consign_count',
               'purchase_sum_count',
               'purchase_yes_amount',
               'purchase_yes_self_amount',
               'purchase_yes_consign_amount',
               'purchase_sum_amount',
               'insert_time',
               'cal_ts']
        purchase_data = {'putaway_yes_count': in_stock[0],
                         'wait_putaway_sum_count': wait_stock[0],
                         'wait_examine_sum_count': wait_examine[0],
                         'purchase_yes_count': pc_yes_count[0]['count'],
                         'purchase_yes_self_count': pc_yes_self_count[0]['count'],
                         'purchase_yes_consign_count': pc_yes_consign_count[0]['count'],
                         'purchase_sum_count': pc_all_count[0]['count'],
                         'purchase_yes_amount': pc_yes_count[0]['amount'],
                         'purchase_yes_self_amount': pc_yes_self_count[0]['amount'],
                         'purchase_yes_consign_amount': pc_yes_consign_count[0]['amount'],
                         'purchase_sum_amount': pc_all_count[0]['amount'],
                         'insert_time': DateHandler.now_datetime(),
                         'cal_ts': DateHandler.now_date(1, 1)}
        LoadMysql.simple_dict_load(col=col, table='lie_purchase_cal',
                                   data=[purchase_data],
                                   db=ConnList.Dashboard(), cal_time=False)
        # --- Orders ---------------------------------------------------
        # JD joint-venture orders are tracked separately and merged into
        # the 'hz' (aggregate) and 'jd' platform buckets below.
        jd_all_ly_con = {'start_time': start_time, 'end_time': end_time,
                         'condition': ['id > 0']}
        jd_paid_ly_con = {'start_time': start_time, 'end_time': end_time,
                          'condition': ['order_state != \'TRADE_CANCELED\'']}
        jd_all_order = ExOrder('京东联营').jdLyOrder(jd_all_ly_con)
        jd_paid_order = ExOrder('京东联营').jdLyOrder(jd_paid_ly_con)
        # Platform orders (payable).
        o_con = {'start_time': start_time, 'end_time': end_time,
                 'condition': ['order_id > 0']}
        pf_order = TsOrder('平台应付订单', ExOrder('').all_pf_order(o_con)).trans_order_cal()
        pf_order['hz']['count'] += jd_all_order[0]['count']
        pf_order['jd']['count'] += jd_all_order[0]['count']
        pf_order['hz']['amount'] += jd_all_order[0]['amount']
        pf_order['jd']['amount'] += jd_all_order[0]['amount']
        # Platform orders (paid).
        p_con = {'start_time': start_time, 'end_time': end_time,
                 'condition': ['o.status > 2']}
        pf_paid_order = TsOrder('平台实付订单', ExOrder('').order_pf_price(p_con)).trans_order_paid_cal()
        pf_paid_order['hz']['count'] += jd_paid_order[0]['count']
        pf_paid_order['jd']['count'] += jd_paid_order[0]['count']
        pf_paid_order['hz']['amount'] += jd_paid_order[0]['amount']
        pf_paid_order['jd']['amount'] += jd_paid_order[0]['amount']
        # Average paid amount per order (per platform).
        for pf in pf_paid_order:
            if pf_paid_order[pf]['count'] > 0:
                pf_paid_order[pf]['pct'] = round(pf_paid_order[pf]['amount']/pf_paid_order[pf]['count'], 2)
            else:
                pf_paid_order[pf]['pct'] = 0
        col = ['order_type', 'order_count', 'order_money', 'order_paid_count', 'order_people',
               'order_paid_people', 'order_paid_money', 'order_pct', 'order_favour_count',
               'order_favour_money', 'insert_time', 'cal_ts']
        order_db = {'pc': 1, 'h5': 2, 'jd': 3, 'ly': 4, 'zy': 5, 'ht': 6, 'hz': 7, 'xcx': 8}
        for tp in order_db:
            LoadMysql.simple_dict_load(col=col, table='lie_order_cal',
                                       data=[{'order_type': order_db[tp],
                                              'order_count': pf_order[tp]['count'],
                                              'order_money': pf_order[tp]['amount'],
                                              'order_people': len(pf_order[tp]['people']),
                                              'order_paid_count': pf_paid_order[tp]['count'],
                                              'order_paid_money': pf_paid_order[tp]['amount'],
                                              'order_paid_people': len(pf_paid_order[tp]['people']),
                                              'order_pct': pf_paid_order[tp]['pct'],
                                              'order_favour_count': pf_paid_order[tp]['favour_count'],
                                              'order_favour_money': pf_paid_order[tp]['favour_amount'],
                                              'insert_time': DateHandler.now_datetime(),
                                              'cal_ts': DateHandler.now_date(1, 1)}],
                                       db=ConnList.Dashboard(), cal_time=False)
        # --- ShenCe traffic (best-effort) ------------------------------
        sc_data = {}  # FIX: pre-bind so a ShenCe failure can't break the e-mail step
        try:
            pv = ExShenCe('').ex_pv()
            uv = ExShenCe('').ex_uv()
            sum_pv = ExShenCe('').sum_pv()
            sum_uv = ExShenCe('').sum_uv()
            print(pv, uv, sum_pv, sum_uv)
            sc_data = {'pv': sum_pv[0], 'uv': sum_uv[0], 'pc_pv': pv['PC'][0], 'pc_uv': uv['PC'][0],
                       'h5_pv': pv['H5'][0], 'h5_uv': uv['H5'][0], 'cal_ts': DateHandler.now_date(1, 1)}
            col = ['pv', 'uv', 'pc_pv', 'pc_uv', 'h5_pv', 'h5_uv', 'cal_ts']
            LoadMysql.simple_dict_load(col=col, table='lie_shence_cal', data=[sc_data],
                                       db=ConnList.Dashboard(), cal_time=False)
        except Exception:
            # Traffic is optional: the report is still sent without it.
            pass
        # --- E-mail the combined report --------------------------------
        keyword = 'report-daily'
        send_data = {'yes_ts': DateHandler.now_date(0, 1)}
        send_data.update(sc_data)
        send_data.update(reg_data)
        send_data.update(log_data)
        send_data.update(coupon_data)
        send_data.update(purchase_data)
        # Flatten per-platform order counters into the e-mail payload.
        for tp in order_db:
            send_data[tp + '_order_count'] = pf_order[tp]['count']
            send_data[tp + '_order_money'] = pf_order[tp]['amount']
            send_data[tp + '_order_paid_count'] = pf_paid_order[tp]['count']
            send_data[tp + '_order_paid_money'] = pf_paid_order[tp]['amount']
            send_data[tp + '_order_pct'] = pf_paid_order[tp]['pct']
            send_data[tp + '_order_favour_count'] = pf_paid_order[tp]['favour_count']
            send_data[tp + '_order_favour_money'] = pf_paid_order[tp]['favour_amount']
        # Flatten SKU counters into the e-mail payload.
        for cd in col_dict:
            send_data[cd + '_goods_self_up_count'] = sku[cd + '_up']
            send_data[cd + '_goods_self_down_count'] = sku[cd + '_down']
            send_data[cd + '_goods_upload_count'] = sku[cd + '_inc'] if cd + '_inc' in sku else 0
            send_data[cd + '_goods_sum_count'] = sku[cd + '_total']
        PiEmail.pipeline_core_report(send_data, keyword)
    except Exception:
        MsgHandler.send_dd_msg('【风控报警】:运营日报%s' % traceback.format_exc())
"""
运营日报
"""
@staticmethod
def pipeline_operate_daily():
    """Assemble yesterday's operations daily report.

    Pulls traffic (ShenCe analytics), registration, search, order and
    first-purchase metrics for the previous day, writes a single row into
    `lie_operate_daily`, and triggers the daily report e-mail.
    Side effects only; no return value.
    """
    # Column list for the insert; must stay aligned with the `rs` keys below.
    col = ['pv',
           'uv',
           'depth',
           'bounce_rate',
           'stay_time',
           'reg_user',
           'log_user',
           'active_user',
           'search_user',
           'search_count',
           'goods_list',
           'order_count',
           'order_user',
           'order_amount',
           'ly_order_count',
           'ly_order_user',
           'ly_order_amount',
           'zy_order_count',
           'zy_order_user',
           'zy_order_amount',
           'new_order_user',
           'new_paid_order_user',
           'ly_new_order_user',
           'zy_new_order_user',
           'ly_new_paid_order_user',
           'zy_new_paid_order_user',
           'ly_cancel_order',
           'zy_cancel_order',
           'cal_ts']
    # Yesterday, both as a date string and as a unix-timestamp range.
    cal_ts = DateHandler.now_date(1, 1)
    cal_start_ts = DateHandler.date_time(1)
    cal_end_ts = DateHandler.date_time(0)
    # Traffic (ShenCe analytics)
    pu = ExDashboard('').ex_shence_pu(condition={'start_time': cal_ts, 'condition': []})
    bounce_rate = ExShenCe('').bounce_rate(cal_ts, cal_ts)
    stay_time = ExShenCe('').stay_time(cal_ts, cal_ts)
    # Newly registered users (excluding the 'zh' ad-tag channel)
    u_con = {'start_time': cal_start_ts, 'end_time': cal_end_ts, 'condition': ['AND create_device not like \'%adtag=zh%\'']}
    user_reg = TsUser('注册', ExUser('').reg_user(u_con)).trans_pf_user('create_device')
    log_user = ExShenCe('').log_user(cal_ts, cal_ts)
    active_user = ExShenCe('').active_user(cal_ts, cal_ts)
    # Search activity
    search_user = ExShenCe('').search_user(cal_ts, cal_ts)
    search_count = ExShenCe('').search_count(cal_ts, cal_ts)
    goods_list = ExDashboard('').sku_sum_goods(condition={'start_time': cal_ts, 'condition': []})
    # Orders (amounts normalized with the daily ERP exchange rate)
    exchange = ExERP(cal_ts).get_erp_exchange()
    all_order = ExOrder('').all_order(condition={'start_time': cal_start_ts, 'end_time': cal_end_ts, 'condition': ['order_id > 0']})
    ts_order = TsDaily('', all_order).trans_order(exchange)
    # First-purchase users (overall / joint-venture 'ly' / self-operated 'zy')
    fs_user = ExOrder('').first_order_user(condition={'start_time': cal_start_ts, 'end_time': cal_end_ts, 'condition': ['order_id > 0']})
    fs_ly_user = ExOrder('').first_ly_order_user(condition={'start_time': cal_start_ts, 'end_time': cal_end_ts, 'condition': ['order_id > 0']})
    fs_zy_user = ExOrder('').first_zy_order_user(condition={'start_time': cal_start_ts, 'end_time': cal_end_ts, 'condition': ['order_id > 0']})
    fs_paid_user = ExOrder('').first_paid_user(condition={'start_time': cal_start_ts, 'end_time': cal_end_ts, 'condition': ['order_id > 0']})
    fs_ly_paid_user = ExOrder('').first_ly_paid_user(condition={'start_time': cal_start_ts, 'end_time': cal_end_ts, 'condition': ['order_id > 0']})
    fs_zy_paid_user = ExOrder('').first_zy_paid_user(condition={'start_time': cal_start_ts, 'end_time': cal_end_ts, 'condition': ['order_id > 0']})
    # Cancelled orders (order_goods_type 1 = joint-venture, 2 = self-operated)
    ly_cancel_order = ExOrder('').cancel_order(condition={'start_time': cal_start_ts, 'end_time': cal_end_ts, 'condition': ['order_goods_type = 1']})
    zy_cancel_order = ExOrder('').cancel_order(condition={'start_time': cal_start_ts, 'end_time': cal_end_ts, 'condition': ['order_goods_type = 2']})
    # NOTE(review): log/active/search entries below store the raw value from
    # the ShenCe dict when it is non-empty — presumably already a count;
    # confirm against ExShenCe's return shape.
    rs = {'pv': pu[0]['pv'] if len(pu) > 0 else 0,
          'uv': pu[0]['uv'] if len(pu) > 0 else 0,
          'depth': pu[0]['pv'] / pu[0]['uv'] if len(pu) > 0 and pu[0]['uv'] > 0 else 0,
          'bounce_rate': bounce_rate['bounce_rate'],
          'stay_time': stay_time['stay_time'],
          'reg_user': user_reg['sum'],
          'log_user': log_user['log_user'] if len(log_user['log_user']) > 0 else 0,
          'active_user': active_user['active_user'] if len(active_user['active_user']) > 0 else 0,
          'search_user': search_user['search_user'] if len(search_user['search_user']) > 0 else 0,
          'search_count': search_count['search_count'] if len(search_count['search_count']) > 0 else 0,
          'goods_list': int(goods_list[0]['goods_list']) if goods_list[0]['goods_list'] is not None else 0,
          'order_count': ts_order['order_count'],
          'order_user': ts_order['order_user'],
          'order_amount': ts_order['order_amount'],
          # Joint-venture ('ly') orders
          'ly_order_count': ts_order['ly_count'],
          'ly_order_user': ts_order['ly_user'],
          'ly_order_amount': ts_order['ly_amount'],
          # Self-operated ('zy') orders
          'zy_order_count': ts_order['zy_count'],
          'zy_order_user': ts_order['zy_user'],
          'zy_order_amount': ts_order['zy_amount'],
          # New-user order metrics
          'new_order_user': len(fs_user),
          'new_paid_order_user': len(fs_paid_user),
          'ly_new_order_user': len(fs_ly_user),
          'zy_new_order_user': len(fs_zy_user),
          'ly_new_paid_order_user': len(fs_ly_paid_user),
          'zy_new_paid_order_user': len(fs_zy_paid_user),
          'ly_cancel_order': len(ly_cancel_order),
          'zy_cancel_order': len(zy_cancel_order),
          'cal_ts': cal_ts}
    print(len(ly_cancel_order), len(zy_cancel_order))
    # Persist the single daily row.
    LoadMysql.simple_dict_load(col=col,
                               table='lie_operate_daily',
                               data=[rs],
                               db=ConnList.Dashboard(),
                               cal_time=False)
    # Send the daily operations e-mail.
    PiEmail.pipeline_operate_daily()
"""
运营周报
"""
@staticmethod
def pipeline_operate_weekly():
    """Assemble the weekly operations report.

    Runs its payload only on Saturday (`time.strftime("%w") == 6`):
    averages the week's traffic, computes registration / retention /
    repurchase ratios and order metrics, writes one row into
    `lie_operate_weekly`, then triggers the weekly report e-mail.
    Side effects only; no return value.
    """
    # Column list for the insert; must stay aligned with the `rs` keys below.
    col = ['pv',
           'uv',
           'depth',
           'reg_user',
           'reg_trans',
           'preserve_radio',
           'active_user',
           'new_order_user',
           'new_order_paid_user',
           'utm_new_paid_user',
           'order_user',
           'paid_user',
           'order_count',
           'paid_count',
           'order_amount',
           'paid_amount',
           'cancel_order',
           'zy_order_user',
           'zy_sum_user',
           'online_user',
           'order_and_paid',
           'zy_order_and_paid',
           'zy_bg_user',
           'zy_bg_count',
           'zy_bg_amount',
           'zy_paid_bg_user',
           'zy_paid_bg_count',
           'zy_paid_bg_amount',
           'zy_first_user',
           'zy_first_count',
           'zy_first_amount',
           'zy_first_paid_user',
           'zy_first_paid_count',
           'zy_first_paid_amount',
           'first_order_and_paid',
           'zy_first_order_and_paid',
           'cal_ts']
    # Day of week, '0' = Sunday .. '6' = Saturday.
    d = time.strftime("%w", time.localtime())
    # Only run on Saturday.
    if int(d) == 6:
        # Key dates of the reporting week (as date strings).
        today = DateHandler.now_date(0, 1)
        mon_day = DateHandler.now_date(5, 1)
        thu_day = DateHandler.now_date(2, 1)
        fri_day = DateHandler.now_date(1, 1)
        last_sat = DateHandler.now_date(7, 1)
        # Same dates as unix timestamps.
        mon_day_ts = DateHandler.str_to_unix(mon_day, "%Y-%m-%d")
        fri_day_ts = DateHandler.str_to_unix(fri_day, "%Y-%m-%d")
        today_ts = DateHandler.str_to_unix(today, "%Y-%m-%d")
        last_sat_ts = DateHandler.str_to_unix(last_sat, "%Y-%m-%d")
        # Daily ERP exchange rate for amount normalization.
        exchange = ExERP(today).get_erp_exchange()
        # Traffic: Monday..Friday averages.
        operate_data = ExDashboard('').avg_operate_data({'start_time': mon_day, 'end_time': fri_day})
        avg_pv = round(operate_data[0]['avg(pv)'], 1)
        avg_uv = round(operate_data[0]['avg(uv)'], 1)
        avg_depth = round(operate_data[0]['avg(pv)/avg(uv)'], 1)
        # Users currently online (as of today).
        online_user = ExOrder('').online_user(today_ts)
        # Rolling 4-week windows for the repurchase ratio.
        sat_ts = fri_day_ts + 86400
        last_four = sat_ts - (86400 * 7 * 4)
        last_eight = sat_ts - (86400 * 7 * 8)
        # Repurchase: users who ordered-and-paid in both the last 4 weeks
        # and the 4 weeks before that.
        order_and_paid_user_1 = ExOrder('').order_and_paid_user({'start_time': last_four, 'end_time': sat_ts, 'condition': ['(1,2)']})
        order_and_paid_user_2 = ExOrder('').order_and_paid_user({'start_time': last_eight, 'end_time': last_four, 'condition': ['(1,2)']})
        mix_user = len(list(set(order_and_paid_user_1).intersection(set(order_and_paid_user_2))))
        # NOTE(review): raises ZeroDivisionError if the earlier window had
        # no paying users — confirm that case cannot occur in practice.
        order_and_paid_user_radio = round(mix_user / len(order_and_paid_user_2) * 100)
        # Registration and conversion over the week.
        user = ExDashboard('').avg_operate_data({'start_time': last_sat, 'end_time': fri_day})
        reg_user = user[0]['sum(reg_user)']
        reg_trans = ExOrder('').reg_trans({'start_time': last_sat_ts, 'end_time': fri_day_ts + 86400})
        reg_radio = round((len(reg_trans) / reg_user) * 100) if reg_user > 0 else 0
        ac_user = round(operate_data[0]['avg(active_user)'], 1)
        preserve = ExShenCe('').preserve(mon_day, thu_day)
        # Orders over the week (last Saturday .. this Friday inclusive).
        condition = {'start_time': last_sat_ts, 'end_time': fri_day_ts + 86400, 'condition': ['order_id > 0']}
        # Raw extracts
        fs_user = ExOrder('').first_order_user(condition)
        all_order = ExOrder('').all_order(condition)
        pay_log = ExOrder('').pay_log_detail(condition)
        cancel_order = ExOrder('').cancel_order(condition)
        # Translated aggregates
        ts_order = TsDaily('', all_order).trans_order(exchange)
        ts_order_paid = TsDaily('', pay_log).trans_paid_order(exchange)
        # Self-operated ('zy') user pools.
        zy_user = ExOrder('').zy_order_user(today_ts)
        zy_ly_user = ExOrder('').zy_ly_order_user(today_ts)
        # Self-operated repurchase ratio over the same rolling windows.
        zy_order_and_paid_user_1 = ExOrder('').order_and_paid_user(
            {'start_time': last_four, 'end_time': sat_ts, 'condition': ['(2)']})
        zy_order_and_paid_user_2 = ExOrder('').order_and_paid_user(
            {'start_time': last_eight, 'end_time': last_four, 'condition': ['(2)']})
        zy_mix_user = len(list(set(zy_order_and_paid_user_1).intersection(set(zy_order_and_paid_user_2))))
        zy_order_and_paid_user_radio = round(zy_mix_user / len(zy_order_and_paid_user_2) * 100)
        """
        * 7.17 更新
        1. 下单人数、订单数、下单金额
        2. 付款人数、付款订单数、付款订单金额
        3. 首次下单人数、首次下单数、首次下单金额
        4. 首次付款人数、首次付款订单数、首次付款订单金额
        """
        # Self-operated orders placed this week
        zy_bg_user = ts_order['zy_user']
        zy_bg_count = ts_order['zy_count']
        zy_bg_amount = ts_order['zy_amount']
        # Self-operated orders paid this week
        zy_paid_bg_user = ts_order['zy_paid_user']
        zy_paid_bg_count = ts_order['zy_paid_count']
        zy_paid_bg_amount = ts_order['zy_paid_amount']
        # Self-operated first orders (placed and paid) this week
        zy_first_order = ExOrder('').first_order_detail(
            condition={'start_time': last_sat_ts, 'end_time': fri_day_ts + 86400, 'order_goods_type': "(2)", 'condition': ['o.order_id > 0']})
        ts_zy_first_order = TsDaily('', zy_first_order).trans_order(exchange)
        zy_first_user = ts_zy_first_order['zy_user']
        zy_first_count = ts_zy_first_order['zy_count']
        zy_first_amount = ts_zy_first_order['zy_amount']
        zy_first_paid_user = ts_zy_first_order['zy_paid_user']
        zy_first_paid_count = ts_zy_first_order['zy_paid_count']
        zy_first_paid_amount = ts_zy_first_order['zy_paid_amount']
        """
        * 7.17 更新
        1. 自营新用户复购率
        2. 新用户复购率
        3. 自营用户下单联营的占比
        """
        # New-user repurchase: first orders last week vs paid orders this week.
        last_week_first_order = ExOrder('').first_order_detail(
            condition={'start_time': last_sat_ts - 86400 * 7, 'end_time': fri_day_ts + 86400 - 86400 * 7,
                       'order_goods_type': "(1,2)", 'condition': ['o.status > 2']})
        this_week_order = ExOrder('').order_and_paid_detail(
            condition={'start_time': last_sat_ts, 'end_time': fri_day_ts + 86400, 'order_goods_type': "(1,2)",
                       'condition': ['o.status > 2']})
        trans_mixed = TsDaily('', None).trans_mixed(last_week_first_order, this_week_order)
        """
        * 8.8 更新
        1. 新付款用户数
        """
        fs_paid_user = ExOrder('').first_paid_user(
            condition={'start_time': last_sat_ts, 'end_time': fri_day_ts + 86400, 'condition': ['order_id > 0']})
        # Weekly row; keys must match `col` above.
        rs = {'pv': avg_pv,
              'uv': avg_uv,
              'depth': avg_depth,
              'reg_user': reg_user,
              'reg_trans': reg_radio,
              'preserve_radio': preserve['preserve_radio'],
              'active_user': ac_user,
              'new_order_user': len(fs_user),
              'new_order_paid_user': len(fs_paid_user),
              'utm_new_paid_user': 0,
              'order_user': ts_order['order_user'],
              'paid_user': ts_order_paid['order_user'],
              'order_count': ts_order['order_count'],
              'paid_count': ts_order_paid['order_count'],
              'order_amount': ts_order['order_amount'],
              'paid_amount': ts_order_paid['order_amount'],
              'cancel_order': len(cancel_order),
              'zy_order_user': len(zy_user),
              'zy_sum_user': len(zy_ly_user),
              'online_user': online_user,
              'order_and_paid': order_and_paid_user_radio,
              'zy_order_and_paid': zy_order_and_paid_user_radio,
              'zy_bg_user': zy_bg_user,
              'zy_bg_count': zy_bg_count,
              'zy_bg_amount': zy_bg_amount,
              'zy_paid_bg_user': zy_paid_bg_user,
              'zy_paid_bg_count': zy_paid_bg_count,
              'zy_paid_bg_amount': zy_paid_bg_amount,
              'zy_first_user': zy_first_user,
              'zy_first_count': zy_first_count,
              'zy_first_amount': zy_first_amount,
              'zy_first_paid_user': zy_first_paid_user,
              'zy_first_paid_count': zy_first_paid_count,
              'zy_first_paid_amount': zy_first_paid_amount,
              'first_order_and_paid': trans_mixed['all_radio'],
              'zy_first_order_and_paid': trans_mixed['zy_radio'],
              'cal_ts': mon_day}
        # Persist the weekly row.
        LoadMysql.simple_dict_load(col=col,
                                   table='lie_operate_weekly',
                                   data=[rs],
                                   db=ConnList.Dashboard(),
                                   cal_time=False)
        PiEmail.pipeline_operate_weekly()
    @staticmethod
    def pipeline_crm_user():
        """Rebuild the CRM user aggregates.

        Pulls the full CRM user history (epoch .. now) and runs the TsCrm
        enrichment steps in sequence; each step presumably writes results
        through the wr_db connection -- TODO confirm against TsCrm.
        """
        start_time = 0  # from the epoch: full history
        end_time = DateHandler.date_time(0)
        db = ConnList.Order()    # read-side connection
        wr_db = ConnList.WrCrm()  # write-side connection
        user = ExCrm('').crm_user(start_time, end_time)
        ts_user = TsCrm(db, wr_db, user)
        print(len(user))  # progress: number of users fetched
        # Enrichment steps; keep this exact order -- later steps may
        # depend on state written by earlier ones (order preserved as-is).
        ts_user.is_order()
        ts_user.last_order()
        ts_user.order_num()
        ts_user.model_num()
        ts_user.order_amount()
        ts_user.paid_amount()
        ts_user.contact_info()
        ts_user.iap_data()
        print(len(user))
from utils.msg_handler import MsgHandler
from utils.date_handler import DateHandler
from extract.ex_dashboard import ExDashboard
class PiEmail:
    """Email delivery pipelines.

    Every pipeline resolves its recipient list from the dashboard email
    configuration (rows matching an ``email_id`` with ``status = 1``) and
    delivers a templated message through ``MsgHandler.send_email``.
    """

    @staticmethod
    def _email_list(email_id, label='采集'):
        """Return the active recipient list configured for *email_id*.

        *label* is the name passed to the ExDashboard extractor (kept
        per-caller to match the original call sites exactly).
        """
        condition = {'condition': ['email_id = %d' % email_id, 'status = 1']}
        return ExDashboard(label).email_list(condition)

    @staticmethod
    def pipeline_zm_down():
        """Daily mail: exclusive-agency (zm) SKUs taken off the shelves."""
        # Pull the encoded de-listing counts from the search service.
        key = 'djdj93ichuntj56dksisearchdj45eieapi'
        url = "http://so12.ichunt.com/search/ServerApi/encodedCount"
        data = MsgHandler.get_encrypt_msg(key, url)
        email_list = PiEmail._email_list(5)
        # Flatten the rows into <br>-separated template fields.
        send_data = {'code': '', 'num': '', 'canal': '', 'yes_day': DateHandler.now_date(1, 1)}
        for d in data:
            send_data['code'] += str(d['encoded']) + '<br>'
            # str() guards against numeric counts coming back from the API
            # (the sibling pipeline_zm_pre_down already coerces this way).
            send_data['num'] += str(d['num']) + '<br>'
            send_data['canal'] += str(d['canal']) + '<br>'
        MsgHandler.send_email(send_data, email_list, 'zm-down-daily')

    @staticmethod
    def pipeline_zm_pre_down(data):
        """Early-warning mail: zm supplier stock about to be de-listed."""
        email_list = PiEmail._email_list(8)
        send_data = {'code': '', 'num': '', 'canal': '', 'yes_day': DateHandler.now_date(1, 1)}
        for d in data:
            send_data['code'] += str(d['purchase_uid']) + '<br>'
            send_data['num'] += str(d['count']) + '<br>'
            send_data['canal'] += d['supplier_code'] + '<br>'
        MsgHandler.send_email(send_data, email_list, 'zm-warn')

    @staticmethod
    def pipeline_safe_stock(count):
        """Stock-warning mail; links the report file only when *count* > 0."""
        email_list = PiEmail._email_list(3)
        info = '下载库存预警文件' if count > 0 else '无库存预警文件下载'
        link = 'http://shuju.ichunt.net/analysis/stockDownload?' + DateHandler.now_date(0, 1) if count > 0 else ''
        send_data = {'day': DateHandler.now_date(0, 1),
                     'num': count,
                     'info': info,
                     'link': link}
        MsgHandler.send_email(send_data, email_list, 'stock-report')

    @staticmethod
    def pipeline_sku_expose(data):
        """SKU exposure report; *data* is the prebuilt template dict."""
        email_list = PiEmail._email_list(6)
        MsgHandler.send_email(data, email_list, 'sku-expose')

    @staticmethod
    def pipeline_search_no_result(data, keyword):
        """Search-without-result report; template chosen by *keyword*."""
        email_list = PiEmail._email_list(2)
        MsgHandler.send_email(data, email_list, keyword)

    @staticmethod
    def pipeline_core_report(data, keyword):
        """Core daily report; template chosen by *keyword*."""
        email_list = PiEmail._email_list(1)
        MsgHandler.send_email(data, email_list, keyword)

    @staticmethod
    def pipeline_zyly_match(data):
        """Self-operated vs consignment price-match report."""
        email_list = PiEmail._email_list(7, '')
        MsgHandler.send_email(data, email_list, 'zyly_price_match')

    @staticmethod
    def pipeline_operate_daily():
        """Operations daily mail: link to yesterday's daily API report."""
        email_list = PiEmail._email_list(9, '')
        link = 'http://shuju.ichunt.net/api/ApiOperateDaily?day=' + DateHandler.now_date(0, 1)
        send_data = {'day': DateHandler.now_date(0, 1),
                     'link': link}
        MsgHandler.send_email(send_data, email_list, 'operate_daily')

    @staticmethod
    def pipeline_operate_weekly():
        """Operations weekly mail: link to the weekly API report."""
        email_list = PiEmail._email_list(10, '')
        link = 'http://shuju.ichunt.net/api/ApiOperateWeekly'
        send_data = {'link': link}
        MsgHandler.send_email(send_data, email_list, 'operate_weekly')
import math
import traceback
from extract.ex_erp import ExERP
from translate.ts_erp import TsERP
from utils.db_handler import DBHandler
from config.conn_list import ConnList
from utils.msg_handler import MsgHandler
from utils.date_handler import DateHandler
from utils.excel_handler import ExcelHandler
from utils.date_handler import DateHandler
class PiErp:
    """ERP credit-risk pipelines.

    Pulls per-company funding-pool records from ERP, aggregates them per
    company and per month, and loads the results into the credit DB.
    Alerts go to DingDing via MsgHandler.send_dd_msg.
    """

    def __init__(self):
        super().__init__()
        self.erp = ExERP(DateHandler.now_date(0, 1))
        self.ts_erp = TsERP()

    # Business-data pull
    def pipeline_erp_company(self, credit, credit_source):
        """Pull every company's funding-pool records from ERP and load them.

        *credit* / *credit_source* select the target credit-DB tables;
        lie_basic_detail and lie_basic_info are truncated first.
        """
        company_list = self.erp.get_erp_list(credit)
        total_count = 0
        cmp_size = len(company_list)
        # Clear previous results
        self.ts_erp.truncate_erp('lie_basic_detail', credit_source)
        self.ts_erp.truncate_erp('lie_basic_info', credit)
        for row in company_list:
            try:
                rd = []  # records with credit (iscredit == 1)
                ad = []  # all records
                cmp_code = row['cmp_code']
                cmp_name = row['cmp_name']
                res = self.erp.get_erp_company(cmp_name)
                total_count += len(res)
                print(cmp_code, len(res))
                # inner loop var renamed from `row` to stop shadowing the outer one
                for rec in res:
                    poolfundNo = rec[0][1]        # funding-pool number
                    poolfundSourceNo = rec[1][1]  # funding-pool source code
                    entruNo = rec[2][1]           # warehouse-entry number
                    tradeAmount = float(rec[3][1])  # trade amount
                    bizdate = rec[4][1]           # business (order) date
                    refundDate = str(rec[5][1]).replace(' ', '')  # repayment date
                    term = int(rec[6][1]) if type(rec[6][1]) == int else 0  # term in days
                    refundAMt = float(rec[7][1])  # repaid amount
                    isSettle = 0                  # settled flag
                    overdueDays = 0               # days overdue
                    overdueAmount = 0.00          # overdue amount
                    receDate = rec[8][1]          # receivable date
                    iscredit = rec[9][1]          # credit flag: 0 none, 1 has credit
                    gross_profit = float(rec[10][1]) if type(rec[10][1]) == float else 0.0  # gross profit
                    poolfundType = rec[11][1]     # funding-pool type
                    tolerance = 10                # tolerance days, currently fixed at 10
                    source_type = rec[12][1]      # source label
                    # Source: 1 = supply chain, 2 = everything else
                    if source_type == '供应链':
                        source_type = 1
                    else:
                        source_type = 2
                    # Date string -> unix timestamp
                    biz_ts = DateHandler.str_to_unix(bizdate, fmt="%Y-%m-%d") if bizdate != "" else 0
                    today_ts = DateHandler.date_time(0)
                    # Settled when repaid amount equals trade amount (float-safe compare)
                    is_close = math.isclose(tradeAmount, refundAMt)
                    if is_close:
                        isSettle = 1
                    else:
                        isSettle = 0
                    # Credit normalisation: empty string means "no credit"
                    iscredit = iscredit if iscredit != "" else 0
                    # Receivable date
                    receDate_ts = DateHandler.str_to_unix(receDate, fmt="%Y-%m-%d") if receDate != "" else 0
                    # Days overdue = repayment date - receivable date
                    if refundDate != '':
                        refundDate_ts = DateHandler.str_to_unix(refundDate, fmt="%Y-%m-%d")
                        due = (refundDate_ts - receDate_ts) / 86400
                        overdueDays = due
                    else:
                        # Not repaid yet: treat today as the repayment date
                        refundDate_ts = today_ts
                        if today_ts > receDate_ts:
                            due = (today_ts - receDate_ts) / 86400
                            overdueDays = due
                    # Overdue amount = repaid amount whenever any days are overdue
                    if overdueDays > 0:
                        overdueAmount = refundAMt
                    # Assembled record
                    dt = {
                        'poolfundNo': poolfundNo,
                        'poolfundSourceNo': poolfundSourceNo,
                        'entruNo': entruNo,
                        'tradeAmount': tradeAmount,
                        'bizdate': biz_ts,
                        'refundDate': refundDate_ts,
                        'term': term,
                        'refundAMt': refundAMt,
                        'isSettle': isSettle,
                        'overdueDays': overdueDays,
                        'receDate': receDate_ts,
                        'company_code': cmp_code,
                        'overdueAmount': overdueAmount,
                        'iscredit': iscredit,
                        'gross_profit': gross_profit,
                        'poolfundType': poolfundType,
                        'tolerance': tolerance,
                        'source_type': source_type
                    }
                    # Credit-only subset
                    if iscredit == 1:
                        rd.append(dt)
                    # Full set
                    ad.append(dt)
                # Per-company aggregates over the full set
                self.pipeline_erp_info(ad, credit)
                # Persist the credit records
                self.ts_erp.load_erp(rd, credit_source)
            except:
                MsgHandler.send_dd_msg('【风控报警】:%s' % traceback.format_exc())
        # Completion notice
        MsgHandler.send_dd_msg('【风控报警】:公司数量%d,订单总数量%d' % (cmp_size, total_count))

    # Business-data aggregation
    def pipeline_erp_info(self, data, credit):
        """Aggregate one company's records (*data*) into summary metrics
        and store them via ts_erp.load_erp_info."""
        # Fresh accumulator dict
        ct = self.ts_erp.init_erp_info()
        try:
            for row in data:
                order_amount = row['tradeAmount']   # order amount
                gross_profit = row['gross_profit']  # gross profit
                deadline_day = row['term']          # payment-term days
                delay_day = row['overdueDays']      # days overdue
                entru_no = row['entruNo']           # warehouse-entry number (unused here)
                business_time = row['bizdate']      # order time
                cmp_code = row['company_code']      # company code
                iscredit = row['iscredit']
                ct['cmp_code'] = cmp_code
                # Earliest order time
                first_order_time = ct['first_order_time']
                if (first_order_time == 0 or (first_order_time != 0 and first_order_time > business_time)):
                    ct['first_order_time'] = business_time
                # Lifetime totals
                ct['total_order_amount'] += order_amount
                ct['total_gross_profit'] += gross_profit
                # Last 3 months
                if self.ts_erp.is_between_time(business_time, 3):
                    ct['recently_thr_total_amount'] += order_amount
                    ct['recently_thr_total_count'] += 1
                    ct['recently_thr_max_amount'] = max(ct['recently_thr_max_amount'], order_amount)
                # Last 6 months
                if self.ts_erp.is_between_time(business_time, 6):
                    ct['recently_six_total_amount'] += order_amount
                    ct['recently_six_total_count'] += 1
                    ct['recently_six_max_amount'] = max(ct['recently_six_max_amount'], order_amount)
                # Last month
                if self.ts_erp.is_between_time(business_time, 1):
                    ct['last_month_amount'] += order_amount
                # Amount granted on credit terms
                if deadline_day > 0:
                    ct['reveive_period_aomunt'] += order_amount
                # Historical overdue count (credit customers only)
                if delay_day > 0 and iscredit == 1:
                    ct['total_delay_times'] += 1
            thr_count = 3
            six_count = 6
            ct['recently_thr_average_amount'] = ct['recently_thr_total_amount'] / thr_count
            ct['recently_six_average_amount'] = ct['recently_six_total_amount'] / six_count
            # Persist only when at least one record set a company code
            if ct['cmp_code'] != '':
                self.ts_erp.load_erp_info(ct, credit)
        except:
            MsgHandler.send_dd_msg('【风控报警】:%s' % traceback.format_exc())

    # Business data aggregated per month
    def pipeline_erp_month(self, is_tolerance, con_str, table, credit, credit_source):
        """Bucket ERP records per company per month and compute monthly
        receivable / overdue metrics; results go into *table*.

        *is_tolerance* toggles applying each record's tolerance days
        (when on, two hard-coded companies are excluded entirely).
        """
        # cmp_dt[cmp_code][YYYY-MM] -> monthly accumulator
        cmp_dt = {}
        # Clear previous results
        self.ts_erp.truncate_erp(table, credit)
        # Fetch ERP detail records
        erp_data = self.erp.get_erp_data(con_str, credit_source)
        print(len(erp_data))
        # Classification pass
        for row in erp_data:
            try:
                erp_id = row['id']                           # record id
                order_amount = float(row['order_amount'])    # order amount
                gross_profit = float(row['gross_profit'])    # gross profit
                deadline_day = row['deadline_day']           # payment-term days
                delay_day = row['delay_day']                 # days overdue
                entru_no = row['entru_no']                   # warehouse-entry number (unused here)
                business_time = row['business_time']         # order time
                cmp_code = row['erp_company_code']           # company code
                return_amount = float(row['return_amount'])  # repaid amount
                return_time = row['return_time']             # repayment time
                receive_time = row['receive_time']           # receivable date
                tolerance = row['tolerance']                 # tolerance days
                rt_rec_fmt = DateHandler.unix_to_date(receive_time, fmt="%Y-%m")
                rt_bus_fmt = DateHandler.unix_to_date(business_time, fmt="%Y-%m")
                # Tolerance handling (comparison kept as-is for exact behavior)
                if is_tolerance != True:
                    tolerance = 0
                else:
                    # These two companies are excluded from tolerance runs
                    if cmp_code == 'GGN0001232' or cmp_code == 'MY002':
                        continue
                # Initialise the company bucket
                if cmp_code not in cmp_dt:
                    cmp_dt[cmp_code] = {}
                # Bucket by receivable month
                if rt_rec_fmt not in cmp_dt[cmp_code]:
                    cmp_dt[cmp_code][rt_rec_fmt] = self.ts_erp.init_erp_month_temp()
                cmp_detail = cmp_dt[cmp_code][rt_rec_fmt]
                if return_amount > 0:
                    # Total payment-term days across paid records
                    cmp_detail['delay_sum_day'] += int((return_time - business_time) / 86400)
                    if delay_day > 0 + tolerance:
                        # Overdue amount / count / days
                        cmp_detail['delay_amount'] += order_amount
                        cmp_detail['delay_count'] += 1
                        cmp_detail['delay_day'] += delay_day
                    if delay_day < 0 + tolerance:
                        # Received within tolerance
                        cmp_detail['tolerance_receive_amount'] += return_amount
                        cmp_detail['tolerance_receive_count'] += 1
                        # On-schedule delivery total
                        cmp_detail['schedule_delivery_amount'] += order_amount
                    # Receivable totals
                    cmp_detail['receive_amount'] += order_amount
                    cmp_detail['receive_count'] += 1
                    # Payment-term day totals
                    cmp_detail['period_day'] += int((return_time - business_time) / 86400)
                    # Granted term (receivable date - order date)
                    cmp_detail['appoint_tolerance'] += int((receive_time - business_time) / 86400)
                # Bucket by order month
                if rt_bus_fmt not in cmp_dt[cmp_code]:
                    cmp_dt[cmp_code][rt_bus_fmt] = self.ts_erp.init_erp_month_temp()
                cmp_bus_detail = cmp_dt[cmp_code][rt_bus_fmt]
                if return_amount < 0:
                    # Returned-goods total
                    cmp_bus_detail['return_amount'] += return_amount
                if order_amount > 0:
                    # Shipped total
                    cmp_bus_detail['unload_amount'] += order_amount
            except:
                MsgHandler.send_dd_msg('【风控报警】:%s' % traceback.format_exc())
        # Summary pass
        for cmp_code in cmp_dt:
            try:
                fmt_dt = cmp_dt[cmp_code]
                for fmt in fmt_dt:
                    cmp_detail = fmt_dt[fmt]
                    return_amount = cmp_detail['return_amount']
                    unload_amount = cmp_detail['unload_amount']
                    receive_amount = cmp_detail['receive_amount']
                    receive_count = cmp_detail['receive_count']
                    delay_sum_day = cmp_detail['delay_sum_day']
                    # NOTE(review): reads 'receive_count', not 'delay_count' --
                    # confirm this is the intended divisor for delay_avg_day.
                    delay_count = cmp_detail['receive_count']
                    period_day = cmp_detail['period_day']
                    # Average payment days
                    if delay_count != 0:
                        cmp_detail['delay_avg_day'] = delay_sum_day / delay_count
                    # Average granted term
                    if receive_count != 0:
                        cmp_detail['appoint_tolerance'] /= receive_count
                    else:
                        cmp_detail['appoint_tolerance'] = 0
                    appoint_tolerance = cmp_detail['appoint_tolerance']
                    # cmp_detail['period_day'] /= 86400
                    # Credit-term usage level for the month
                    if (receive_count != 0) and (appoint_tolerance != 0):
                        cmp_detail['period_user_level'] = period_day / receive_count / appoint_tolerance
                    # Delivered total
                    cmp_detail['delivery_amount'] = return_amount + unload_amount
            except:
                # BUG FIX: was traceback.print_exc(), which prints to stderr
                # and formats the literal 'None' into the alert text.
                MsgHandler.send_dd_msg('【风控报警】:%s' % traceback.format_exc())
        # Persist
        self.ts_erp.load_erp_month(cmp_dt, table, credit)
        # DingDing completion notice
        cmp_size = len(cmp_dt)
        MsgHandler.send_dd_msg('【风控报警】:basic_month表写入数据%d' % (cmp_size))

    # Monthly weight computation
    def pipeline_erp_weight(self, table, db):
        """For every distinct month in *table*, compute six-month weighted
        credit-usage metrics per company and write them back."""
        month_list = self.erp.get_erp_diff_month(table, db)
        for month in month_list:
            try:
                print(month)
                # The six-month window ending at `month`
                months = self.ts_erp.recently_six_month(str(month))
                six_data = self.erp.get_six_data(months, table, db)
                # Result holders
                rd = []
                weight_A = 0
                weight_B = 0
                period_weight_A = 0
                period_weight_B = 0
                total_amount = 0
                total_use_level = 0
                user_level = 0
                period_platform_level = 0
                period_use_times_single = 0
                period_use_times_six = 0
                period_user_level_six = 0
                # Pass 1: platform-wide amount-weighted averages
                for sd in six_data:
                    cmp_code = sd['cmp_code']
                    tolerance = sd['tolerance']
                    delay_avg_day = sd['delay_avg_day']
                    use_level = sd['use_level']
                    receive_amount = sd['receive_amount']
                    weight_A += delay_avg_day * receive_amount
                    weight_B += tolerance * receive_amount
                    total_amount += receive_amount
                    total_use_level += use_level
                if total_amount != 0:
                    period_weight_A = weight_A / total_amount
                    period_weight_B = weight_B / total_amount
                if period_weight_B != 0:
                    period_platform_level = period_weight_A / period_weight_B
                # Pass 2: per-company ratios against the platform level
                for sd in six_data:
                    cmp_code = sd['cmp_code']
                    use_level = sd['use_level']
                    total_tolerance = sd['total_tolerance']
                    delay_avg_day = sd['delay_avg_day']
                    if period_platform_level != 0:
                        period_use_times_single = use_level / period_platform_level
                        period_user_level_six = delay_avg_day / total_tolerance if total_tolerance != 0 else 0
                        period_use_times_six = period_user_level_six / period_platform_level
                    rd = {
                        'month': month,
                        'cmp_code': cmp_code,
                        'period_weight_A': period_weight_A,
                        'period_weight_B': period_weight_B,
                        'period_platform_level': period_platform_level,
                        'period_use_times_single': period_use_times_single,
                        'period_use_times_six': period_use_times_six
                    }
                    self.ts_erp.update_basic_month(rd, table, db)
            except:
                MsgHandler.send_dd_msg('【风控报警】:%s' % traceback.format_exc())

    # Scheduled ERP company-code back-fill
    def pipeline_erp_CmpCode(self):
        """Back-fill erp_company_code on lie_com_credits by matching the
        company's full name in the supply-chain DB."""
        try:
            chain_db = ConnList.Chain()
            credit_db = ConnList.Credit()
            sql = "SELECT id,company_name FROM lie_com_credits WHERE erp_company_code = \'\'"
            result = DBHandler.read(credit_db, sql)
            for row in result:
                com_id = row[0]
                company_name = row[1]
                sql = "SELECT customer_code \
                       FROM lie_company c \
                       LEFT JOIN lie_customer u ON c.company_id = u.company_id\
                       WHERE company_full_name = \'%s\' LIMIT 1" % company_name
                exist = DBHandler.read(chain_db, sql)
                if len(exist) > 0:
                    company_code = exist[0][0]
                    if company_code is not None:
                        sql = "UPDATE lie_com_credits SET erp_company_code = \'%s\' WHERE id = %d" % (company_code, com_id)
                        DBHandler.update(credit_db, sql)
        except:
            # BUG FIX: was traceback.print_exc() -- alert carried 'None'.
            MsgHandler.send_dd_msg('【风控报警】:%s' % traceback.format_exc())
from extract.ex_ickey import ExIckey
from extract.ex_goods import ExGoods
from extract.ex_order import ExOrder
from extract.ex_dashboard import ExDashboard
from extract.ex_purchase import ExPurchase
from translate.ts_goods import TsGoods
from load.load_mysql import LoadMysql
from config.conn_list import ConnList
from utils.date_handler import DateHandler
from pipeline.pi_email import PiEmail
class PiGoods:
    """
    Goods pipelines: SKU model matching, self-operated stocking
    suggestions and the self-operated daily summary.
    """
    @staticmethod
    def pipeline_zylxly_goods():
        # Match self-operated (zy) SKUs against Liexin consignment (ly) SKUs by model name.
        where = {'condition': ['status = 1']}
        ex_goods = ExGoods('自营数据')
        zy_data = ex_goods.ex_zy_goods(where)
        # Liexin consignment SKU ids
        data = ex_goods.ex_ly_goods()
        # Liexin consignment model names
        ts_goods = TsGoods('', data)
        ts_data = ts_goods.trans_goods_name()
        # Match identical models across the two catalogues
        print(len(zy_data), len(ts_data))
        # 6.88 / 1.16 look like an FX rate and a tax factor -- TODO confirm
        final_result = ts_goods.trans_match_name(zy_data, ts_data, 6.88, 1.16)
        match_result = ts_goods.trans_zy_low_price(final_result)  # NOTE(review): result unused
    """
    自营备货
    """
    @staticmethod
    def pipeline_zy_stock():
        # Cloud-Han (ickey) index data
        ex_ickey = ExIckey('')
        ickey = ex_ickey.ex_ickey_index_goods()
        # LCSC self-operated data + Liexin search enrichment
        ts_goods = TsGoods(name='', data=ickey)
        ts_goods.trans_lc_data()
        ts_goods.trans_lx_search()
        # final_result = ts_goods.return_data()
        # Persist
        ts_goods.trans_load_gn()
    """
    专卖下架提前预警
    """
    @staticmethod
    def pipeline_zm_warn():
        # Consignment suppliers with status = 2
        where = {'condition': ['status = 2']}
        ex_goods = ExGoods('')
        sup = ex_goods.ex_ly_supplier(where)
        # Build the warning rows
        ts_goods = TsGoods('', data=sup)
        ts_sup = ts_goods.trans_zm_warn()
        # Mail the early warning
        PiEmail.pipeline_zm_pre_down(ts_sup)
    """
    处理指定人员下的联营渠道数据
    参数:
    10034 陈泽彬
    10044 庞力
    10076 许金荣
    """
    @staticmethod
    def pipeline_special_canal():
        # (uid, name) pairs of the channel managers to report on
        guy = [[10034, '陈泽彬'], [10044, '庞力'], [10076, '许金荣']]
        # Build the SQL IN-list string, e.g. '(10034, 10044, 10076)'
        guy_str = '('
        # name_str = '('
        for i in range(0, len(guy)):
            if i != len(guy) - 1:
                guy_str += str(guy[i][0]) + ', '
                # name_str += '\'' + str(guy[i][1]) + '\'' + ', '
            else:
                guy_str += str(guy[i][0]) + ')'
                # name_str += '\'' + str(guy[i][1]) + '\'' + ')'
        where = {'condition': ['status = 2', 'channel_uid in %s' % guy_str]}
        # Yesterday's ERP-sourced goods window
        erp_where = {'start_time': DateHandler.date_time(1),
                     'end_time': DateHandler.date_time(0),
                     # 'condition': ['sale_man in %s' % name_str]
                     'condition': ['source_type = \'ERP\'']}
        sup = ExGoods('').ex_ly_supplier(where)
        erp = ExGoods('').ex_erp_goods(erp_where)
        rd = TsGoods('', sup).trans_special_canal(erp)
        col = ['supplier_code', 'supplier_name', 'effect_num', 'order_num', 'cal_ts', 'insert_time']
        LoadMysql.sample_load(col, 'lie_special_order_cal', rd, db=ConnList.Dashboard(), cal_time=False)
    """
    自营每日汇总
    汇总字段
    品牌
    一级类目
    """
    @staticmethod
    def pipeline_zy_daily_summary():
        # Yesterday's self-operated exposure data
        zy_ep = ExDashboard('').zy_goods({'start_time': DateHandler.date_time(1)})
        o_w = {'start_time': DateHandler.date_time(1),
               'end_time': DateHandler.date_time(0),
               'condition': ['o.order_goods_type = 2']}
        # o_w = {'start_time': DateHandler.date_time(27),
        #        'end_time': DateHandler.date_time(-1),
        #        'condition': ['o.order_goods_type = 2']}
        # Order rows, then paid-order rows (same window plus status > 2)
        order = ExOrder('').order_items(o_w)
        o_w['condition'].append('o.status > 2')
        p_order = ExOrder('').order_paid_items(o_w)
        # Purchase rows
        pur = ExPurchase('').purchase_items({'start_time': DateHandler.date_time(1),
                                             'end_time': DateHandler.date_time(0),
                                             'condition': ['p.status > 1']})
        # Exposure aggregation; later calls mutate rd in place
        rd = TsGoods('', zy_ep).trans_zy_expose_type()
        # Order aggregation
        TsGoods('', order).trans_zy_order_type(rd)
        TsGoods('', p_order).trans_zy_paid_order_type(rd)
        # Purchase aggregation
        TsGoods('', pur).trans_zy_purchase(rd)
        # Persist: one row per brand (name_type 1) / first-level category (2)
        col = ['expose', 'order_count', 'paid_order', 'cost', 'pur_cost', 'name_type', 'name', 'cal_ts']
        for r in ['brand', 'class']:
            result = []
            for d in rd[r]:
                rd[r][d]['name_type'] = 1 if r == 'brand' else 2
                rd[r][d]['name'] = d
                rd[r][d]['cal_ts'] = DateHandler.now_date(1, 1)
                # rd[r][d]['cal_ts'] = DateHandler.now_date(0, 1)
                result.append(rd[r][d])
            LoadMysql.simple_dict_load(col,
                                       'lie_zy_summary',
                                       result,
                                       db=ConnList.Dashboard(),
                                       cal_time=False)
import subprocess
import time
import re
from utils.date_handler import DateHandler
from utils.db_handler import DBHandler
from config.conn_list import ConnList
class PiLxLog:
    """Nginx access-log pipelines: compact the day's HDFS log files and
    extract Baidu-spider SEO statistics into the dashboard DB.

    The lx_* entry points handle the main-site log; the ly_* entry
    points handle the second site's log.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.db = ConnList.Dashboard()
        self.page_count = {}    # page category -> spider hits
        self.status_count = {}  # HTTP status -> hits
        self.ip_section = {}    # /24 IP prefix -> hits
        self.day = DateHandler.now_date(1, 1)  # yesterday, 'YYYY-MM-DD'
        # Log month abbreviation -> zero-padded month number
        self.mon = {'JAN': '01', 'FEB': '02', 'MAR': '03', 'APR': '04', 'MAY': '05', 'JUN': '06', 'JUL': '07', 'AUG': '08', 'SEP': '09', 'OCT': '10', 'NOV': '11', 'DEC': '12'}
        # URL-classification regexes (zy = self-operated, ly = consignment)
        self.match = {
            'zy': '((.*)/item/(.*)|(.*)/xianhuo(.*)|(.*)/product(.*))',
            'ly': '((.*)goods_(.*)|(.*)/spu/(.*)|(.*)/brand/list/(.*))',
            'search': '((.*)/s/(.*))',
            'ic': '((.*)/ic/(.*))',
            'info': '((.*)news.html(.*)|(.*)/article/(.*))'
        }
        # Spider IP prefixes to exclude from the stats
        self.block_ip = ['39.156.65', '47.92.125', '14.215.176']

    def _day_stamp(self):
        """Return self.day as the 'YYMMDD' stamp used in HDFS dir names."""
        y, m, d = self.day.split('-')
        return y[2:4] + m + d

    def _merge(self, hdfs_dir, local_dir):
        """Compact `hdfs_dir`/small/* into a single `hdfs_dir`/sum file,
        staging through `local_dir` (commands identical to the originals)."""
        hdfs_small = hdfs_dir + '/small/*'
        # Pull the day's small files to the local staging dir
        subprocess.getoutput('hadoop dfs -get ' + hdfs_small + ' ' + local_dir)
        # Concatenate them into one file
        subprocess.getoutput('find ' + local_dir + ' -type f -exec cat {} \\; > ' + local_dir + '/sum')
        # Push the merged file back to HDFS
        subprocess.getoutput('hadoop dfs -put ' + local_dir + '/sum ' + hdfs_dir)
        # Drop local staging files
        subprocess.getoutput('rm -f ' + local_dir + '/*')
        # Drop the HDFS small files
        subprocess.getoutput('hadoop dfs -rm -r ' + hdfs_small)

    def merge_file(self):
        """Compact yesterday's main-site (lx) small log files on HDFS."""
        self._merge('/lx_log/lx_log_' + self._day_stamp(), '/data3/hdfs_data/lx_log')

    def merge_ly_file(self):
        """Compact yesterday's ly-site small log files on HDFS."""
        self._merge('/ly_log/ly_log_' + self._day_stamp(), '/data3/hdfs_data/ly_log')

    def get_ly_hdfs_data(self):
        """Read yesterday's merged ly log from HDFS."""
        return DBHandler.hdfs_read('/ly_log/ly_log_' + self._day_stamp() + '/sum')

    def get_hdfs_data(self):
        """Read yesterday's merged lx log from HDFS."""
        return DBHandler.hdfs_read('/lx_log/lx_log_' + self._day_stamp() + '/sum')

    def wash_data(self, data):
        """Parse Baidu-spider hits from the raw log lines.

        Accumulates page/status/IP-section counters, records >=400
        responses, and dumps crawlable URLs to url.txt for the
        inclusion checker (PiLxLogUrl).
        """
        with open('/data3/hdfs_data/spider_url/url.txt', 'w') as f:
            for row in data:
                try:
                    row = row.decode("utf-8")
                    if 'Baiduspider' in row:
                        arr = row.split(" ")
                        ip = arr[0]
                        time_base = arr[3]
                        url = arr[6]
                        status = int(arr[8])
                        # BUG FIX: the original inner-loop `continue` only
                        # advanced the prefix loop and never skipped the row;
                        # blocked IPs must drop the whole line.
                        if any(blocked in ip for blocked in self.block_ip):
                            continue
                        time_str = self.time_analyzing(time_base)
                        time_ts = DateHandler.str_to_unix(time_str)
                        page = self.url_analyzing(url)
                        self.ip_section_analyzing(ip)
                        self.page_count[page] = self.page_count.get(page, 0) + 1
                        self.status_count[status] = self.status_count.get(status, 0) + 1
                        if status >= 400:
                            # Error responses recorded individually
                            self.insert_seo_url(url, status, time_ts, 'lie_seo_spider_url')
                        else:
                            line = url + "|" + str(time_ts) + "|" + ip
                            f.write(line + "\n")
                            f.flush()
                except:
                    # malformed log line: best-effort skip
                    pass
            self.insert_seo_status(self.status_count, 'lie_seo_spider_status')
            self.insert_seo_daily(self.page_count)
            self.insert_seo_ip_section(self.ip_section)

    def wash_ly_data(self, data):
        """Parse ly-site log lines: status counters plus >=400 URL records."""
        for row in data:
            try:
                row = row.decode("utf-8")
                arr = row.split(" ")
                url = arr[6]
                status = int(arr[8])
                time_base = arr[3]
                time_str = self.time_analyzing(time_base)
                time_ts = DateHandler.str_to_unix(time_str)
                # Only page URLs (.html or the site root) are counted
                if '.html' in url or '/' == url:
                    self.status_count[status] = self.status_count.get(status, 0) + 1
                    if status >= 400:
                        self.insert_seo_url(url, status, time_ts, 'lie_seo_ly_url')
            except:
                # malformed log line: best-effort skip
                pass
        self.insert_seo_status(self.status_count, 'lie_seo_ly_status')

    # Timestamp parsing
    def time_analyzing(self, time_str):
        """Convert a log timestamp like '[29/Aug/2019:15:38:00' to
        'YYYY-MM-DD HH:MM:SS'."""
        time_str = time_str[1:]  # strip the leading '['
        split_1 = time_str.split("/")
        split_2 = split_1[2].split(":")
        day = split_1[0]
        mon = self.mon[split_1[1].upper()]
        year = split_2[0]
        hour = split_2[1]
        minute = split_2[2]
        sec = split_2[3]
        return (year + '-' + mon + '-' + day + ' ' + hour + ':' + minute + ':' + sec)

    # URL classification
    def url_analyzing(self, url):
        """Classify a URL into one of the page categories in self.match."""
        zy = self.match['zy']
        ly = self.match['ly']
        search = self.match['search']
        ic = self.match['ic']
        info = self.match['info']
        if re.match(zy, url, re.M | re.I):
            return '自营'
        elif re.match(ly, url, re.M | re.I):
            return '联营'
        elif re.match(search, url, re.M | re.I):
            return '搜索'
        elif re.match(ic, url, re.M | re.I):
            return 'IC'
        elif re.match(info, url, re.M | re.I):
            return '资讯'
        else:
            return '其他'

    # Per-/24 IP-section counting
    def ip_section_analyzing(self, ip):
        """Accumulate the hit count for the IP's /24 prefix."""
        if re.match('([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)', ip, re.M | re.I):
            ip_arr = ip.split('.')
            new_ip = '%s.%s.%s' % (ip_arr[0], ip_arr[1], ip_arr[2])
            if new_ip not in self.ip_section:
                self.ip_section[new_ip] = 1
            else:
                self.ip_section[new_ip] += 1

    def pipeline_lx_log(self):
        """End-to-end lx pipeline: merge, read, wash."""
        self.merge_file()
        data = self.get_hdfs_data()
        self.wash_data(data)

    def pipeline_ly_log(self):
        """End-to-end ly pipeline: merge, read, wash."""
        self.merge_ly_file()
        data = self.get_ly_hdfs_data()
        self.wash_ly_data(data)

    def insert_seo_url(self, url, status, time_ts, table):
        """Record one error-status URL hit."""
        sql = "INSERT INTO %s (url,status,cal_ts,spider_time) VALUES (\'%s\',\'%s\',\'%s\',%d)" % (table, url, status, self.day, time_ts)
        DBHandler.insert(self.db, sql)

    def insert_seo_status(self, status_count, table):
        """Persist the HTTP-status counters."""
        for status in status_count:
            count = status_count[status]
            sql = "INSERT INTO %s (status,count,cal_ts) VALUES (\'%s\',\'%s\',\'%s\')" % (table, status, count, self.day)
            DBHandler.insert(self.db, sql)

    def insert_seo_daily(self, page_count):
        """Persist the page-category counters."""
        for page in page_count:
            count = page_count[page]
            sql = "INSERT INTO lie_seo_spider_daily (page,count,cal_ts) VALUES (\'%s\',\'%s\',\'%s\')" % (page, count, self.day)
            DBHandler.insert(self.db, sql)

    def insert_seo_ip_section(self, ip_section):
        """Persist the /24 IP-section counters."""
        for ip in ip_section:
            count = ip_section[ip]
            sql = "INSERT INTO lie_seo_spider_ip_section (ip_section,spider_count,cal_ts) VALUES (\'%s\',\'%s\',\'%s\')" % (ip, count, self.day)
            DBHandler.insert(self.db, sql)
import re
import subprocess
import requests
import json
import time
import multiprocessing as mp
from config.conn_list import ConnList
from utils.db_handler import DBHandler
from utils.date_handler import DateHandler
from concurrent.futures import ThreadPoolExecutor
db = ConnList.Dashboard()


def deal_data(url, tid, dur, table):
    """Query the local SEO-check service for one URL and persist the
    Baidu inclusion status/time for record *tid* in *table*.

    *dur* throttles the call (seconds slept before the request).
    """
    time.sleep(dur)
    payload = requests.get(url).json()
    included_status = payload['has_included']
    included_time = payload['included_time']
    now_ts = DateHandler.now_datetime()
    # Only a well-formed Chinese timestamp counts as a usable inclusion
    # time; in every other case the stored time falls back to 0.
    stamp_pattern = '([0-9]+)年([0-9]+)月([0-9]+)日 ([0-9]+):([0-9]+):([0-9]+)'
    if included_status == 1 and re.match(stamp_pattern, included_time, re.M | re.I):
        included_time = DateHandler.str_to_unix(included_time, "%Y年%m月%d日 %H:%M:%S")
    else:
        included_time = 0
    sql = "UPDATE %s SET is_include=%d,include_time=%d,update_time=%d WHERE id=%d" \
          % (table, included_status, included_time, now_ts, tid)
    DBHandler.update(db, sql)
class PiLxLogUrl:
    """Baidu-inclusion tracking for spider-crawled URLs.

    Consumes the url.txt dump produced by PiLxLog.wash_data, records
    each crawled URL in lie_seo_bd_record, queries the local SEO-check
    service for inclusion status, and aggregates included /24 sections.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.db = ConnList.Dashboard()
        self.day = DateHandler.now_date(1, 1)  # yesterday, 'YYYY-MM-DD'
        self.ip_section = {}                   # /24 prefix -> included count
        self.post_url = "http://127.0.0.1:12588/seocheck"
        # NOTE(review): never used in this class -- confirm before removing
        self.executor = ThreadPoolExecutor(max_workers=20)

    def del_file(self):
        """Remove the consumed url.txt dump."""
        cmd = "rm -f /data3/hdfs_data/spider_url/url.txt"
        subprocess.getoutput(cmd)

    def read_file(self):
        """Load url.txt lines ('url|timestamp|ip') into lie_seo_bd_record.

        Restructured to `for line in f`: the original while/readline loop
        could spin forever when a malformed line raised before the next
        readline() call. Malformed lines are still skipped best-effort.
        """
        now_ts = DateHandler.now_datetime()
        with open('/data3/hdfs_data/spider_url/url.txt', 'r', encoding='utf-8') as f:
            for line in f:
                try:
                    arr = line.split('|')
                    url = self.reg_url(arr[0])
                    ts = int(arr[1])  # renamed from `time`: stop shadowing the module
                    ip = arr[2]
                    sql = "INSERT INTO lie_seo_bd_record (url,spider_time,is_include,include_time,update_time,ip) VALUES (\'%s\',%d,%d,%d,%d,\'%s\')" % (url, ts, 0, 0, now_ts, ip)
                    DBHandler.insert(self.db, sql)
                except:
                    # malformed line: best-effort skip
                    pass

    def reg_url(self, url):
        """Strip the query string; return the URL unchanged when it has none.

        BUG FIX: the original returned None for URLs without '?', which
        ended up as the literal string 'None' in the INSERT above.
        """
        if '?' in url:
            return url.split('?')[0]
        return url

    def read_data(self):
        """Fetch every recorded spider URL row."""
        sql = "SELECT id,url,ip,is_include FROM lie_seo_bd_record"
        data = DBHandler.read(self.db, sql)
        return data

    def include_baidu(self, data):
        """Check Baidu inclusion for every row through a process pool."""
        p1 = mp.Pool(processes=10)
        for row in data:
            tid = row[0]
            url = row[1]
            url = "http://127.0.0.1:12588/seocheck?target=www.ichunt.com" + url
            p1.apply_async(deal_data, (url, tid, 0.1, 'lie_seo_bd_record'))
        p1.close()
        p1.join()

    # Inclusion analysis
    def include_analyzing(self, data):
        """Count included (is_include == 1) rows per /24 IP section."""
        for row in data:
            ip = row[2]
            is_include = row[3]
            if (re.match('([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)', ip, re.M | re.I)) and (is_include == 1):
                ip_arr = ip.split('.')
                new_ip = '%s.%s.%s' % (ip_arr[0], ip_arr[1], ip_arr[2])
                if new_ip not in self.ip_section:
                    self.ip_section[new_ip] = 1
                else:
                    self.ip_section[new_ip] += 1

    # Inclusion check for hand-picked pages
    def include_appoint_analyzing(self):
        """Re-check inclusion for the manually-appointed URL list."""
        sql = "SELECT id,url FROM lie_seo_appoint_record"
        result = DBHandler.read(self.db, sql)
        for row in result:
            tid = row[0]
            url = "http://127.0.0.1:12588/seocheck?target=" + row[1]
            deal_data(url, tid, 0.1, 'lie_seo_appoint_record')

    # Persist the inclusion counters
    def insert_seo_ip_section(self):
        """Write the per-/24 inclusion counts for the day."""
        for ip in self.ip_section:
            count = self.ip_section[ip]
            sql = "INSERT INTO lie_seo_include_ip_section (ip_section,include_count,cal_ts) VALUES (\'%s\',\'%s\',\'%s\')" % (ip, count, self.day)
            DBHandler.insert(self.db, sql)

    def pipeline_lx_log_url(self):
        """End-to-end pipeline: ingest, check, aggregate, persist, clean up."""
        self.read_file()
        data = self.read_data()
        self.include_baidu(data)
        self.include_analyzing(data)
        self.include_appoint_analyzing()
        self.insert_seo_ip_section()
        self.del_file()
from extract.ex_market import ExMarket
from translate.ts_base import TsBase
from translate.ts_order import TsOrder
class PiMarket:
    """Marketing pipelines: lottery participation and coupon-driven orders."""

    @staticmethod
    def pipeline_lottery():
        """Print participant and draw counts for lottery 61."""
        prize_rows = ExMarket('抽奖数据').ex_prize_log({'condition': ['lottery_id = 61']})
        base = TsBase('抽奖转换', prize_rows)
        print('抽奖人数', base.no_repeat_people())
        print('抽奖次数', base.return_times())

    @staticmethod
    def pipeline_coupon_order():
        """Print order stats for coupons 243-245 (CNY rate 6.9)."""
        filters = {'condition': ['c.coupon_id in (243,244,245)', 'o.order_amount is not NULL']}
        coupon_rows = ExMarket('抽奖数据').ex_user_coupon(filters)
        print(coupon_rows)
        order_ts = TsOrder('下单概览转换', coupon_rows)
        print(order_ts.return_times(), order_ts.trans_order_target(6.9))
from extract.ex_order import ExOrder
from extract.ex_user import ExUser
from translate.ts_order import TsOrder
from config.conn_list import ConnList
from utils.db_handler import DBHandler
from utils.date_handler import DateHandler
from utils.excel_handler import ExcelHandler
from translate.ts_user import TsUser
import time
class PiOrder:
"""
订单概况
"""
    @staticmethod
    def pipeline_order_people():
        # NOTE(review): dead code. This version only *defines* the inner
        # pipeline_order_view and never calls it, so invoking
        # pipeline_order_people() here is a no-op; it is also shadowed by
        # the later pipeline_order_people definition in this class.
        # Kept as-is for reference.
        def pipeline_order_view():
            where = {'start_time': 1539273600, 'end_time': 1541001600, 'condition': ['o.order_goods_type = 1',
                                                                                     'o.order_pay_type != 3',
                                                                                     'o.order_source like \'%pf=-1%\'',
                                                                                     'o.status in (7 ,8, 10)']}
            # where = {'start_time': 1541001600, 'end_time': 1543593600, 'condition': ['order_pay_type != 3',
            #                                                                          'order_source not like \'%pf=-1%\'']}
            # The dict above is immediately overwritten by this one.
            where = {'start_time': 1554825600, 'end_time': 1556294400,
                     'condition': ['order_source like \'%adtag=qq%\'']}
            ex_order = ExOrder('订单')
            data = ex_order.person_order_detail(where)
            ts_order = TsOrder('订单', data=data)
            print(ts_order.trans_order_target(6.8))
            # Excel export (disabled)
            # ts_order.trans_order_people(6.95)
@staticmethod
def pipeline_order_people():
# where = {'start_time': 1539273600, 'end_time': 1541001600, 'condition': ['o.order_goods_type = 1',
# 'o.order_pay_type != 3',
# 'o.order_source like \'%pf=-1%\'',
# 'o.status in (7 ,8, 10)']}
where = {'start_time': 1538323200, 'end_time': 1541001600, 'condition': ['o.order_pay_type != 3',
'o.order_source not like \'%pf=-1%\'',
'o.status > 2']}
# E
ex_order = ExOrder('订单详情')
data = ex_order.person_order_detail(where)
......@@ -21,15 +47,256 @@ class PiOrder:
    @staticmethod
    def pipeline_order_items():
        """Ad-hoc order-item analysis for one supplier/time window.

        NOTE(review): exploratory/debug code -- the first `where` literal
        is dead (immediately overwritten) and a hard-coded debug print
        fires for user_id 50393.
        """
        # Dead: overwritten by the next assignment
        where = {'start_time': 1538150400, 'end_time': 1541952000, 'condition': ['i.supplier_id = 17']}
        where = {'start_time': 1552320000, 'end_time': 1555084800,
                 'condition': ['i.supplier_id = 1',
                               # 'o.status > 2',
                               # 'o.order_source like \'%adtag=qq%\'',
                               'o.order_pay_type != 3',
                               'o.order_source not like \'%pf=-1%\'']}
        # where = {'start_time': 1551715200, 'end_time': 1552665600,
        #          'condition': ['i.supplier_id in (5,10)',
        #                        # 'o.status > 2',
        #                        'o.order_pay_type != 3',
        #                        'o.order_source like \'%adtag=qq%\'',
        #                        'o.order_source not like \'%pf=-1%\'']}
        # E(xtract)
        ex_order = ExOrder('order detail')
        data = ex_order.order_items(where)
        for row in data:
            if row['user_id'] == 50393:  # debug probe for a single user
                print(row)
        # T(ransform)
        ts_order = TsOrder('', data=data)
        ts_order.trans_order_items_canal("L0000089")
        # Order summary (6.7 presumably the CNY/USD rate -- TODO confirm)
        result = ts_order.trans_order_target(6.7)
        print(result)
        ts_order.trans_order_people(6.7)
        # # New/old user breakdown
        # ts_user = TsUser('', data=data)
        # user = ts_user.trans_no_user(where['start_time'], 6.7)
        # Channel filter
        # ts_order.trans_order_items_canal("L0000089")
@staticmethod
def pipeline_order_coupon():
    """Print coupon statistics for completed orders (status > 2)."""
    time_window = {
        'start_time': 1541001600,
        'end_time': 1543248000,
        'condition': ['o.status > 2'],
    }
    price_rows = ExOrder('order detail').order_price(time_window)
    # 6.95 is presumably the USD->CNY conversion rate — TODO confirm.
    print(TsOrder('', data=price_rows).trans_order_coupon(6.95))
@staticmethod
def pipeline_order_offer():
    """Refresh per-user trade stats on lie_offer from active offers.

    Reads active offers (status = 1), aggregates per-user order/sku
    counts and last order time via TsOrder, then updates each user's
    row in lie_offer.

    Fix: the original opened a brand-new cursor for every user and
    never closed it; reuse a single cursor and always close the
    cursor and connection.
    """
    where = {'condition': ['status = 1']}
    data = ExUser('offer').lx_offer(where)
    ts_data = TsOrder('', data).trans_user_offer()
    # Persist the aggregates back to the dashboard DB.
    db = ConnList.Dashboard()
    cursor = db.cursor()
    try:
        for user_id in ts_data:
            row = ts_data[user_id]
            sql = "UPDATE lie_offer SET trade_order_num=%d, trade_sku_num=%d, last_order_time=%d, \
                   update_time=%d WHERE user_id = %d" \
                  % (row['trade_order_num'], row['trade_sku_num'],
                     row['last_order_time'], DateHandler.now_datetime(), user_id)
            cursor.execute(sql)
            # Commit per row so partial progress survives a failure,
            # matching the original behavior.
            db.commit()
    finally:
        cursor.close()
        db.close()
@staticmethod
def pipeline_output_order_detail():
    """Export self-operated (order_goods_type = 2) order details to zy.xls."""
    # (column header, row key) pairs, in output order.
    columns = [
        ('订单ID', 'order_id'),
        ('订单编号', 'order_sn'),
        ('会员账号', 'mobile'),
        ('收货人', 'consignee'),
        ('下单日期', 'create_time'),
        ('平台来源', 'pf'),
        ('商品型号', 'goods_name'),
        ('制造商', 'brand_name'),
        ('供应商', 'supplier_name'),
        ('数量', 'goods_number'),
        ('单价', 'goods_price'),
        ('均摊后单价', 'single_pre_price'),
        ('商品小计', 'sum_price'),
        ('订单总计', 'order_amount'),
        ('币种', 'currency'),
        ('订单支付类型', 'order_pay_type'),
        ('订单状态', 'status'),
        ('收货地址', 'address'),
        ('发票类型', 'inv_type'),
        ('发票抬头', 'tax_title'),
        ('公司电话', 'company_phone'),
        ('adtag来源', 'order_source'),
        ('取消原因', 'cancel_reason'),
    ]
    title = [header for header, _ in columns]
    content = [key for _, key in columns]
    where = {'start_time': 0, 'end_time': 1551801600, 'condition': ['order_goods_type = 2']}
    data = ExOrder('order').all_order(where)
    ExcelHandler.write_to_excel_with_openpyxl(title, content,
                                              TsOrder('', data).trans_order_excel(), "zy.xls")
    print(len(data))
@staticmethod
def pipeline_order_whatever():
    """Export repeat, high-value buyers (>=3 orders and >=5000 CNY in the
    2018 window) who placed no order in the 2019 follow-up window."""
    stats = {}
    where = {'start_time': 1514736000, 'end_time': 1546272000, 'condition': ['status > 2']}
    for row in ExOrder('订单').all_order(where):
        uid = row['user_id']
        # Normalise to CNY; 6.7 is presumably the USD rate — TODO confirm.
        amount = float(row['order_amount'])
        if row['currency'] != 1:
            amount *= 6.7
        entry = stats.setdefault(uid, {'count': 0, 'amount': 0})
        entry['count'] += 1
        entry['amount'] += amount
    # Keep only repeat, high-value buyers.
    qualified = {uid: s for uid, s in stats.items()
                 if s['count'] >= 3 and s['amount'] >= 5000}
    title = ['会员账号', '公司名称', '收货人', '收货电话', '订单笔数', '订单金额']
    content = ['account', 'com_name', 'consignee', 'mobile', 'count', 'amount']
    rows = []
    db = ConnList.Order()
    for uid in qualified:
        recent_sql = "SELECT 1 FROM lie_order WHERE create_time BETWEEN 1548950400 AND 1554998400 AND user_id = %d" % uid
        # Only users with no order in the follow-up window are exported.
        if len(DBHandler.read(db, recent_sql)) == 0:
            info_sql = "SELECT u.mobile,u.email,com_name,consignee,a.mobile FROM lie_user_main u \
                        LEFT JOIN lie_user_company c ON u.user_id = c.user_id \
                        LEFT JOIN lie_user_address a ON u.user_id = a.user_id \
                        WHERE u.user_id = %d" % uid
            info = DBHandler.read(db, info_sql)[0]
            rows.append({
                'account': info[0] if info[0] != '' else info[1],
                'com_name': info[2],
                'consignee': info[3],
                'mobile': info[4],
                'count': qualified[uid]['count'],
                'amount': qualified[uid]['amount'],
            })
    ExcelHandler.write_to_excel(title, content, rows, "result", result_type=2)
@staticmethod
def pipeline_order_log():
    """Export per-user, per-day payment summaries to Excel.

    For pay events in [start_time, end_time], aggregates paid amounts
    per user / pay-date / order, resolves the sales rep's name from
    the IC user DB, collects brand and supplier names for each day's
    orders, and writes one Excel row per (user, pay day).
    """
    rd = {}  # user_id -> {tax_title, account, sale_man, order: {day: {order_id: amount}}}
    ld = []  # flattened rows for the Excel writer
    start_time = 1560700800
    end_time = 1561737600
    db = ConnList.Order()
    # Paid, non-test, standard orders joined with payer and invoice info.
    sql = "SELECT u.user_id,u.mobile,u.email,p.pay_amount,o.currency,o.sale_id,i.tax_title,i.nike_name,p.pay_time,p.order_id,o.order_pay_type FROM lie_pay_log p \
           LEFT JOIN lie_order o ON p.order_id = o.order_id \
           LEFT JOIN lie_user_main u ON p.user_id = u.user_id \
           LEFT JOIN lie_order_invoice i ON i.order_id = p.order_id \
           WHERE p.pay_time BETWEEN %d AND %d AND o.order_type = 1 AND o.order_pay_type != 3 AND o.order_goods_type IN (1,2) AND o.is_type = 0 AND u.is_test = 0 AND u.is_type = 0" % \
          (start_time, end_time)
    data = DBHandler.read(db, sql)
    for row in data:
        user_id = row[0]
        mobile = row[1]
        email = row[2]
        # Currency 1 is kept as-is; anything else converted at 6.8
        # (presumably the USD->CNY rate — TODO confirm).
        pay_amount = float(row[3]) if row[4] == 1 else float(row[3]) * 6.8
        sale_id = row[5]
        tax_title = row[6]
        nike_name = row[7]
        pay_time = DateHandler.unix_to_date(row[8], "%Y-%m-%d")
        order_id = row[9]
        if user_id not in rd:
            # First sighting of this user: resolve the sales rep name.
            ic_db = ConnList.IcData()
            ic_sql = 'select name from user_info where userId = %d' % sale_id
            results = DBHandler.read(db=ic_db, sql=ic_sql)
            sale_man = results[0][0] if len(results) > 0 else ''
            rd[user_id] = {'tax_title': tax_title + ' ' + nike_name,
                           'account': mobile if mobile != '' else email,
                           'sale_man': sale_man,
                           'order': {pay_time: {order_id: pay_amount}}}
        else:
            # Accumulate into the (day, order) bucket.
            if pay_time not in rd[user_id]['order']:
                rd[user_id]['order'][pay_time] = {order_id: pay_amount}
            else:
                if order_id in rd[user_id]['order'][pay_time]:
                    rd[user_id]['order'][pay_time][order_id] += pay_amount
                else:
                    rd[user_id]['order'][pay_time][order_id] = pay_amount
    # Flatten: one output row per (user, pay day).
    for r in rd:
        tax_title = rd[r]['tax_title']
        account = rd[r]['account']
        sale_man = rd[r]['sale_man']
        order = rd[r]['order']
        for pt in order:
            am = 0   # total paid that day
            ol = ''  # comma-separated order ids for the IN clause
            for oi in order[pt]:
                am += order[pt][oi]
                if str(oi) not in ol:
                    ol += str(oi) + ','
            ol = ol[:-1]  # drop trailing comma
            sql = "SELECT brand_name,supplier_name FROM lie_order_items WHERE order_id IN (%s)" % ol
            bs = DBHandler.read(db, sql)
            bn = ""
            sn = ""
            # De-duplicated, comma-joined brand / supplier name strings.
            # NOTE(review): substring containment ('x not in bn') can
            # wrongly skip a name that is a substring of another.
            for b in bs:
                brand_name = b[0]
                supplier_name = b[1]
                if brand_name not in bn:
                    bn += brand_name + ","
                if supplier_name not in sn:
                    sn += supplier_name + ","
            if r == 41558:
                # Leftover debug trace for one specific user.
                print(am)
            ld.append({'tax_title': tax_title,
                       'account': account,
                       'pay_time': pt,
                       'pay_amount': am,
                       'sale_man': sale_man,
                       'supplier_name': sn,
                       'brand_name': bn})
    title = ['姓名&公司名', '手机号', '付款时间', '单笔付款金额(元)', '对应交易员', '供应商', '品牌']
    content = ['tax_title', 'account', 'pay_time', 'pay_amount', 'sale_man', 'supplier_name', 'brand_name']
    ExcelHandler.write_to_excel(title, content, ld, "result", result_type=2)
from extract.ex_dashboard import ExDashboard
from extract.ex_user import ExUser
from extract.ex_behavior import ExBehavior
from extract.ex_order import ExOrder
from translate.ts_page import TsPage
from utils.date_handler import DateHandler
from load.load_mysql import LoadMysql
from config.conn_list import ConnList
class PiPage:
    """Daily per-page (ptag) funnel: registration/login, behavior, order
    and traffic metrics per page, written to lie_page_cal."""

    def __init__(self):
        # All configured ptags from the dashboard DB.
        self.ptag = ExDashboard('获取adtag').lie_ptag()
        # Translator that accumulates per-page / per-platform results.
        self.ts_page = TsPage(self.ptag)

    def pipeline_pageTag(self):
        """Compute yesterday's per-page metrics and load them into MySQL."""
        self.ts_page.trans_sc_regex()
        # Registration / login numbers for yesterday.
        where = {'start_time': DateHandler.date_time(1),
                 'end_time': DateHandler.date_time(0),
                 'condition': []}
        reg_data = ExUser('').reg_user(where)
        log_data = ExUser('').login_user(where)
        self.ts_page.trans_user_rl(reg_data, 'reg_remark', 'create_device', 'reg_count')
        self.ts_page.trans_user_rl(log_data, 'login_remark', 'platform', 'log_count')
        # Yesterday's behavior log, excluding in-house test IPs.
        where = {'start_time': DateHandler.date_time(1), 'end_time': DateHandler.date_time(0),
                 'condition': ['behavior_id > 0']}
        be_data = ExBehavior('行为转化').behavior_log(where)
        test_ip = []
        data = ExUser('测试ip').test_ip()
        for d in data:
            test_ip.append(d['ip'])
        self.ts_page.trans_be_times(be_data, test_ip)
        # Yesterday's orders (same time window, any order).
        where['condition'] = ['order_id > 0']
        all_order = ExOrder('').all_order(where)
        self.ts_page.trans_order(all_order)
        # Flatten pc/h5 results. The *_people_count fields are collected
        # as containers of users; their sizes become the stored counts.
        rd = []
        col = ['pv_count', 'uv_count', 'ip_count', 'entry_count', 'outward_count',
               'exit_count', 'stay_time_count', 'reg_count', 'log_count', 'insert_time',
               'cal_ts', 'page_id', 'platform', 'detail_count', 'addcart_count',
               'buy_count', 'order_count', 'order_pay_count', 'addcart_people_count',
               'buy_people_count', 'search_people_count', 'reg_people_count',
               'log_people_count', 'qq_people_count', 'confirm_count', 'search_count',
               'no_result_qq', 'result_qq', 'right_qq', 'top_qq']
        for pd in self.ts_page.pd:
            for pf in ['pc', 'h5']:
                rs = self.ts_page.pd[pd][pf]
                rs['addcart_people_count'] = len(rs['addcart_people_count'])
                rs['buy_people_count'] = len(rs['buy_people_count'])
                rs['search_people_count'] = len(rs['search_people_count'])
                rs['reg_people_count'] = len(rs['reg_people_count'])
                rs['log_people_count'] = len(rs['log_people_count'])
                rs['qq_people_count'] = len(rs['qq_people_count'])
                rs['cal_ts'] = DateHandler.now_date(1, 1)
                rs['insert_time'] = DateHandler.now_datetime()
                rd.append(rs)
        LoadMysql.simple_dict_load(col, 'lie_page_cal', rd, db=ConnList.Dashboard(), cal_time=False)
from extract.ex_purchase import ExPurchase
from extract.ex_order import ExOrder
from translate.ts_purchase import TsPurchase
from pipeline.pi_email import PiEmail
from load.load_mysql import LoadMysql
from utils.date_handler import DateHandler
from config.conn_list import ConnList
class PiPurchase:
    """Purchasing pipelines (safety-stock alerting)."""

    @staticmethod
    def pipeline_safe_stock():
        """Flag SKUs whose available stock fell below MPQ, email the
        warning count and persist detail rows to lie_safe_stock_cal."""
        # ---Stock--- active SKUs with purchase_num + stock + wait_stock < mpq.
        where = {'condition': ['a.status = 1', '(purchase_num + stock + wait_stock) < (mpq)']}
        ex_purchase = ExPurchase('采购数据')
        stock_data = ex_purchase.safe_stock(where)
        # Completed self-operated orders used as the demand signal.
        # NOTE(review): the original comment said "last 90 days" but
        # start_time is 0, i.e. all history — confirm which is intended.
        now_time = DateHandler.date_time(0)
        where = {'start_time': 0, 'end_time': now_time,
                 'condition': ['status >= 8', 'order_goods_type = 2']}
        ex_order = ExOrder('订单数据')
        order_data = ex_order.all_order(where)
        # goods_id -> ordered quantity for those orders.
        goods_data = ex_order.order_items_specify(order_data)
        # Intersect demanded goods with low-stock SKUs.
        warn_sku = {}
        for g_sku in goods_data:
            for s_sku in stock_data:
                if g_sku == s_sku['sku_id']:
                    warn_sku[g_sku] = {'out_stock': goods_data[g_sku], 'packing': s_sku['packing_name'], 'sku_id': g_sku}
                    break
        # Enrich the warning SKUs from WMS.
        TsPurchase.trans_sku_wms(warn_sku)
        # Drop SKUs that are no longer on sale.
        warn_sku = TsPurchase.trans_rid_soldout_goods(warn_sku)
        # Alert email carrying the warning count.
        PiEmail.pipeline_safe_stock(len(warn_sku))
        # Persist detail rows to MySQL.
        load_col = ['sku_id', 'goods_name', 'class_name', 'brand_name', 'encap', 'mpq', 'goods_status',
                    'goods_type', 'purchase_num', 'in_num', 'lock_stock', 'wait_stock', 'stock', 'actual_stock',
                    'insert_time', 'cal_ts', 'packing', 'out_stock', 'supplier_name', 'creater', 'sum_picking_number',
                    'max_picking_price', 'min_picking_price', 'sum_putaway_number']
        LoadMysql.sample_load(load_col, 'lie_safe_stock_cal', warn_sku, db=ConnList.Dashboard())
from config.conn_list import ConnList
from utils.db_handler import DBHandler
from utils.date_handler import DateHandler
import re
import traceback
class PiRank:
    """Goods-ranking collectors feeding the lx_rank DB."""

    def __init__(self):
        super().__init__()
        self.lx_db = ConnList.Order()      # source order DB
        self.lx_rank = ConnList.LxRank()   # destination rank DB
        self.now_ts = DateHandler.now_datetime()

    def rank_one(self, start_time, end_time):
        """Insert each distinct ordered goods_name in [start_time,
        end_time] into lie_goods_rank1, skipping names already present."""
        sql = "SELECT goods_name \
               FROM lie_order_items i \
               LEFT JOIN lie_order o ON i.order_id = o.order_id \
               LEFT JOIN lie_user_main u ON i.user_id = u.user_id \
               WHERE o.is_type = 0 \
               AND o.order_type = 1 \
               AND o.order_goods_type = 1 \
               AND u.is_test = 0 \
               AND u.is_type = 0 \
               AND i.status = 1 \
               AND o.create_time BETWEEN %d AND %d \
               GROUP BY goods_name" % (start_time, end_time)
        result = DBHandler.read(self.lx_db, sql)
        for row in result:
            goods_name = row[0]
            # NOTE(review): goods_name is interpolated unescaped; a name
            # containing a quote breaks the statement (injection risk).
            sql = "SELECT 1 FROM lie_goods_rank1 WHERE goods_name = \'%s\'" % goods_name
            is_exist = DBHandler.read(self.lx_rank, sql)
            if len(is_exist) == 0:
                sql = "INSERT INTO lie_goods_rank1 (goods_name,insert_time) VALUES (\'%s\',%d)" % (goods_name, self.now_ts)
                DBHandler.insert(self.lx_rank, sql)

    def rank_three(self):
        """Scan yesterday's access log on HDFS for goods_<id> URLs and
        insert unseen goods ids into lie_goods_rank3."""
        day = DateHandler.now_date(1, 1)
        # Build the dated HDFS path, e.g. /lx_log/lx_log_YYMMDD/sum.
        hdfs_file = '/lx_log/lx_log_' + day.split('-')[0][2:4] + day.split('-')[1] + day.split('-')[2] + "/sum"
        hdfs_data = DBHandler.hdfs_read(hdfs_file)
        print(len(hdfs_data))
        for row in hdfs_data:
            try:
                row = row.decode("utf-8")
                arr = row.split(" ")
                # Field 7 of the space-separated log line holds the URL.
                url = arr[6]
                if 'goods_' in url:
                    # Capture group 4 is the numeric goods id.
                    matchObj = re.match('((.*)(goods_)(\d+)(.*))', url, re.M | re.I)
                    if matchObj:
                        goods_id = matchObj.group(4)
                        sql = "SELECT 1 FROM lie_goods_rank3 WHERE goods_id = \'%s\'" % (goods_id)
                        is_exist = DBHandler.read(self.lx_rank, sql)
                        if len(is_exist) == 0:
                            sql = "INSERT INTO lie_goods_rank3 (goods_id,insert_time) VALUES (\'%s\',%d)" % (goods_id, self.now_ts)
                            DBHandler.insert(self.lx_rank, sql)
            except:
                # Malformed / undecodable log lines: report and continue.
                traceback.print_exc()
\ No newline at end of file
from extract.ex_user import ExUser
from extract.ex_order import ExOrder
from translate.ts_rate import TsRate
from translate.ts_order import TsOrder
class PiRate:
"""
复购
"""
@staticmethod
def pipeline_re_purchase():
# ex_user = ExUser('计算复购')
# ex_order = ExOrder('计算读取订单')
# TsRate.trans_re_put_account(ex_user, ex_order)
where = {'start_time': 1543766400, 'end_time': 1546272000,
'condition': ['o.order_pay_type != 3',
'o.order_source not like \'%pf=-1%\'',
'o.status > 2']}
ex_order = ExOrder('订单')
data = ex_order.person_order_detail(where)
ts_rate = TsRate('计算复购', data)
ts_rate.trans_fb_data(6.88)
"""
首购
"""
@staticmethod
def pipeline_fs_purchase():
# 概览
# where = {'start_time': 1541001600, 'end_time': 1543593600,
# 'condition': ['order_pay_type != 3',
# 'order_source not like \'%pf=-1%\'']}
# 详情
where = {'start_time': 1551369600, 'end_time': 1553616000,
'condition': [
# 'order_pay_type != 3',
# 'status in (7,8,10)',
'o.status > 2',
# 'order_source like \'%adtag=qq%\'',
'order_source not like \'%pf=-1%\'']}
# 详情
# where = {'start_time': 1541001600, 'end_time': 1543593600,
# 'condition': ['order_pay_type != 3',
# 'order_source not like \'%pf=-1%\'',
# 'order_source like \'%adtag=wechat%\'']}
ex_fs = ExUser('计算首购')
data = ex_fs.first_buy_user(where)
print(len(data))
TsRate('', data).trans_total_paid()
# print(data)
# 常用指标
# ts_view = TsOrder('概览', data)
# print(ts_view.trans_order_target(6.88))
# # 首购详情
# ts_rate = TsRate('转化', data)
# ts_rate.trans_fb_data(6.88)
from utils.date_handler import DateHandler
from utils.db_handler import DBHandler
from extract.ex_user import ExUser
from translate.ts_user import TsUser
from pipeline.pi_email import PiEmail
from load.load_mysql import LoadMysql
from config.conn_list import ConnList
import socket, struct
class PiSearch:
    """Search-log reporting (no-result keyword statistics)."""

    @staticmethod
    def search_no_result():
        """Report the top no-result search keywords: mail the summary
        and persist PC/H5 tops to MySQL. Runs over the previous week on
        Mondays, otherwise over yesterday."""
        # Choose the time window and mail-template key by weekday.
        day = DateHandler.weekday()
        if day == 0:
            end_time = DateHandler.date_time(0)
            start_time = DateHandler.date_time(7)
            keyword = 'keyword-weekly'
        else:
            end_time = DateHandler.date_time(0)
            start_time = DateHandler.date_time(1)
            keyword = 'keyword-daily'
        # In-house test IPs, converted from packed ints to dotted-quad.
        test_ip = []
        data = ExUser('测试ip').test_ip()
        for d in data:
            ip = socket.inet_ntoa(struct.pack('!L', d['ip']))
            test_ip.append(ip)
        # No-result searches (flag = 2) from the search service.
        no_result = DBHandler.scroll_read(url="http://so12.ichunt.com/search/searchLog/index",
                                          body={"create_time/range": "%d,%d" % (start_time, end_time),
                                                "flag/condition": "2", "p": 1,
                                                "offset": "1000", "is_scroll": "1"},
                                          key='list')
        # All searches in the same window.
        all_result = DBHandler.scroll_read(url="http://so12.ichunt.com/search/searchLog/index",
                                           body={"create_time/range": "%d,%d" % (start_time, end_time),
                                                 "p": 1, "offset": "1000", "is_scroll": "1"},
                                           key='list')
        # Keyword frequency stats per platform.
        ts_keyword = TsUser('搜索词统计', no_result)
        no_result_pc = ts_keyword.trans_search_keyword(test_ip=test_ip, pf='PC')
        no_result_h5 = ts_keyword.trans_search_keyword(test_ip=test_ip, pf='H5')
        all_result_ph = TsUser('所有搜索词统计', all_result).trans_search_keyword(test_ip=test_ip)
        # Flatten the top 20 of each list into the mail-template dict;
        # '-' pads missing ranks and quotes are stripped from keywords.
        send_data = {}
        for i in range(20):
            send_data['pc_k_' + str(i)] = no_result_pc[i][0].replace('\'', '') if i < len(no_result_pc) else '-'
            send_data['pc_n_' + str(i)] = no_result_pc[i][1] if i < len(no_result_pc) else '-'
            send_data['h5_k_' + str(i)] = no_result_h5[i][0].replace('\'', '') if i < len(no_result_h5) else '-'
            send_data['h5_n_' + str(i)] = no_result_h5[i][1] if i < len(no_result_h5) else '-'
            send_data['all_k_' + str(i)] = all_result_ph[i][0].replace('\'', '') if i < len(all_result_ph) else '-'
            send_data['all_n_' + str(i)] = all_result_ph[i][1] if i < len(all_result_ph) else '-'
        send_data['size1'] = len(no_result_pc) if len(no_result_pc) <= 20 else 20
        send_data['size2'] = len(no_result_h5) if len(no_result_h5) <= 20 else 20
        send_data['all_size1'] = len(no_result_pc)
        send_data['all_size2'] = len(no_result_h5)
        send_data['all_size3'] = len(all_result_ph)
        send_data['yes_day'] = DateHandler.now_date(1, 1)
        send_data['week'] = '上周'
        # Reshape for the MySQL loader (platform 1 = PC, 2 = H5).
        send_no_r_pc = {}
        index = 0
        for row in no_result_pc:
            send_no_r_pc[index] = {'keyword': row[0].replace('\'', ''), 'count': row[1], 'platform': 1}
            index += 1
        send_no_r_h5 = {}
        index = 0
        for row in no_result_h5:
            send_no_r_h5[index] = {'keyword': row[0].replace('\'', ''), 'count': row[1], 'platform': 2}
            index += 1
        # Persist and send the report mail.
        load_col = ['keyword', 'count', 'platform', 'insert_time', 'cal_ts']
        LoadMysql.sample_load(load_col, 'lie_search_no_result_cal', send_no_r_pc, db=ConnList.Dashboard())
        LoadMysql.sample_load(load_col, 'lie_search_no_result_cal', send_no_r_h5, db=ConnList.Dashboard())
        PiEmail.pipeline_search_no_result(send_data, keyword)
from extract.ex_user import ExUser
from extract.ex_dashboard import ExDashboard
from extract.ex_behavior import ExBehavior
from extract.ex_order import ExOrder
from extract.ex_sku_expose import ExSkuExpose
from extract.ex_shence import ExShenCe
from translate.ts_tag import TsTag
from utils.date_handler import DateHandler
from config.conn_list import ConnList
from load.load_mysql import LoadMysql
class PiTag:
    """Adtag attribution pipelines: combine Sensors Analytics (神策) URL
    stats, behavior logs, SKU exposure, registrations/logins and orders
    into per-adtag daily rollups."""

    def __init__(self):
        # In-house test IPs to exclude from behavior stats.
        self.test_ip = []
        data = ExUser('测试ip').test_ip()
        for d in data:
            self.test_ip.append(d['ip'])
        # Configured adtags.
        ex_dash = ExDashboard('获取adtag')
        self.adtag = ex_dash.lie_adtag()
        # Per-URL stats from Sensors Analytics.
        self.sc = ExShenCe('神策').url_detail()
        # Yesterday's behavior log.
        where = {'start_time': DateHandler.date_time(1), 'end_time': DateHandler.date_time(0),
                 'condition': ['behavior_id > 0']}
        self.be_data = ExBehavior('行为转化').behavior_log(where)
        # Yesterday's SKU exposure dump from HDFS.
        self.expose_data = ExSkuExpose.get_hdfs_data()
        # Registrations and logins for the same window.
        where['condition'] = []
        self.reg_data = ExUser('').reg_user(where)
        self.log_data = ExUser('').login_user(where)

    # Self-operated adtag rollup.
    def pipeline_zy_adtag(self):
        """Self-operated (order_goods_type = 2) adtag rollup into
        lie_zy_behavior_adtag_cal / lie_zy_behavior_adtag_order."""
        # Yesterday's self-operated orders.
        where = {'start_time': DateHandler.date_time(1), 'end_time': DateHandler.date_time(0),
                 'condition': ['order_goods_type = 2']}
        ex_order = ExOrder('订单数据')
        order_data = ex_order.all_order(where)
        # Seed the per-adtag result dict.
        ts_tag = TsTag(name='tag转化', data=self.adtag)
        init_data = ts_tag.trans_init_dict()
        ts_tag.set_data(init_data)
        # Fold in Sensors stats (the boolean flag presumably toggles
        # site-wide vs self-operated aggregation — TODO confirm).
        ts_tag.trans_bd_tag_value(self.sc, False)
        # Fold in behavior / order / exposure / reg / login stats.
        ts_tag.trans_be_tag_value(self.be_data, order_data, self.expose_data, self.reg_data,
                                  self.log_data, self.test_ip, False)
        # Write per-order detail rows; returns the summary rows.
        rd = ts_tag.trans_result(ts_tag.return_data(), 'lie_zy_behavior_adtag_order')
        # Summary rollup into MySQL.
        load_col = ['pv_count', 'pv_radio', 'search_count', 'search_people_count', 'reg_count', 'log_count',
                    'insert_time', 'cal_ts', 'adtag', 'platform', 'addcart_count', 'buy_count', 'order_count',
                    'addcart_people_count', 'buy_people_count']
        LoadMysql.simple_dict_load(load_col, 'lie_zy_behavior_adtag_cal', rd, db=ConnList.Dashboard(), cal_time=False)

    # Site-wide adtag rollup.
    def pipeline_all_adtag(self):
        """Site-wide adtag rollup into lie_behavior_adtag_cal /
        lie_behavior_adtag_order."""
        # Yesterday's orders (any order).
        where = {'start_time': DateHandler.date_time(1), 'end_time': DateHandler.date_time(0),
                 'condition': ['order_id > 0']}
        ex_order = ExOrder('订单数据')
        order_data = ex_order.all_order(where)
        # Seed the per-adtag result dict.
        ts_tag = TsTag(name='tag转化', data=self.adtag)
        init_data = ts_tag.trans_init_dict()
        ts_tag.set_data(init_data)
        # Fold in Sensors stats (site-wide flag — see note above; TODO confirm).
        ts_tag.trans_bd_tag_value(self.sc, True)
        # Fold in behavior / order / exposure / reg / login stats.
        ts_tag.trans_be_tag_value(self.be_data, order_data, self.expose_data, self.reg_data,
                                  self.log_data, self.test_ip, True)
        # Write per-order detail rows; returns the summary rows.
        rd = ts_tag.trans_result(ts_tag.return_data(), 'lie_behavior_adtag_order')
        # Summary rollup into MySQL.
        load_col = ['pv_count', 'pv_radio', 'search_count', 'search_people_count', 'reg_count', 'log_count',
                    'insert_time', 'cal_ts', 'adtag', 'platform', 'addcart_count', 'buy_count', 'order_count',
                    'addcart_people_count', 'buy_people_count']
        LoadMysql.simple_dict_load(load_col, 'lie_behavior_adtag_cal', rd, db=ConnList.Dashboard(), cal_time=False)
from pipeline.ex_base import Base
from extract.ex_user import ExUser
from extract.ex_order import ExOrder
from translate.ts_user import TsUser
from utils.excel_handler import ExcelHandler
from config.conn_list import ConnList
from utils.db_handler import DBHandler
from utils.date_handler import DateHandler
import time
class PiUser(Base):
class PiUser:
def pipeline_user_without_login(self):
@staticmethod
def pipeline_user_without_login():
where = {'start_time': 1527782400, 'end_time': 1535644800, 'condition': []}
where = {'start_time': 0, 'end_time': 1553134250, 'condition': []}
# E
ex_user = ExUser('新用户')
......@@ -15,4 +21,65 @@ class PiUser(Base):
# T
ts_user = TsUser('用户转化', data=data)
ts_user.trans_user_without_login(90)
# ts_user.trans_user_without_login(90, 1538323200, 1543507200)
ts_user.trans_user_without_login_time(1546272000)
@staticmethod
def pipeline_user_order():
    """Derive ordering-account stats from self-operated orders in the
    2018 window."""
    window = {
        'start_time': 1527782400,
        'end_time': 1543507200,
        'condition': ['order_goods_type = 2'],
    }
    orders = ExOrder('自营订单').all_order(window)
    TsUser('下单账号', data=orders).trans_user_order()
@staticmethod
def pipeline_user_output():
    """Enrich the 'recently registered, never ordered' spreadsheet via TsUser."""
    rows = ExcelHandler.read_to_excel('近期注册未下单用户.xlsx', 'Sheet1', 1)
    # Drop the header row before processing.
    del rows[0]
    TsUser('', rows).trans_user_info_3()
@staticmethod
def temp():
    """Ad-hoc report: distinct first-time QQ-channel buyers in a 2019
    spring window and their total order amount normalised to CNY.

    Fixes: track distinct users with a set (the original used a list,
    making membership checks O(n)) and close the DB connection when done.
    """
    db = ConnList.Order()
    buyers = set()
    sum_amount = 0
    # Orders in the window from users with no prior payment, QQ adtag only.
    order_sql = "SELECT user_id,order_amount,currency FROM lie_order WHERE create_time BETWEEN 1552233600 AND 1554480000 AND \
                 user_id NOT IN (SELECT p.user_id FROM lie_pay_log p LEFT JOIN lie_order o ON p.order_id = o.order_id WHERE p.pay_time < 1552233600 AND o.is_type = 0 GROUP BY p.user_id) AND is_type = 0 AND order_type = 1 AND order_source like \'%adtag=qq%\'"
    try:
        for user_id, order_amount, currency in DBHandler.read(db, order_sql):
            buyers.add(user_id)
            amount = float(order_amount)
            # Currency 1 is CNY; anything else converted at 6.7
            # (presumably the USD rate — TODO confirm).
            if currency != 1:
                amount *= 6.7
            sum_amount += amount
    finally:
        db.close()
    print(len(buyers), sum_amount)
from utils.date_handler import DateHandler
from extract.ex_order import ExOrder
from config.conn_list import ConnList
from utils.db_handler import DBHandler
import time
import requests
"""
周数据处理
"""
class PiWeekly:
    """Weekly batch jobs (run on Mondays) plus keyword-match reporting."""

    @staticmethod
    def pipeline_week_hot_goods():
        """Rebuild the weekly_hot_goods Redis hash: top-18 self-operated
        goods by quantity sold over the past 7 days."""
        d = time.strftime("%w", time.localtime())
        # Only run on Mondays.
        if int(d) == 1:
            goods = {}
            r = ConnList.WriteRedis()
            # r = ConnList.LocalRedis()
            r.delete('weekly_hot_goods')
            # Window: last Monday .. today.
            monday = DateHandler.date_time(7)
            sunday = DateHandler.date_time(0)
            # Completed self-operated order items from real users.
            condition = {'start_time': monday, 'end_time': sunday, 'condition': ['u.is_type = 0', 'o.order_goods_type = 2', 'i.goods_id != 0', 'o.status > 2']}
            order = ExOrder('').order_items(condition)
            # Sum sold quantity per goods_id.
            for row in order:
                goods_id = row['goods_id']
                goods_number = row['goods_number']
                if goods_id not in goods:
                    goods[goods_id] = goods_number
                else:
                    goods[goods_id] += goods_number
            res = sorted(goods.items(), key=lambda goods: goods[1], reverse=True)
            # Store the top 18.
            # NOTE(review): raises IndexError if fewer than 18 goods sold.
            for i in range(0, 18):
                goods_id = res[i][0]
                goods_count = res[i][1]
                r.hset("weekly_hot_goods", goods_id, goods_count)

    # Weekly first/second-level category hot goods.
    @staticmethod
    def pipeline_week_classify_hot_goods():
        """Rebuild the first/second-level category top-10 goods hashes in
        Redis from all historical self-operated order items."""
        d = time.strftime("%w", time.localtime())
        # Only run on Mondays.
        if int(d) == 1:
            r = ConnList.WriteRedis()
            r.delete('first_classify_goods')
            r.delete('second_classify_goods')
            # Category tree: children keyed by name, parents by id.
            self_classify = ExOrder('').self_classify(condition={'condition': ['class_id > 0']})
            classify = {}
            classify_parent = {}
            for row in self_classify:
                class_id = row['class_id']
                class_name = row['class_name']
                parent_id = row['parent_id']
                if parent_id != 0:
                    classify[class_name] = {'class_id': class_id, 'class_name': class_name, 'parent_id': parent_id, 'goods': {}}
                else:
                    classify_parent[class_id] = {}
            # All-time self-operated order items.
            condition = {'start_time': 0,
                         'end_time': DateHandler.now_datetime(),
                         'condition': ['u.is_type = 0', 'o.order_goods_type = 2', 'i.goods_id != 0', 'o.status > 2']}
            order = ExOrder('').order_items(condition)
            # Accumulate quantities per category.
            for row in order:
                goods_id = row['goods_id']
                goods_number = row['goods_number']
                goods_class = row['goods_class']
                try:
                    # Second-level bucket.
                    if goods_id not in classify[goods_class]['goods']:
                        classify[goods_class]['goods'][goods_id] = goods_number
                    else:
                        classify[goods_class]['goods'][goods_id] += goods_number
                    # First-level bucket via the parent link.
                    parent_id = classify[goods_class]['parent_id']
                    if goods_id not in classify_parent[parent_id]:
                        classify_parent[parent_id][goods_id] = goods_number
                    else:
                        classify_parent[parent_id][goods_id] += goods_number
                except:
                    # Items whose category is missing from the tree are skipped.
                    pass
            # Second-level: sort and store the top-10 goods ids per category.
            for class_name in classify:
                class_id = classify[class_name]['class_id']
                goods = classify[class_name]['goods']
                res = sorted(goods.items(), key=lambda goods: goods[1], reverse=True)
                lt = []
                if len(res) > 0:
                    for i in range(0, 10):
                        if i < len(res):
                            goods_id = res[i][0]
                            lt.append(goods_id)
                        else:
                            break
                    r.hset("second_classify_goods", class_id, lt)
            # First-level: same, keyed by parent class_id.
            for class_id in classify_parent:
                goods = classify_parent[class_id]
                res = sorted(goods.items(), key=lambda goods: goods[1], reverse=True)
                lt = []
                if len(res) > 0:
                    for i in range(0, 10):
                        if i < len(res):
                            goods_id = res[i][0]
                            lt.append(goods_id)
                        else:
                            break
                    r.hset("first_classify_goods", class_id, lt)

    @staticmethod
    def pipeline_lx_brand():
        """Rebuild MongoDB seo.lianyin_top_brand: consignment (goods_type
        = 2) brands ranked by ordered-item count."""
        d = time.strftime("%w", time.localtime())
        # Only run on Mondays.
        if int(d) == 1:
            client = ConnList.SeoMongoDB()
            db = client['seo']
            table = db['lianyin_top_brand']
            table.delete_many({})
            sql = "SELECT i.brand_id, COUNT(i.brand_id) FROM lie_order_items i \
                   LEFT JOIN lie_order o ON i.order_id = o.order_id \
                   WHERE i.goods_type = 2 AND i.status = 1 AND i.brand_id != 0 AND o.is_type = 0 \
                   GROUP BY i.brand_id ORDER BY COUNT(i.brand_id) DESC"
            result = DBHandler.read(ConnList.Order(), sql)
            for row in result:
                brand_id = row[0]
                brand_count = row[1]
                table.insert_one({'brand_id': brand_id, 'brand_count': brand_count})

    @staticmethod
    def pipeline_lx_order():
        """Rebuild MongoDB seo.lianyin_top_sku: goods ranked by
        ordered-item count."""
        d = time.strftime("%w", time.localtime())
        # Only run on Mondays.
        if int(d) == 1:
            client = ConnList.SeoMongoDB()
            db = client['seo']
            table = db['lianyin_top_sku']
            table.delete_many({})
            sql = "SELECT i.goods_id, COUNT(i.goods_id) FROM lie_order_items i \
                   LEFT JOIN lie_order o ON i.order_id = o.order_id \
                   WHERE i.goods_type = 2 AND i.status = 1 AND i.brand_id != 0 AND o.is_type = 0 \
                   GROUP BY i.goods_id ORDER BY COUNT(i.goods_id) DESC"
            result = DBHandler.read(ConnList.Order(), sql)
            for row in result:
                goods_id = row[0]
                goods_count = row[1]
                table.insert_one({'goods_id': goods_id, 'goods_count': goods_count})

    @staticmethod
    def pipeline_keyword_match():
        """Keyword-match report: for each searched keyword that exactly
        matches a goods record, count searches, distinct searching IPs
        and the resulting order / user counts; insert rows into
        lie_keyword_match."""
        start_time = DateHandler.today_between_months(1, '%Y-%m-%d')
        end_time = DateHandler.today_between_months(0, '%Y-%m-%d')
        db = ConnList.Order()
        # NOTE(review): the month-based window above is immediately
        # overridden by these fixed timestamps.
        start_time = 1575129600
        end_time = 1575216000
        # Searches WITH results (flag = 1) from the search service.
        has_result = DBHandler.scroll_read(url="http://so12.ichunt.com/search/searchLog/index",
                                           body={"create_time/range": "%d,%d" % (start_time, end_time),
                                                 "flag/condition": "1", "p": 1,
                                                 "offset": "1000", "is_scroll": "1"},
                                           key='list')
        kl = {}      # keyword -> {'count', 'user': [ips], later an int}
        cmp_kl = {}  # subset of kl: keywords that exactly match a goods record
        for row in has_result:
            keyword = row['keyword']
            ip = row['ip']
            if keyword not in kl:
                kl[keyword] = {'count': 1, 'user': [ip]}
            else:
                kl[keyword]['count'] += 1
                if ip not in kl[keyword]['user']:
                    kl[keyword]['user'].append(ip)
        url = "http://so12.ichunt.com/other/exactGoods"
        index = 0
        for keyword in kl:
            # Collapse the IP list into a distinct-IP count.
            kl[keyword]['user'] = len(kl[keyword]['user'])
            body = {
                'supplier_id': -1,
                'goods_name': keyword
            }
            r = requests.post(url, body)
            result = r.json()
            data = result['data']
            if len(data) > 0:
                cmp_kl[keyword] = kl[keyword]
                # Count ordered items matching the keyword in the window.
                sql = "SELECT count(i.order_id)\
                       FROM lie_order_items i \
                       LEFT JOIN lie_order o ON i.order_id = o.order_id \
                       LEFT JOIN lie_user_main u ON i.user_id = u.user_id \
                       WHERE i.status = 1 \
                       AND o.status > 2 \
                       AND o.is_type = 0 \
                       AND u.is_test = 0 \
                       AND u.is_type = 0 \
                       AND o.create_time BETWEEN %d AND %d AND i.goods_name = \'%s\'" % (start_time, end_time, keyword)
                od_data = DBHandler.read(db, sql)
                sql = "SELECT count(i.user_id)\
                       FROM lie_order_items i \
                       LEFT JOIN lie_order o ON i.order_id = o.order_id \
                       LEFT JOIN lie_user_main u ON i.user_id = u.user_id \
                       WHERE i.status = 1 \
                       AND o.status > 2 \
                       AND o.is_type = 0 \
                       AND u.is_test = 0 \
                       AND u.is_type = 0 \
                       AND o.create_time BETWEEN %d AND %d AND i.goods_name = \'%s\' GROUP BY i.user_id" % (start_time, end_time, keyword)
                us_data = DBHandler.read(db, sql)
                if len(od_data) > 0:
                    count_od = od_data[0][0]
                    cmp_kl[keyword]['ct_od'] = count_od
                else:
                    cmp_kl[keyword]['ct_od'] = 0
                # NOTE(review): us_data[0][0] is the item count of the
                # first grouped user, not the distinct-user count
                # (len(us_data)) — verify intent.
                if len(us_data) > 0:
                    count_us = us_data[0][0]
                    cmp_kl[keyword]['ct_us'] = count_us
                else:
                    cmp_kl[keyword]['ct_us'] = 0
            index += 1
            print(index)
            # Throttle requests to the search service.
            time.sleep(0.01)
        wr_db = ConnList.Dashboard()
        month = int(DateHandler.today_between_months(1, "%Y%m"))
        for keyword in cmp_kl:
            count = int(cmp_kl[keyword]['count'])
            user = int(cmp_kl[keyword]['user'])
            count_od = int(cmp_kl[keyword]['ct_od'])
            count_us = int(cmp_kl[keyword]['ct_us'])
            # NOTE(review): keyword is interpolated unescaped into SQL.
            sql = "INSERT INTO lie_keyword_match(keyword,total_count,user_count,deal_total_count,deal_user_count,month) VALUES (\'%s\',%d,%d,%d,%d,%d)" % \
                  (keyword,count,user,count_od,count_us,month)
            DBHandler.insert(wr_db, sql)
        print(len(cmp_kl))
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
from translate.ts_base import TsBase
from translate.ts_page import TsPage
from extract.ex_shence import ExShenCe
from utils.date_handler import DateHandler
from load.load_mysql import LoadMysql
from config.conn_list import ConnList
import re
class TsActivity(TsBase):
    """Per-activity (marketing campaign) metric accumulator.

    NOTE(review): does not call TsBase.__init__, so self.data is never
    set; only self.pd is used here.
    """

    def __init__(self, data):
        # activity_id -> counter dict, seeded with zeros.
        self.pd = {}
        for d in data:
            ac_id = d['id']
            ac_sign = d['sign']
            ac_name = d['name']
            self.pd[ac_id] = {
                'activity_en': ac_sign,
                'activity_cn': ac_name,
                'activity_id': ac_id,
                'reg_count': 0,
                'log_count': 0,
                'pv_count': 0,
                'uv_count': 0,
                'ip_count': 0,
                'answer_count': 0,
                'answer_people': [],  # distinct user_ids that answered
                'answer_point': 0
            }

    def trans_rl_data(self, data, key, r_key, is_load):
        """Count registrations/logins attributable to an activity ptag.

        data: user rows; key: field holding the source string to parse;
        r_key: counter to increment ('reg_count' / 'log_count');
        is_load: when True, also insert one lie_activity_user row per hit.
        """
        ts = TsPage('')
        for d in data:
            sc = d[key]
            ud = d['user_id']
            # Extract the ptag=... value from the source string.
            obj = re.match(r'(.*)ptag=([\w\-\.]*)($|,(.*))', sc, re.M | re.I)
            ptag = ts.trans_ptag(obj, 2, 1)
            # Activity ptags look like 'activity-...-<id>'.
            if 'activity-' in ptag:
                count = ptag.count('-')
                if count > 0:
                    p_s = ptag.split('-')
                    ac_id = p_s[len(p_s) - 1]
                    # The trailing segment must be a numeric activity id.
                    if ac_id.isdigit():
                        self.pd[int(ac_id)][r_key] += 1
                        # Optionally persist the (user, activity) hit.
                        if is_load:
                            col = ['user_id', 'ac_id', 'insert_time', 'cal_ts']
                            rd = [{'user_id': ud, 'ac_id': int(ac_id),
                                   'insert_time': DateHandler.now_datetime(),
                                   'cal_ts': DateHandler.now_date(1, 1)}]
                            LoadMysql.simple_dict_load(col, 'lie_activity_user', rd,
                                                       db=ConnList.Dashboard(), cal_time=False)

    def trans_pui_value(self):
        """Fill pv/uv/ip counters from Sensors Analytics for each
        activity's /activity/<sign> URL."""
        for pd in self.pd:
            ac_name = self.pd[pd]['activity_en']
            regex = '(.*)(/activity/' + str(ac_name) + ')(.*)'
            if regex != '':
                sc = ExShenCe('').reg_pui(regex)
                self.pd[pd]['pv_count'] = sc['pv']
                self.pd[pd]['uv_count'] = sc['uv']
                self.pd[pd]['ip_count'] = sc['ip']

    def trans_answer_value(self, data):
        """Accumulate quiz answers: total score, answer count and
        distinct answering users per known activity."""
        for asw in data:
            user_id = asw['user_id']
            ac_id = asw['activity_id']
            score = asw['score']
            if ac_id in self.pd:
                self.pd[ac_id]['answer_point'] += score
                self.pd[ac_id]['answer_count'] += 1
                if user_id not in self.pd[ac_id]['answer_people']:
                    self.pd[ac_id]['answer_people'].append(user_id)
class Base:
class TsBase:
def __init__(self, name, data):
self.data = data
print('数据转换:', name)
# 人数去重
def no_repeat_people(self, data):
def no_repeat_people(self):
user_list = []
for row in data:
for row in self.data:
user_id = row['user_id']
if user_id not in user_list:
user_list.append(user_id)
print(len(user_list))
return len(user_list)
# 返回次数
def return_times(self):
return len(self.data)
# 设置数据
def set_data(self, data):
self.data = data
# 返回数据
def return_data(self):
return self.data
from translate.ts_base import TsBase
from config.conn_list import ConnList
from utils.db_handler import DBHandler
from utils.excel_handler import ExcelHandler
import socket, struct
import time
class TsBehavior(TsBase):
be_d = {1: '访问', 2: '注册', 3: '登录', 4: '加入购物车', 5: '立即购买', 6: '立即结算', 7: '立即付款',
8: '客服服务', 9: '优惠券', 10: '抽奖', 11: '搜索'}
"""
根据号码获取user_id
"""
def trans_mobile_to_user_id(self):
    """Map each mobile number in self.data to its user_id.

    Returns a list of {'user_id', 'mobile'} dicts, de-duplicated by
    user_id (the first mobile seen for a user wins).

    Fixes: open the order-DB connection once instead of once per
    number, and track seen ids in a set for O(1) membership checks.
    """
    db = ConnList.Order()
    seen = set()
    rl = []
    for mobile in self.data:
        # Mobile is stored quoted in the DB; int() normalises the input
        # before formatting (raises ValueError on non-numeric values,
        # matching the original behavior).
        sql = "SELECT user_id FROM lie_user_main WHERE mobile = \'%d\'" % int(mobile)
        result = DBHandler.read(db, sql)
        if len(result) > 0:
            user_id = result[0][0]
            if user_id not in seen:
                seen.add(user_id)
                rl.append({'user_id': user_id, 'mobile': mobile})
    return rl
"""
行为数据转化
"""
def trans_user_behavior(self, condition):
# Excel标题、内容
title = ['账号', 'IP', 'GUID', '行为', '时间']
content = ['mobile', 'ip', 'user_sign', 'be', 'create_time']
rd = {}
index = 0
# 筛选条件
start_time = condition['start_time']
end_time = condition['end_time']
for data in self.data:
user_id = data['user_id']
mobile = data['mobile']
email = data['email']
sql = "SELECT behavior,create_time,user_sign,ip from lie_behavior_log WHERE create_time \
BETWEEN %d AND %d AND user_id = %d" % (start_time, end_time, user_id)
db = ConnList.Behavior()
result = DBHandler.read(db, sql)
for row in result:
be = self.be_d[row[0]]
create_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(row[1]))
user_sign = row[2]
ip = socket.inet_ntoa(struct.pack('!L', row[3]))
rd[index] = {'mobile': mobile if mobile != '' else email, 'be': be, 'create_time': create_time,
'user_sign': user_sign, 'ip': ip}
index += 1
ExcelHandler.write_to_excel(title, content, rd, "result", result_type=1)
"""
行为汇总转化
"""
def trans_sum_behavior(self, test_ip):
be = {'search_count': {'pc': 0, 'h5': 0}, 'custom_count': {'pc': 0, 'h5': 0},
'addcart_count': {'pc': 0, 'h5': 0}, 'buy_count': {'pc': 0, 'h5': 0},
'confirm_count': {'pc': 0, 'h5': 0}, 'pay_count': {'pc': 0, 'h5': 0},
'new_order_count': {'pc': 0, 'h5': 0}, 'pay_order_count': {'pc': 0, 'h5': 0}}
# 遍历
for row in self.data:
ip = row['ip']
behavior = row['behavior']
platform = 'pc' if row['platform'] == 1 else 'h5'
# 排除测试ip
if ip not in test_ip:
# 判断行为类型
if behavior == 4:
be['addcart_count'][platform] += 1
elif behavior == 5:
be['buy_count'][platform] += 1
elif behavior == 6:
be['confirm_count'][platform] += 1
elif behavior == 7:
be['pay_count'][platform] += 1
elif behavior == 8:
be['custom_count'][platform] += 1
elif behavior == 11:
be['search_count'][platform] += 1
return be
"""
行为导出
"""
def trans_output_behavior(self):
# Excel标题、内容
# title = ['号码', 'IP', '行为', '时间']
# content = ['mobile', 'ip', 'be', 'create_time']
title = ['账号', 'IP', 'adtag', '行为参数', '场景', '行为', '使用平台', 'GUID', '创建时间']
content = ['mobile', 'ip', 'adtag', 'param', 'scene', 'be', 'platform', 'user_sign', 'create_time']
rd = {}
index = 0
for data in self.data:
user_id = data['user_id']
behavior = self.be_d[data['behavior']]
param = data['param']
scene = data['scene']
adtag = data['adtag']
user_sign = data['user_sign']
if data['platform'] == 1:
platform = 'PC'
elif data['platform'] == 2:
platform = 'H5'
ip = socket.inet_ntoa(struct.pack('!L', data['ip']))
create_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(data['create_time']))
mobile = ''
email = ''
# 获取会员信息
if user_id != 0:
sql = "SELECT mobile,email from lie_user_main WHERE user_id = %d" % user_id
db = ConnList.Order()
result = DBHandler.read(db, sql)
if len(result) > 0:
mobile = result[0][0]
email = result[0][1]
rd[index] = {'mobile': mobile if mobile != '' else email, 'ip': ip,
'be': behavior, 'create_time': create_time, 'param': param,
'scene': scene, 'platform': platform, 'adtag': adtag, 'user_sign': user_sign}
index += 1
if index % 1000 == 0:
print(index)
ExcelHandler.write_to_excel_with_openpyxl(title, content, rd, "result.xls")
"""
下单用户
"""
def trans_order_behavior(self):
# 会员列表
user_list = []
ip_list = []
title = ['IP']
content = ['ip']
rd = {}
# 获取所有用户id
for data in self.data:
user_id = data['user_id']
if user_id not in user_list:
user_list.append(user_id)
# 获取该用户所登录过得ip
for user_id in user_list:
sql = "SELECT ip from lie_behavior_log WHERE user_id = %d" % user_id
db = ConnList.Behavior()
result = DBHandler.read(db, sql)
for row in result:
ip = socket.inet_ntoa(struct.pack('!L', row[0]))
if ip not in ip_list:
ip_list.append(ip)
index = 1
for ip in ip_list:
rd[index] = {'ip': ip}
index += 1
ExcelHandler.write_to_excel(title, content, rd, "result", result_type=1)
"""
判断用户是否存在行为
"""
def trans_user_exit_behavior(self, condition):
# Excel标题、内容
title = ['账号', '时间']
content = ['mobile', 'create_time']
rd = {}
for row in self.data:
user_id = row['user_id']
mobile = row['mobile'] if row['mobile'] != '' else row['email']
create_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(row['create_time']))
db = ConnList.Behavior()
sql = "SELECT behavior_id FROM lie_behavior_log \
WHERE user_id = %d AND behavior NOT IN %s" % (user_id, condition)
result = DBHandler.read(db, sql)
if len(result) == 0:
print(mobile)
rd[user_id] = {'mobile': mobile, 'create_time': create_time}
ExcelHandler.write_to_excel_with_openpyxl(title, content, rd, "result.xls")
import traceback
from utils.db_handler import DBHandler
from utils.date_handler import DateHandler
from extract.ex_erp import ExERP
class TsCrm:
    """Builds per-customer CRM aggregates and upserts them into lie_user_extend.

    `data` is a sequence of (outter_uid, user_id) pairs; every metric method
    fills self.dd[outter_uid].  Each metric uses a broad except as a
    deliberate best-effort fallback to its zero/empty default so one bad
    row cannot abort the batch.
    """

    def __init__(self, db, wr_db, data):
        # dd: outter_uid -> aggregate dict (see init()).
        self.dd = {}
        # Read connection (orders) and write connection (lie_user_extend).
        self.db = db
        self.wr_db = wr_db
        self.data = data
        # Today's USD->CNY exchange rate from the ERP.
        self.exchange = ExERP(DateHandler.now_date(0, 1)).get_erp_exchange()
        self.init()

    # Seed the aggregate dict with zeroed metrics for every customer.
    def init(self):
        for row in self.data:
            outter_uid = row[0]
            user_id = row[1]
            self.dd[outter_uid] = {
                'user_id': user_id,
                'is_order': 0,
                'last_order_time': 0,
                'order_num': 0,
                'model_num': 0,
                'order_amount': 0,
                'paid_amount': 0,
                'contact': '',
                'contact_info': ''
            }

    # Flag whether the customer has any consignment order.
    # 2 = has orders, 1 = none, 0 = query failed (column no_create_order).
    def is_order(self):
        for outter_uid in self.dd:
            try:
                sql = "SELECT 1 FROM lie_order WHERE user_id = %d AND is_type = 0 AND order_type = 1" % outter_uid
                order = DBHandler.read(self.db, sql)
                if (len(order) > 0):
                    self.dd[outter_uid]['is_order'] = 2
                else:
                    self.dd[outter_uid]['is_order'] = 1
            except:
                self.dd[outter_uid]['is_order'] = 0

    # Timestamp of the most recent order (0 when none/on failure).
    def last_order(self):
        for outter_uid in self.dd:
            try:
                sql = "SELECT create_time FROM lie_order WHERE user_id = %d AND is_type = 0 AND order_type = 1 ORDER BY order_id DESC LIMIT 1" % outter_uid
                order = DBHandler.read(self.db, sql)
                last_order_time = order[0][0]
                self.dd[outter_uid]['last_order_time'] = last_order_time
            except:
                self.dd[outter_uid]['last_order_time'] = 0

    # Count of completed orders (status = 10).
    def order_num(self):
        for outter_uid in self.dd:
            try:
                sql = "SELECT count(order_id) FROM lie_order WHERE user_id = %d AND is_type = 0 AND order_type = 1 AND status = 10" % outter_uid
                order = DBHandler.read(self.db, sql)
                order_num = order[0][0]
                self.dd[outter_uid]['order_num'] = order_num
            except:
                self.dd[outter_uid]['order_num'] = 0

    # Count of goods line-items across completed orders.
    def model_num(self):
        for outter_uid in self.dd:
            try:
                sql = "SELECT count(i.goods_id) FROM lie_order_items i LEFT JOIN lie_order o ON i.order_id = o.order_id WHERE o.user_id = %d AND i.status = 1 AND o.is_type = 0 AND o.order_type = 1 AND o.status = 10" % outter_uid
                model = DBHandler.read(self.db, sql)
                model_num = model[0][0]
                self.dd[outter_uid]['model_num'] = model_num
            except:
                self.dd[outter_uid]['model_num'] = 0

    # Total ordered amount in CNY (currency 2 = USD, converted).
    def order_amount(self):
        for outter_uid in self.dd:
            try:
                sum_amount = 0
                sql = "SELECT order_amount,currency FROM lie_order WHERE user_id = %d AND is_type = 0 AND order_type = 1" % outter_uid
                order = DBHandler.read(self.db, sql)
                for od in order:
                    amount = float(od[0])
                    currency = od[1]
                    if currency == 2:
                        amount = self.exchange * amount
                    sum_amount += amount
                self.dd[outter_uid]['order_amount'] = sum_amount
            except:
                self.dd[outter_uid]['order_amount'] = 0

    # Total paid amount in CNY, from successful pay-log entries.
    def paid_amount(self):
        for outter_uid in self.dd:
            try:
                sum_amount = 0
                sql = "SELECT p.pay_amount,o.currency FROM lie_order o LEFT JOIN lie_pay_log p ON o.order_id = p.order_id WHERE o.is_type = 0 AND o.order_type = 1 AND o.status > 2 AND p.is_paid = 1 AND o.user_id = %d" % outter_uid
                order = DBHandler.read(self.db, sql)
                for od in order:
                    amount = float(od[0])
                    currency = od[1]
                    if currency == 2:
                        amount = self.exchange * amount
                    sum_amount += amount
                self.dd[outter_uid]['paid_amount'] = sum_amount
            except:
                self.dd[outter_uid]['paid_amount'] = 0

    # Consignee name and mobile from the most recent order's address.
    def contact_info(self):
        for outter_uid in self.dd:
            try:
                sql = "SELECT consignee,mobile FROM lie_order_address a LEFT JOIN lie_order o ON o.order_id = a.order_id WHERE o.user_id = %d AND o.is_type = 0 AND o.order_type = 1 ORDER BY o.order_id DESC LIMIT 1" % outter_uid
                contact = DBHandler.read(self.db, sql)
                consignee = contact[0][0]
                mobile = contact[0][1]
                self.dd[outter_uid]['contact'] = consignee
                self.dd[outter_uid]['contact_info'] = mobile
            except:
                self.dd[outter_uid]['contact'] = ''
                self.dd[outter_uid]['contact_info'] = ''

    # Upsert every aggregate into lie_user_extend (insert or update
    # depending on whether the outter_uid already exists).
    def iap_data(self):
        create_time = DateHandler.date_time(0)
        for outter_uid in self.dd:
            try:
                dd = self.dd[outter_uid]
                dd['create_time'] = create_time
                dd['outter_uid'] = outter_uid
                sql = "SELECT 1 FROM lie_user_extend WHERE outter_uid = %d" % outter_uid
                is_exist = DBHandler.read(self.wr_db, sql)
                if (len(is_exist) > 0):
                    self.update_data(dd)
                else:
                    self.insert_data(dd)
            except:
                traceback.print_exc()

    # INSERT one aggregate row.  NOTE(review): contact strings are spliced
    # into the SQL unescaped — parameterize if they can contain quotes.
    def insert_data(self, dd):
        sql = "INSERT INTO lie_user_extend (user_id,outter_uid,no_create_order,latest_order_time,completed_order_nums,model_nums,total_order_amount,amount_paid,contact,contact_info,create_time) \
               VALUES (%d, %d, %d, %d, %d, %d, %.2f, %.2f, \'%s\',\'%s\', %d)" % \
              (dd['user_id'], dd['outter_uid'], dd['is_order'], dd['last_order_time'], dd['order_num'], dd['model_num'], dd['order_amount'], dd['paid_amount'], dd['contact'], dd['contact_info'], dd['create_time'])
        DBHandler.insert(self.wr_db, sql)

    # UPDATE one aggregate row keyed by outter_uid.
    def update_data(self, dd):
        sql = "UPDATE lie_user_extend SET no_create_order=%d,latest_order_time=%d,completed_order_nums=%d,model_nums=%d,total_order_amount=%.2f,amount_paid=%.2f,contact=\'%s\',contact_info=\'%s\' WHERE outter_uid=%d" % \
              (dd['is_order'], dd['last_order_time'], dd['order_num'], dd['model_num'], dd['order_amount'], dd['paid_amount'], dd['contact'], dd['contact_info'], dd['outter_uid'])
        DBHandler.update(self.wr_db, sql)
from translate.ts_base import TsBase
from extract.ex_erp import ExERP
class TsDaily(TsBase):
    """Daily-report aggregations over order rows.

    Every method reduces self.data (a list of order dicts) to
    counters/amounts.  Amounts in USD (currency == 2) are converted to CNY
    with the caller-supplied exchange rate; currency == 1 is already CNY.
    """

    @staticmethod
    def _to_cny(amount, currency, exchange):
        """Convert an order amount to CNY."""
        return float(amount) if currency == 1 else float(amount) * exchange

    def trans_order(self, exchange):
        """Aggregate created orders.

        Returns overall ('order_*'), consignment ('ly_*',
        order_goods_type == 1), self-operated ('zy_*', type == 2) and paid
        self-operated ('zy_paid_*', status > 2) counts, distinct-user
        counts and CNY amounts.
        """
        rd = {'order_count': 0, 'order_user': [], 'order_amount': 0,
              'ly_count': 0, 'ly_user': [], 'ly_amount': 0,
              'zy_count': 0, 'zy_user': [], 'zy_amount': 0,
              'zy_paid_count': 0, 'zy_paid_user': [], 'zy_paid_amount': 0}
        for row in self.data:
            status = row['status']
            user_id = row['user_id']
            order_goods_type = row['order_goods_type']
            order_amount = self._to_cny(row['order_amount'], row['currency'], exchange)
            if order_goods_type in (1, 2):
                # Overall totals.
                rd['order_count'] += 1
                rd['order_amount'] += order_amount
                if user_id not in rd['order_user']:
                    rd['order_user'].append(user_id)
                if order_goods_type == 1:
                    # Consignment (联营) orders.
                    rd['ly_count'] += 1
                    rd['ly_amount'] += order_amount
                    if user_id not in rd['ly_user']:
                        rd['ly_user'].append(user_id)
                else:
                    # Self-operated (自营) orders.
                    rd['zy_count'] += 1
                    rd['zy_amount'] += order_amount
                    if user_id not in rd['zy_user']:
                        rd['zy_user'].append(user_id)
                    # status > 2 means the order has been paid.
                    if status > 2:
                        rd['zy_paid_count'] += 1
                        rd['zy_paid_amount'] += order_amount
                        if user_id not in rd['zy_paid_user']:
                            rd['zy_paid_user'].append(user_id)
        # Collapse user-id lists into distinct-user counts.
        for key in ('order_user', 'ly_user', 'zy_user', 'zy_paid_user'):
            rd[key] = len(rd[key])
        return rd

    def trans_paid_order(self, exchange):
        """Aggregate payment rows (pay_amount) — same split as trans_order,
        without the paid-subset counters."""
        rd = {'order_count': 0, 'order_user': [], 'order_amount': 0,
              'ly_count': 0, 'ly_user': [], 'ly_amount': 0,
              'zy_count': 0, 'zy_user': [], 'zy_amount': 0}
        for row in self.data:
            user_id = row['user_id']
            order_goods_type = row['order_goods_type']
            order_amount = self._to_cny(row['pay_amount'], row['currency'], exchange)
            if order_goods_type in (1, 2):
                rd['order_count'] += 1
                rd['order_amount'] += order_amount
                if user_id not in rd['order_user']:
                    rd['order_user'].append(user_id)
                if order_goods_type == 1:
                    rd['ly_count'] += 1
                    rd['ly_amount'] += order_amount
                    if user_id not in rd['ly_user']:
                        rd['ly_user'].append(user_id)
                else:
                    rd['zy_count'] += 1
                    rd['zy_amount'] += order_amount
                    if user_id not in rd['zy_user']:
                        rd['zy_user'].append(user_id)
        for key in ('order_user', 'ly_user', 'zy_user'):
            rd[key] = len(rd[key])
        return rd

    @staticmethod
    def _split_users(rows):
        """Return (all user_ids, self-operated user_ids) as sets."""
        all_users = set()
        zy_users = set()
        for row in rows:
            all_users.add(row['user_id'])
            if row['order_goods_type'] == 2:
                zy_users.add(row['user_id'])
        return all_users, zy_users

    def trans_mixed(self, user1, user2):
        """Percentage (rounded) of user1's users also present in user2,
        overall and restricted to self-operated orders.

        Fix: returns 0 instead of raising ZeroDivisionError when user1 has
        no users (or no self-operated users).
        """
        all_ul_1, zy_ul_1 = self._split_users(user1)
        all_ul_2, zy_ul_2 = self._split_users(user2)
        all_mix_user = len(all_ul_1 & all_ul_2)
        zy_mix_user = len(zy_ul_1 & zy_ul_2)
        all_radio = round(all_mix_user / len(all_ul_1) * 100) if all_ul_1 else 0
        zy_radio = round(zy_mix_user / len(zy_ul_1) * 100) if zy_ul_1 else 0
        return {'all_radio': all_radio, 'zy_radio': zy_radio}
from utils.date_handler import DateHandler
from utils.db_handler import DBHandler
from config.conn_list import ConnList
class TsERP:
    """Loads ERP pool-fund (poolfund) detail/info/month aggregates into MySQL.

    The SQL column names come from the target schema and include existing
    misspellings ('reveive_period_aomunt', 'mianly_class') — they must not
    be "corrected" here.
    """

    def __init__(self):
        super().__init__()
        # Shared create/update timestamp for all rows written by this run.
        self.now_ts = DateHandler.now_datetime()

    # Insert raw pool-fund detail rows into lie_basic_detail.
    # `data` rows use the ERP's camelCase field names (see the value tuple).
    def load_erp(self, data, db):
        col_str = 'poolfund_no,\
                   poolfund_source_no,\
                   entru_no,order_amount,\
                   business_time,\
                   deadline_day,\
                   receive_time,\
                   return_amount,\
                   return_time,\
                   is_settle,\
                   delay_day,\
                   erp_company_code,\
                   delay_amount,\
                   remark,\
                   is_credit,\
                   gross_profit,\
                   poolfund_type,\
                   tolerance,\
                   source_type,\
                   create_time,\
                   update_time'
        for row in data:
            # remark is intentionally written as an empty string.
            value_str = "'%s',\
                         '%s',\
                         '%s',\
                         '%.2f',\
                         '%d',\
                         '%d',\
                         '%d',\
                         '%.2f',\
                         '%d',\
                         '%d',\
                         '%d',\
                         '%s',\
                         '%.2f',\
                         '%s',\
                         '%d',\
                         '%.2f',\
                         '%s',\
                         '%d',\
                         '%d',\
                         '%d',\
                         '%d'" % \
                        (row['poolfundNo'],
                         row['poolfundSourceNo'],
                         row['entruNo'],
                         row['tradeAmount'],
                         row['bizdate'],
                         row['term'],
                         row['receDate'],
                         row['refundAMt'],
                         row['refundDate'],
                         row['isSettle'],
                         row['overdueDays'],
                         row['company_code'],
                         row['overdueAmount'],
                         '',
                         row['iscredit'],
                         row['gross_profit'],
                         row['poolfundType'],
                         row['tolerance'],
                         row['source_type'],
                         self.now_ts,
                         self.now_ts)
            sql = "INSERT INTO lie_basic_detail (%s) VALUES (%s)" % (col_str, value_str)
            DBHandler.insert(db, sql)
            # print(sql)

    # Insert one per-company summary row into lie_basic_info.
    def load_erp_info(self, data, db):
        col_str = 'erp_company_code,\
                   first_order_time,\
                   total_order_amount,\
                   total_gross_profit,\
                   recently_six_average_amount,\
                   recently_six_max_amount,\
                   recently_three_average_amount,\
                   recently_three_max_amount,\
                   last_month_amount,\
                   reveive_period_aomunt,\
                   total_delay_times,\
                   mianly_class,\
                   create_time,\
                   update_time'
        value_str = "'%s',\
                     '%d',\
                     '%.2f',\
                     '%.2f',\
                     '%.2f',\
                     '%.2f',\
                     '%.2f',\
                     '%.2f',\
                     '%.2f',\
                     '%.2f',\
                     '%d',\
                     '%s',\
                     '%d',\
                     '%d'" % \
                    (data['cmp_code'],
                     data['first_order_time'],
                     data['total_order_amount'],
                     data['total_gross_profit'],
                     data['recently_six_average_amount'],
                     data['recently_six_max_amount'],
                     data['recently_thr_average_amount'],
                     data['recently_thr_max_amount'],
                     data['last_month_amount'],
                     data['reveive_period_aomunt'],
                     data['total_delay_times'],
                     data['mianly_class'],
                     self.now_ts,
                     self.now_ts)
        sql = "INSERT INTO lie_basic_info (%s) VALUES (%s)" % (col_str, value_str)
        DBHandler.insert(db, sql)

    # Insert per-company, per-month metric rows into `table`.
    # `data` maps company code -> {month -> metric dict}.
    def load_erp_month(self, data, table, db):
        col_str = 'receive_amount,\
                   delay_amount,\
                   receive_count,\
                   delay_count,\
                   delay_day,\
                   period_day,\
                   tolerance_receive_amount,\
                   tolerance_receive_count,\
                   appoint_tolerance,\
                   schedule_delivery_amount,\
                   delivery_amount,\
                   return_amount,\
                   unload_amount,\
                   insurance_user_count,\
                   gross_profit,\
                   delay_avg_day,\
                   period_user_level,\
                   period_weight_A,\
                   period_weight_B,\
                   period_platform_level,\
                   period_use_times_single,\
                   period_use_times_six,\
                   erp_company_code,\
                   month,\
                   create_time,\
                   update_time'
        for cmp in data:
            cmp_data = data[cmp]
            for month in cmp_data:
                detail = cmp_data[month]
                value_str = "'%.2f',\
                             '%.2f',\
                             '%d',\
                             '%d',\
                             '%d',\
                             '%d',\
                             '%.2f',\
                             '%d',\
                             '%d',\
                             '%.2f',\
                             '%.2f',\
                             '%.2f',\
                             '%.2f',\
                             '%d',\
                             '%.2f',\
                             '%.2f',\
                             '%.2f',\
                             '%.2f',\
                             '%.2f',\
                             '%.2f',\
                             '%.2f',\
                             '%.2f',\
                             '%s',\
                             '%d',\
                             '%d',\
                             '%d'" % \
                            (detail['receive_amount'],
                             detail['delay_amount'],
                             detail['receive_count'],
                             detail['delay_count'],
                             detail['delay_day'],
                             detail['period_day'],
                             detail['tolerance_receive_amount'],
                             detail['tolerance_receive_count'],
                             detail['appoint_tolerance'],
                             detail['schedule_delivery_amount'],
                             detail['delivery_amount'],
                             detail['return_amount'],
                             detail['unload_amount'],
                             detail['insurance_user_count'],
                             detail['gross_profit'],
                             detail['delay_avg_day'],
                             detail['period_user_level'],
                             detail['period_weight_A'],
                             detail['period_weight_B'],
                             detail['period_platform_level'],
                             detail['period_use_times_single'],
                             detail['period_use_times_six'],
                             cmp,
                             int(str(month).replace('-','')),  # '2019-01' -> 201901
                             self.now_ts,
                             self.now_ts)
                sql = "INSERT INTO %s (%s) VALUES (%s)" % (table, col_str, value_str)
                DBHandler.insert(db, sql)

    # Wipe historical rows before a full reload.
    # NOTE(review): TRUNCATE is issued through DBHandler.read — confirm that
    # path commits/executes DDL the same way DBHandler.update does.
    def truncate_erp(self, table, db):
        sql = "truncate %s" % table
        DBHandler.read(db, sql)

    # Zeroed per-company summary template (keys match load_erp_info input).
    def init_erp_info(self):
        rd = {
            'cmp_code': '',
            'first_order_time': 0,
            'total_order_amount': 0.00,
            'total_gross_profit': 0.00,
            'recently_six_total_amount': 0.00,
            'recently_six_total_count': 0,
            'recently_six_average_amount': 0.00,
            'recently_six_max_amount': 0.00,
            'recently_thr_total_amount': 0.00,
            'recently_thr_total_count': 0,
            'recently_thr_average_amount': 0.00,
            'recently_thr_max_amount': 0.00,
            'last_month_amount': 0.00,
            'reveive_period_aomunt': 0.00,
            'total_delay_times': 0,
            'mianly_class': ''
        }
        return rd

    # Zeroed company x month metric matrix (keys match load_erp_month input).
    def init_erp_month(self, cmp_list, month_list):
        rd = {}
        for cmp in cmp_list:
            rd[cmp] = {}
            for month in month_list:
                rd[cmp][month] = {
                    'receive_amount': 0.00,
                    'delay_amount': 0.00,
                    'receive_count': 0,
                    'delay_count': 0,
                    'delay_day': 0,
                    'period_day': 0,
                    'tolerance_receive_amount': 0.00,
                    'tolerance_receive_count': 0.00,
                    'appoint_tolerance': 0,
                    'schedule_delivery_amount': 0.00,
                    'delivery_amount': 0.00,
                    'return_amount': 0.00,
                    'unload_amount': 0.00,
                    'insurance_user_count': 0,
                    'gross_profit': 0.00,
                    'delay_sum_day': 0,
                    'delay_avg_day': 0,
                    'period_user_level': 0,
                    'period_weight_A': 0,
                    'period_weight_B': 0,
                    'period_platform_level': 0,
                    'period_use_times_single': 0,
                    'period_use_times_six': 0,
                    'total_delay_avg_day': 0,
                    'total_tolerance': 0
                }
        return rd

    # Single zeroed month-metric dict (same shape as init_erp_month cells).
    def init_erp_month_temp(self):
        rd = {
            'receive_amount': 0.00,
            'delay_amount': 0.00,
            'receive_count': 0,
            'delay_count': 0,
            'delay_day': 0,
            'period_day': 0,
            'tolerance_receive_amount': 0.00,
            'tolerance_receive_count': 0.00,
            'appoint_tolerance': 0,
            'schedule_delivery_amount': 0.00,
            'delivery_amount': 0.00,
            'return_amount': 0.00,
            'unload_amount': 0.00,
            'insurance_user_count': 0,
            'gross_profit': 0.00,
            'delay_sum_day': 0,
            'delay_avg_day': 0,
            'period_user_level': 0,
            'period_weight_A': 0,
            'period_weight_B': 0,
            'period_platform_level': 0,
            'period_use_times_single': 0,
            'period_use_times_six': 0,
            'total_delay_avg_day': 0,
            'total_tolerance': 0
        }
        return rd

    # True when cmp_ts falls within the last `months` months (up to today).
    def is_between_time(self, cmp_ts, months):
        start = DateHandler.today_between_months(months)
        start_ts = DateHandler.str_to_unix(start, fmt="%Y-%m-%d")
        today_ts = DateHandler.date_time(0)
        if start_ts <= cmp_ts <= today_ts:
            return True
        else:
            return False

    # SQL tuple literal of the six months preceding `date` ('YYYYMM...').
    def recently_six_month(self, date):
        lt = []
        year = int(date[0:4])
        month = int(date[4:6])
        day = 1
        for i in range(1, 7):
            new_date = DateHandler.day_between_months(year, month, day, i, fmt="%Y%m")
            lt.append(new_date)
        return "(" + ",".join(lt) + ")"

    # Update the period_* score columns of one company/month row.
    def update_basic_month(self, data, table, db):
        month = data['month']
        cmp_code = data['cmp_code']
        period_weight_A = data['period_weight_A']
        period_weight_B = data['period_weight_B']
        period_platform_level = data['period_platform_level']
        period_use_times_single = data['period_use_times_single']
        period_use_times_six = data['period_use_times_six']
        update_time = DateHandler.now_datetime()
        sql = "UPDATE %s SET \
               period_weight_A = %.2f, \
               period_weight_B = %.2f, \
               period_platform_level = %.2f, \
               period_use_times_single = %.2f, \
               period_use_times_six = %.2f,\
               update_time = %d \
               WHERE erp_company_code = \'%s\' AND month = %d" % \
              (
                  table,
                  period_weight_A,
                  period_weight_B,
                  period_platform_level,
                  period_use_times_single,
                  period_use_times_six,
                  update_time,
                  cmp_code,
                  month
              )
        DBHandler.update(db, sql)
from translate.ts_base import TsBase
from config.conn_list import ConnList
from load.load_mysql import LoadMysql
from utils.db_handler import DBHandler
from utils.date_handler import DateHandler
from utils.excel_handler import ExcelHandler
import json
import requests
import time
class TsGoods(TsBase):
    """Goods-level transforms: price matching, search stats, expose/order
    aggregation by brand and first-level category."""

    """
    获取商品型号
    """
    # Resolve spu_ids in self.data to goods names via the Redis 'spu' hash.
    # Returns {spu_name: {'price', 'canal'}}; unknown spu_ids are skipped.
    def trans_goods_name(self):
        index = 0
        rd = {}
        r = ConnList.LyGoods()
        for spu_id in self.data:
            if r.hget("spu", spu_id) is not None:
                info = json.loads(str(r.hget("spu", spu_id), encoding="utf-8"))
                rd[info['spu_name']] = {'price': self.data[spu_id]['price'], 'canal': self.data[spu_id]['canal']}
                if index % 10000 == 0:
                    print(index)
                index += 1
        return rd

    """
    自营联营型号匹配
    """
    # Match consignment (ly) goods against self-operated (zy) goods by name,
    # pick each side's lowest tier price (USD converted via exchange * tax),
    # export to 1.xls and return the matches.
    def trans_match_name(self, zy, ly, exchange, tax):
        # Excel header titles / row keys.
        title = ['型号名称', '自营价格', '联营价格', '渠道']
        content = ['goods_name', 'zy_price', 'ly_price', 'canal']
        # Result dict keyed by running index.
        rd = {}
        index = 0
        for goods_name in ly:
            if goods_name in zy:
                # Self-operated lowest price = last tier of the price ladder.
                zy_price_json = json.loads(zy[goods_name], encoding="utf-8")
                zy_price = zy_price_json[len(zy_price_json) - 1]['price_cn']
                ly_price_json = json.loads(ly[goods_name]['price'], encoding="utf-8")
                ly_canal = ly[goods_name]['canal']
                # Consignment lowest price (only when the price field is a
                # JSON array).
                if '[' in ly[goods_name]['price']:
                    try:
                        ly_price_cn = ly_price_json[len(ly_price_json) - 1]['price_cn']
                        ly_price_en = ly_price_json[len(ly_price_json) - 1]['price_us']
                        # Fall back to the USD price when no CNY price is set.
                        if ly_price_cn == 0:
                            ly_price = round(ly_price_en * exchange * tax, 4)
                        else:
                            ly_price = round(ly_price_cn, 4)
                        rd[index] = {'goods_name': goods_name, 'zy_price': zy_price, 'ly_price': ly_price,
                                     'canal': ly_canal}
                        index += 1
                    except:
                        print('error', ly_price_json)
        ExcelHandler.write_to_excel_with_openpyxl(title, content, rd, "1.xls")
        return rd

    """
    自营比联营价格低的型号
    """
    # Filter trans_match_name results down to names where the self-operated
    # price is higher than the consignment price; export to 2.xls.
    def trans_zy_low_price(self, result):
        title = ['型号名称', '自营价格', '联营价格', '渠道']
        content = ['goods_name', 'zy_price', 'ly_price', 'canal']
        rd = {}
        index = 0
        for r in result:
            goods_name = result[r]['goods_name']
            zy_price = result[r]['zy_price']
            ly_price = result[r]['ly_price']
            canal = result[r]['canal']
            # Keep rows where self-operated is more expensive.
            if zy_price > ly_price:
                rd[index] = {'goods_name': goods_name, 'zy_price': zy_price, 'ly_price': ly_price, 'canal': canal}
                index += 1
        ExcelHandler.write_to_excel_with_openpyxl(title, content, rd, "2.xls")
        return rd

    """
    获取立创自营数据
    """
    # Attach LCSC sale counts (count + unit, concatenated by SQL) to each
    # goods name in self.data; 0 when the name is unknown.
    def trans_lc_data(self):
        print('立创自营')
        db = ConnList.Dashboard()
        for goods_name in self.data:
            sql = "SELECT concat(sale_count,sale_unit) FROM lie_goods_cal WHERE goods_name = \'%s\'" % goods_name
            result = DBHandler.read(db, sql)
            if len(result) > 0:
                self.data[goods_name]['lc_sale_count'] = result[0][0]
            else:
                self.data[goods_name]['lc_sale_count'] = 0

    """
    获取猎芯总搜索次数
    """
    # Query the internal search-log service for each goods name's total
    # search count.  Failures leave the field untouched (best-effort).
    def trans_lx_search(self):
        print('猎芯搜索')
        index = 1
        for goods_name in self.data:
            url = "http://so12.ichunt.com/search/searchLog/index"
            body = {"keyword/condition": goods_name, "p": 1, "offset": "1", "is_scroll": "1", "is_exact": "1"}
            r = requests.post(url, data=body)
            if index % 100 == 0:
                print(index)
            index += 1
            try:
                if r.json()['error_code'] == 0:
                    self.data[goods_name]['lx_search_count'] = r.json()['data']['total']
                else:
                    self.data[goods_name]['lx_search_count'] = 0
            except:
                pass

    """
    根据goods_name是否存在决定更新还是插入
    """
    # Upsert each goods row into lie_stock_find.
    # NOTE(review): the UPDATE is executed through DBHandler.read — confirm
    # that path commits writes like DBHandler.update would.
    def trans_load_gn(self):
        db = ConnList.Dashboard()
        for goods_name in self.data:
            # Small pause to avoid hammering the DB.
            time.sleep(0.01)
            sql_exist = "SELECT id FROM lie_stock_find WHERE goods_name = '%s'" % goods_name
            exist = DBHandler.read(db, sql_exist)
            # Insert when missing, otherwise update.
            if len(exist) == 0:
                load_col = ['goods_name', 'lx_search_count', 'ickey_search_count', 'ickey_trade_count',
                            'ickey_buyer_count', 'lc_sale_count', 'create_time', 'update_time']
                LoadMysql.simple_dict_load(load_col, 'lie_stock_find', [self.data[goods_name]], db=ConnList.Dashboard(), cal_time=False)
            else:
                sql = "UPDATE lie_stock_find SET lx_search_count=%d, ickey_search_count=%d, \
                       ickey_trade_count=%d, ickey_buyer_count=%d, lc_sale_count='%s', update_time = %d \
                       WHERE goods_name = '%s'" % \
                      (self.data[goods_name]['lx_search_count'], self.data[goods_name]['ickey_search_count'],
                       self.data[goods_name]['ickey_trade_count'], self.data[goods_name]['ickey_buyer_count'],
                       self.data[goods_name]['lc_sale_count'], self.data[goods_name]['update_time'], goods_name)
                DBHandler.read(db, sql)

    """
    处理专卖预警数据
    """
    # For each supplier channel, count SKUs (supplier 17) not updated in the
    # last 25 days — used for stale-channel alerts.
    def trans_zm_warn(self):
        rd = []
        for row in self.data:
            # Channel code and the purchaser responsible for it.
            supplier_code = row['supplier_code']
            purchase_uid = row['channel_uid']
            # SKU search endpoint.
            url = "http://so12.ichunt.com/search/es/searchsku"
            body = {"update_time/sr": 'lte,' + str(DateHandler.date_time(25)), "supplier_id": 17,
                    "goods_status/condition": 1, "status/condition": 1,
                    'canal/condition': supplier_code}
            r = requests.post(url, data=body)
            # Only record channels the search service answered for.
            if r.json()['error_code'] == 0:
                rd.append({'supplier_code': supplier_code, 'purchase_uid': purchase_uid,
                           'count': r.json()['data']['total']})
        return rd

    """
    处理猎芯联营渠道下单数据
    """
    # Per special-channel stats: effective SKU count from the search service
    # plus ERP purchase-order counts; writes order rows to lie_special_order
    # and returns the per-channel summary (unknown channels fall into '其他').
    def trans_special_canal(self, erp):
        rd = {}
        for row in self.data:
            url = "http://so12.ichunt.com/search/es/searchsku"
            body = {"supplier_id": '17',
                    "goods_status/condition": 1,
                    "status/condition": 1,
                    'canal/condition': row['supplier_code']}
            r = requests.post(url, data=body)
            effect_num = int(r.json()['data']['total']) if r.json()['error_code'] == 0 else 0
            rd[row['supplier_name']] = {'supplier_name': row['supplier_name'],
                                        'supplier_code': row['supplier_code'],
                                        'effect_num': effect_num,
                                        'order_num': 0,
                                        'cal_ts': DateHandler.now_date(1, 1),
                                        'insert_time': DateHandler.now_datetime()}
        # Catch-all bucket for suppliers not present in self.data.
        rd['其他'] = {'supplier_name': '其他',  # 其他
                    'supplier_code': '其他',
                    'effect_num': 0,
                    'order_num': 0,
                    'cal_ts': DateHandler.now_date(1, 1),
                    'insert_time': DateHandler.now_datetime()}
        ls = []
        for e in erp:  # Walk ERP purchase orders.
            sp_name = e['supplier_name']
            order_time = e['pur_order_bizdate']
            order_no = e['pur_order_no']
            sale_man = e['sale_man']
            if sp_name in rd:
                s_n = sp_name
            else:
                s_n = '其他'
            rd[s_n]['order_num'] += 1
            ls.append({'supplier_code': rd[s_n]['supplier_code'],
                       'order_time': order_time,
                       'order_no': order_no,
                       'sale_man': sale_man,
                       'cal_ts': DateHandler.now_date(1, 1)})
        col = ['supplier_code', 'order_time', 'order_no', 'sale_man', 'cal_ts']
        LoadMysql.simple_dict_load(col, 'lie_special_order', ls, db=ConnList.Dashboard(), cal_time=False)
        return rd

    """
    计算曝光料号
    """
    # Count exposures per brand and per first-level category; returns the
    # accumulator later extended by trans_zy_order_type / paid / purchase.
    def trans_zy_expose_type(self):
        rd = {'brand': {}, 'class': {}}
        db = ConnList.lxData()
        for row in self.data:
            goods_id = row['goods_id']
            sql = "SELECT brand_name,class_name FROM lie_goods g \
                   LEFT JOIN lie_brand b ON g.brand_id = b.brand_id \
                   LEFT JOIN lie_self_classify c ON g.class_id1 = c.class_id \
                   WHERE goods_id = %d" % goods_id
            rs = DBHandler.read(db, sql)
            # Brand and first-level category of the exposed goods.
            brand = rs[0][0]
            cls = rs[0][1]
            # Per-brand exposure counter.
            if brand not in rd['brand']:
                rd['brand'][brand] = {'expose': 1, 'order_count': 0, 'paid_order': 0, 'cost': 0, 'pur_cost': 0}
            else:
                rd['brand'][brand]['expose'] += 1
            # Per-category exposure counter.
            if cls not in rd['class']:
                rd['class'][cls] = {'expose': 1, 'order_count': 0, 'paid_order': 0, 'cost': 0, 'pur_cost': 0}
            else:
                rd['class'][cls]['expose'] += 1
        # Release the connection.
        db.close()
        return rd

    """
    计算订单
    """
    # Add per-brand / per-category order counts into rd (in place).
    def trans_zy_order_type(self, rd):
        db = ConnList.lxData()
        for row in self.data:
            goods_id = row['goods_id']
            sql = "SELECT brand_name,class_name FROM lie_goods g \
                   LEFT JOIN lie_brand b ON g.brand_id = b.brand_id \
                   LEFT JOIN lie_self_classify c ON g.class_id1 = c.class_id \
                   WHERE goods_id = %d" % goods_id
            rs = DBHandler.read(db, sql)
            brand = rs[0][0]
            cls = rs[0][1]
            if brand not in rd['brand']:
                rd['brand'][brand] = {'expose': 0, 'order_count': 1, 'paid_order': 0, 'cost': 0, 'pur_cost': 0}
            else:
                rd['brand'][brand]['order_count'] += 1
            if cls not in rd['class']:
                rd['class'][cls] = {'expose': 0, 'order_count': 1, 'paid_order': 0, 'cost': 0, 'pur_cost': 0}
            else:
                rd['class'][cls]['order_count'] += 1
        db.close()

    """
    计算付款订单
    """
    # Add paid-order counts and goods cost (unit cost * quantity) into rd.
    def trans_zy_paid_order_type(self, rd):
        db = ConnList.lxData()
        for row in self.data:
            goods_id = row['goods_id']
            goods_number = row['goods_number']
            sql = "SELECT brand_name,class_name,cost FROM lie_goods g \
                   LEFT JOIN lie_brand b ON g.brand_id = b.brand_id \
                   LEFT JOIN lie_self_classify c ON g.class_id1 = c.class_id \
                   WHERE goods_id = %d" % goods_id
            rs = DBHandler.read(db, sql)
            brand = rs[0][0]
            cls = rs[0][1]
            cost = rs[0][2] * goods_number
            if brand not in rd['brand']:
                rd['brand'][brand] = {'expose': 0, 'order_count': 0, 'paid_order': 1, 'cost': cost, 'pur_cost': 0}
            else:
                rd['brand'][brand]['paid_order'] += 1
                rd['brand'][brand]['cost'] += cost
            if cls not in rd['class']:
                rd['class'][cls] = {'expose': 0, 'order_count': 0, 'paid_order': 1, 'cost': cost, 'pur_cost': 0}
            else:
                rd['class'][cls]['paid_order'] += 1
                rd['class'][cls]['cost'] += cost
        db.close()

    """
    计算采购
    """
    # Add per-brand purchase amounts into rd (rows carry brand + amount).
    def trans_zy_purchase(self, rd):
        for row in self.data:
            brand = row['brand']
            cost = row['amount']
            if brand not in rd['brand']:
                rd['brand'][brand] = {'expose': 0, 'order_count': 0, 'paid_order': 0, 'cost': 0, 'pur_cost': cost}
            else:
                rd['brand'][brand]['pur_cost'] += cost
from translate.ts_base import Base
from translate.ts_base import TsBase
from utils.excel_handler import ExcelHandler
from config.conn_list import ConnList
from utils.db_handler import DBHandler
from utils.date_handler import DateHandler
import time
import requests
import re
class TsOrder(Base):
class TsOrder(TsBase):
"""
获取用户订单明细 zzl v1.0
......@@ -16,16 +18,20 @@ class TsOrder(Base):
def trans_order_people(self, exchange):
# Excel标题、内容
title = ['姓名&公司', '手机号', '付款时间', '实付金额', '业务员']
content = ['tax_title', 'mobile', 'pay_time', 'order_amount', 'sale_name']
title = ['姓名&公司', '手机号', '付款时间', '累计付款金额(元)', '累计付款笔数', '交易员', '供应商']
content = ['tax_title', 'mobile', 'pay_time', 'order_amount', 'order_count', 'sale_name', 'supplier_name']
# user_id列表
ul = []
ol = []
# 结果字典
rd = {}
for row in self.data:
user_id = row['user_id']
if 'order_amount' in row:
order_amount = float(row['order_amount'])
elif 'goods_price' in row:
order_amount = float(row['goods_price'] * row['goods_number'])
currency = row['currency']
pay_time = row['pay_time']
tax_title = row['tax_title']
......@@ -33,25 +39,37 @@ class TsOrder(Base):
mobile = row['mobile']
email = row['email']
sale_id = row['sale_id']
supplier_name = row['supplier_name']
order_id = row['order_id']
# 币种转换
if currency == 2:
order_amount *= exchange
# 用户去重
if user_id not in ul:
ol.append(order_id)
ul.append(user_id)
rd[user_id] = {}
rd[user_id]['order_amount'] = order_amount
rd[user_id]['order_count'] = 1
rd[user_id]['pay_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(pay_time))
rd[user_id]['tax_title'] = tax_name + ' ' + tax_title
rd[user_id]['mobile'] = mobile if mobile != '' else email
rd[user_id]['supplier_name'] = supplier_name
# 跨库获取交易员
db = ConnList.IcData()
sql = 'select name from user_info where userId = %d' % (sale_id)
sql = 'select name from user_info where userId = %d' % sale_id
results = DBHandler.read(db=db, sql=sql)
sale_man = results[0][0] if len(results) > 0 else ''
rd[user_id]['sale_name'] = sale_man
else:
rd[user_id]['order_amount'] += order_amount
# 订单数累加
if order_id not in ol:
ol.append(order_id)
rd[user_id]['order_count'] += 1
# 供应商拼接
if supplier_name not in rd[user_id]['supplier_name']:
rd[user_id]['supplier_name'] += (', ' + supplier_name)
ExcelHandler.write_to_excel(title, content, rd, "result", result_type=1)
"""
......@@ -92,6 +110,531 @@ class TsOrder(Base):
print(user_list)
print(user_paid_list)
"""
计算常用指标
"""
def trans_order_target(self, exchange):
# 用户列表
order_ul = []
paid_ul = []
# 金额
amount = 0
paid_amount = 0
# 数量
count = 0
paid_count = 0
for row in self.data:
user_id = row['user_id']
order_amount = 0
if 'order_amount' in row:
order_amount = float(row['order_amount'])
elif 'goods_price' in row:
order_amount = float(row['goods_price'] * row['goods_number'])
status = row['status']
currency = row['currency']
# 币种转换
if currency == 2:
order_amount *= exchange
# 计算下单金额
amount += order_amount
count += 1
# 计算下单人数
if user_id not in order_ul:
order_ul.append(user_id)
# 计算已付款
if status > 2:
paid_amount += order_amount
paid_count += 1
if user_id not in paid_ul:
paid_ul.append(user_id)
return {'ul': len(order_ul), 'ul_amount': amount, 'count': count, 'paid_ul': len(paid_ul), 'paid_amount': paid_amount, 'paid_count': paid_count}
"""
计算优惠券使用情况
"""
def trans_order_coupon(self, exchange):
    """Summarise coupon usage from pay-log rows in self.data.

    price_type == -4 marks a coupon/discount line; such rows contribute
    their absolute price to the coupon total, and their users/orders are
    de-duplicated.  The affected orders' amounts are then read back from
    lie_order (USD converted via *exchange*).

    Improvement over the original: one shared DB connection is opened
    before the lookup loop instead of a new (never-closed) connection
    per order.

    :param exchange: USD -> RMB conversion rate.
    :return: dict with distinct coupon-user count, total coupon amount
             and total amount of the orders that used a coupon.
    """
    coupon_orders = []
    coupon_users = []
    order_amount_sum = 0
    coupon_amount = 0
    for row in self.data:
        if row['price_type'] == -4:  # coupon/discount line
            user_id = row['user_id']
            order_id = row['order_id']
            if user_id not in coupon_users:
                coupon_users.append(user_id)
            if order_id not in coupon_orders:
                coupon_orders.append(order_id)
            coupon_amount += abs(row['price'])
    # Single shared connection for all order lookups.
    db = ConnList.Order()
    for order_id in coupon_orders:
        sql = 'select order_amount,currency from lie_order where order_id = %d' % order_id
        results = DBHandler.read(db=db, sql=sql)
        amount = results[0][0]
        currency = results[0][1]
        # Convert USD to RMB.
        if currency == 2:
            amount *= exchange
        order_amount_sum += amount
    return {'ul': len(coupon_users), 'coupon_amount': coupon_amount,
            'order_amount': order_amount_sum}
"""
计算概览
"""
def trans_order_view(self, exchange):
    """Overview metrics: total ordered amount and the distinct users.

    :param exchange: USD -> RMB conversion rate.
    :return: {'user': list of distinct user_ids (first-seen order),
              'amount': summed goods_price * goods_number}
    """
    distinct_users = []
    total = 0
    for item in self.data:
        line_amount = float(item['goods_price'] * item['goods_number'])
        # Convert USD to RMB.
        if item['currency'] == 2:
            line_amount *= exchange
        total += line_amount
        uid = item['user_id']
        if uid not in distinct_users:
            distinct_users.append(uid)
    return {'user': distinct_users, 'amount': total}
"""
计算各个渠道
"""
def trans_order_tag(self, exchange):
    """Per-channel (adtag) order amount and distinct ordering users.

    The channel is the adtag value parsed out of order_source; only the
    known channels sem/wx/wechat/qq are tracked, everything else is
    ignored.

    :param exchange: USD -> RMB conversion rate.
    :return: {channel: {'user': [...], 'amount': float}}
    """
    channels = {name: {'user': [], 'amount': 0}
                for name in ('sem', 'wx', 'wechat', 'qq')}
    pattern = re.compile(r'(.*)?adtag=([\w]*)(\.|$|&|(.*))', re.M | re.I)
    for entry in self.data:
        amount = float(entry['goods_price'] * entry['goods_number'])
        # Convert USD to RMB.
        if entry['currency'] == 2:
            amount *= exchange
        matched = pattern.match(entry['order_source'])
        if not matched:
            continue
        channel = matched.group(2)
        # Only accumulate channels declared above.
        if channel not in channels:
            continue
        channels[channel]['amount'] += amount
        uid = entry['user_id']
        if uid not in channels[channel]['user']:
            channels[channel]['user'].append(uid)
    return channels
"""
区分新老用户
"""
def trans_user_type(self, exchange, start_time):
    """Split ordering users into new vs. old relative to *start_time*.

    A user is "old" when lie_order already holds an order of theirs
    created before start_time, otherwise "new".  Users are counted once
    (first row seen); amounts accumulate per row.

    Improvement over the original: one shared DB connection instead of
    a new (never-closed) connection per row.

    :param exchange: USD -> RMB conversion rate.
    :param start_time: unix timestamp separating new from old users.
    :return: {'new'|'old': {'user': int, 'amount': float}}
    """
    seen_users = []
    rd = {
        'new': {'user': 0, 'amount': 0},
        'old': {'user': 0, 'amount': 0}
    }
    # Single shared connection for all history lookups.
    db = ConnList.Order()
    for row in self.data:
        user_id = row['user_id']
        order_amount = float(row['goods_price'] * row['goods_number'])
        # Convert USD to RMB.
        if row['currency'] == 2:
            order_amount *= exchange
        # Any earlier order marks the user as "old".
        sql = 'select 1 FROM lie_order where user_id = %d AND create_time < %s' % (user_id, start_time)
        results = DBHandler.read(db=db, sql=sql)
        tag = 'new' if len(results) == 0 else 'old'
        # Count each user once; amounts accumulate per order row.
        if user_id not in seen_users:
            seen_users.append(user_id)
            rd[tag]['user'] += 1
        rd[tag]['amount'] += order_amount
    return rd
"""
计算user_id累计交易型号和累计成单数量
"""
def trans_user_offer(self):
    """For each user_id in self.data, attach lifetime trade statistics.

    Per user: number of completed orders (status = 10), number of
    traded item rows, and the latest order creation time (0 when the
    user has no orders).

    Fixes over the original:
    - the empty-result fallback for the item count wrote
      'trade_order_num' instead of 'trade_sku_num' (copy-paste bug),
      clobbering the order count;
    - one shared DB connection instead of a new one per user.

    :return: {user_id: {'trade_order_num', 'trade_sku_num',
                        'last_order_time'}}
    """
    rd = {}
    # Single shared connection for all per-user queries.
    db = ConnList.Order()
    for user_id in self.data:
        rd[user_id] = {}
        # Completed orders.
        sql = "SELECT count(order_id) FROM lie_order WHERE status = 10 AND user_id = %d" % user_id
        results = DBHandler.read(db=db, sql=sql)
        rd[user_id]['trade_order_num'] = results[0][0] if len(results) > 0 else 0
        # Traded item rows. BUG FIX: fallback previously overwrote
        # 'trade_order_num' instead of setting 'trade_sku_num'.
        sql = "SELECT count(goods_id) FROM lie_order_items WHERE user_id = %d" % user_id
        results = DBHandler.read(db=db, sql=sql)
        rd[user_id]['trade_sku_num'] = results[0][0] if len(results) > 0 else 0
        # Most recent order time.
        sql = "SELECT create_time FROM lie_order WHERE user_id = %d order by create_time desc limit 1" % user_id
        results = DBHandler.read(db=db, sql=sql)
        rd[user_id]['last_order_time'] = results[0][0] if len(results) > 0 else 0
    return rd
"""
订单应付金额计算
"""
def trans_order_cal(self, exchange=6.8):
    """Payable-amount statistics per platform and sale channel.

    Buckets: platforms pc/h5/xcx/ht (from the pf= tag in order_source),
    jd (order_type 3 + self-operated goods), sale channels ly/zy (from
    order_goods_type), and hz as the cross-platform roll-up.

    Improvement over the original: the USD -> RMB rate is a parameter
    (default 6.8, the previously hard-coded value), so callers can pass
    the current rate without behavior change for existing call sites.

    NOTE(review): jd rows receive no platform bucket under the
    pf-mapping, matching the original flow -- confirm this is intended.

    :param exchange: USD -> RMB conversion rate (default 6.8).
    :return: {bucket: {'count': int, 'amount': float, 'people': [...]}}
    """
    buckets = ('pc', 'h5', 'xcx', 'ht', 'jd', 'zy', 'ly', 'hz')
    rd = {k: {'count': 0, 'amount': 0, 'people': []} for k in buckets}
    pf_names = {1: 'pc', 2: 'h5', 6: 'xcx', -1: 'ht'}
    for row in self.data:
        pf = 1
        pf_type = ''
        sale_type = ''
        order_source = row['order_source']
        order_amount = round(float(row['order_amount']), 2)
        user_id = row['user_id']
        # Extract platform code from the order source.
        pobj = re.match(r'(.*)pf=((-|)[0-9])(,|$|&|(.*))', order_source, re.M | re.I)
        if pobj:
            pf = int(pobj.group(2))
        # Convert USD to RMB.
        if row['currency'] == 2:
            order_amount *= exchange
        # Platform classification.
        if row['order_type'] == 3 and row['order_goods_type'] == 2:
            pf_type = 'jd'
        elif row['order_type'] == 1:
            # Roll-up of distinct site users.
            if user_id not in rd['hz']['people']:
                rd['hz']['people'].append(user_id)
            pf_type = pf_names.get(pf, '')
        # Sale-channel classification (ly = marketplace, zy = self-run).
        if row['order_goods_type'] == 1:
            sale_type = 'ly'
            if user_id not in rd['ly']['people']:
                rd['ly']['people'].append(user_id)
        elif row['order_goods_type'] == 2:
            sale_type = 'zy'
            if user_id not in rd['zy']['people']:
                rd['zy']['people'].append(user_id)
        # Accumulate only fully classified rows.
        if pf_type != '' and sale_type != '':
            rd[pf_type]['count'] += 1
            rd[sale_type]['count'] += 1
            rd[pf_type]['amount'] += order_amount
            rd[sale_type]['amount'] += order_amount
    # hz roll-up over the platform buckets only.
    for key in rd:
        if key not in ('zy', 'ly', 'hz'):
            rd['hz']['count'] += rd[key]['count']
            rd['hz']['amount'] += rd[key]['amount']
    return rd
"""
计算订单实付金额
"""
def trans_order_paid_cal(self, exchange=6.8):
    """Actually-paid amount statistics per platform and sale channel.

    Works over pay-log rows: price_type -1/-2/-3 (payment, prepayment,
    balance) add to the paid amount with order_id de-duplicated into the
    count; price_type -4 (coupon) adds to the favour counters.  Buckets
    mirror trans_order_cal; hz is the roll-up.

    Improvement over the original: the USD -> RMB rate is a parameter
    (default 6.8, the previously hard-coded value).

    :param exchange: USD -> RMB conversion rate (default 6.8).
    :return: {bucket: {'count': int, 'amount': float, 'favour_count',
                       'favour_amount', 'people': [...]}}
    """
    buckets = ('pc', 'h5', 'xcx', 'ht', 'jd', 'zy', 'ly', 'hz')
    rd = {k: {'count': [], 'amount': 0, 'favour_count': 0,
              'favour_amount': 0, 'people': []} for k in buckets}
    pf_names = {1: 'pc', 2: 'h5', 6: 'xcx', -1: 'ht'}
    for row in self.data:
        pf = 0
        pf_type = ''
        sale_type = ''
        order_source = row['order_source']
        order_id = row['order_id']
        price = round(abs(float(row['price'])), 2)
        price_type = row['price_type']
        user_id = row['user_id']
        # Extract platform code from the order source.
        pobj = re.match(r'(.*)pf=((-|)[0-9])(,|$|&|(.*))', order_source, re.M | re.I)
        if pobj:
            pf = int(pobj.group(2))
        # Convert USD to RMB.
        if row['currency'] == 2:
            price = price * exchange
        # Platform classification.
        if row['order_type'] == 3 and row['order_goods_type'] == 2:
            pf_type = 'jd'
        elif row['order_type'] == 1:
            # Roll-up of distinct site users.
            if user_id not in rd['hz']['people']:
                rd['hz']['people'].append(user_id)
            pf_type = pf_names.get(pf, '')
        # Sale-channel classification.
        if row['order_goods_type'] == 1:
            sale_type = 'ly'
            if user_id not in rd['ly']['people']:
                rd['ly']['people'].append(user_id)
        elif row['order_goods_type'] == 2:
            sale_type = 'zy'
            if user_id not in rd['zy']['people']:
                rd['zy']['people'].append(user_id)
        # Accumulate only fully classified rows.
        if pf_type != '' and sale_type != '':
            if -3 <= price_type <= -1:
                # Payment / prepayment / balance lines.
                rd[pf_type]['amount'] += price
                rd[sale_type]['amount'] += price
                # De-duplicate orders into the count lists.
                if order_id not in rd[pf_type]['count']:
                    rd[pf_type]['count'].append(order_id)
                if order_id not in rd[sale_type]['count']:
                    rd[sale_type]['count'].append(order_id)
            elif price_type == -4:
                # Coupon/discount lines.
                rd[pf_type]['favour_count'] += 1
                rd[sale_type]['favour_count'] += 1
                rd[pf_type]['favour_amount'] += price
                rd[sale_type]['favour_amount'] += price
    # Turn order-id lists into counts.
    for bucket in rd:
        rd[bucket]['count'] = len(rd[bucket]['count'])
    # hz roll-up over the platform buckets only.
    for key in rd:
        if key not in ['zy', 'ly', 'hz']:
            rd['hz']['favour_count'] += rd[key]['favour_count']
            rd['hz']['favour_amount'] += rd[key]['favour_amount']
            rd['hz']['count'] += rd[key]['count']
            rd['hz']['amount'] += rd[key]['amount']
    return rd
"""
转换相关订单Excel数据
"""
def trans_order_excel(self):
    """Flatten orders in self.data into per-item rows for Excel export.

    For every order the user account, shipping address, invoice data and
    item lines are fetched from the order DB; one result row is produced
    per order item.

    Improvements over the original:
    - one shared DB connection instead of a new one per order;
    - the large commented-out legacy loop was removed (dead code).

    NOTE(review): USD totals are converted with a hard-coded 6.8 rate,
    and missing address/invoice rows would raise IndexError -- both
    preserved from the original flow.

    :return: {running_index: row-dict ready for ExcelHandler}
    """
    rd = {}
    # Human-readable labels for status / invoice-type codes.
    order_status = {-1: '已取消', 1: '待审核', 2: '待付款',
                    3: '待付尾款', 4: '待发货', 7: '部分发货',
                    8: '待收货', 10: '交易成功'}
    inv_type_d = {1: '不开发票', 2: '普通发票', 3: '增值税发票', 4: '其他'}
    index = 0
    # Single shared connection for all lookups below.
    db = ConnList.Order()
    for row in self.data:
        order_id = row['order_id']
        order_sn = row['order_sn']
        order_source = row['order_source']
        order_pay_type = row['order_pay_type']
        # Convert USD order totals to RMB.
        order_amount = row['order_amount'] if row['currency'] == 1 else float(row['order_amount']) * 6.8
        user_id = row['user_id']
        currency = 'RMB' if row['currency'] == 1 else 'USD'
        status = row['status']
        cancel_reason = row['cancel_reason']
        create_time = DateHandler.unix_to_date(row['create_time'])
        # User account: prefer mobile, fall back to email.
        sql_user = "SELECT mobile, email FROM lie_user_main WHERE user_id = %d" % user_id
        user = DBHandler.read(db, sql_user)
        if len(user) > 0:
            mobile = user[0][0]
            email = user[0][1]
        else:
            mobile = ''
            email = ''
        mobile = mobile if mobile != '' else email
        # Shipping address.
        sql_addr = "SELECT consignee, address FROM lie_order_address WHERE order_id = %d" % order_id
        addr = DBHandler.read(db, sql_addr)
        consignee = addr[0][0]
        address = addr[0][1]
        # Invoice data.
        sql_inv = "SELECT inv_type, tax_title, company_phone FROM lie_order_invoice WHERE order_id = %d" % order_id
        inv = DBHandler.read(db, sql_inv)
        inv_type = inv[0][0]
        tax_title = inv[0][1]
        company_phone = inv[0][2]
        # Platform label from the order source.
        if 'pf=2' in order_source:
            pf = '移动端'
        elif 'pf=-1' in order_source:
            pf = '后台'
        else:
            pf = 'PC端'
        # Payment-type label.
        pt = ''
        if order_pay_type == 1:
            pt = '全款支付'
        elif order_pay_type == 2:
            pt = '预付款支付'
        elif order_pay_type == 3:
            pt = '账期支付'
        # One Excel row per order item.
        sql_items = "SELECT goods_name,brand_name,supplier_name,goods_number,goods_price,single_pre_price \
        FROM lie_order_items WHERE order_id = %d" % order_id
        items = DBHandler.read(db, sql_items)
        for i in items:
            goods_name = i[0]
            brand_name = i[1]
            supplier_name = i[2]
            goods_number = i[3]
            goods_price = i[4]
            single_pre_price = i[5]
            rd[index] = {
                'order_id': order_id,
                'order_sn': order_sn,
                'mobile': mobile,
                'consignee': consignee,
                'create_time': create_time,
                'pf': pf,
                'goods_name': goods_name,
                'brand_name': brand_name,
                'supplier_name': supplier_name,
                'goods_number': goods_number,
                'goods_price': goods_price,
                'single_pre_price': single_pre_price,
                'sum_price': float(goods_price) * goods_number,
                'currency': currency,
                'order_pay_type': pt,
                'status': order_status[status],
                'address': address,
                'inv_type': inv_type_d[inv_type],
                'tax_title': tax_title,
                'company_phone': company_phone,
                'order_source': order_source,
                'cancel_reason': cancel_reason,
                'order_amount': order_amount
            }
            index += 1
    return rd
from translate.ts_base import TsBase
from extract.ex_shence import ExShenCe
from load.load_mysql import LoadMysql
from config.conn_list import ConnList
from utils.date_handler import DateHandler
import re
class TsPage(TsBase):
# 初始化
def __init__(self, data):
    """Build the per-page / per-platform metrics skeleton.

    :param data: iterable of dicts, each with 'ptag' (page key),
                 'regex' (URL pattern used by trans_sc_regex) and
                 'id' (page id).
    """
    # self.pd maps ptag -> {'pc'|'h5': metric dict}.
    self.pd = {}
    for d in data:
        pg = d['ptag']
        rg = d['regex']
        pgd = d['id']
        self.pd[pg] = {}
        for pf in ['pc', 'h5']:
            self.pd[pg][pf] = {
                'page_id': pgd,
                # platform code: 1 = PC, 2 = H5
                'platform': 1 if pf == 'pc' else 2,
                'pv_count': 0,
                'uv_count': 0,
                'ip_count': 0,
                'entry_count': 0,
                'outward_count': 0,
                'exit_count': 0,
                'stay_time_count': 0,
                'reg_count': 0,
                'log_count': 0,
                'detail_count': 0,
                'addcart_count': 0,
                'buy_count': 0,
                'search_count': 0,
                'result_qq': 0,
                'no_result_qq': 0,
                'right_qq': 0,
                'top_qq': 0,
                'confirm_count': 0,
                'order_count': 0,
                'order_pay_count': 0,
                # *_people_count hold raw IPs/ids and are de-duplicated
                # by the callers before counting.
                'search_people_count': [],
                'reg_people_count': [],
                'log_people_count': [],
                'addcart_people_count': [],
                'buy_people_count': [],
                'qq_people_count': [],
                'regex': rg
            }
# 计算神策数据
def trans_sc_regex(self):
    """Fill pv/uv/ip counts from ShenCe for every page with a regex.

    For each page key, the stored URL regex is prefixed with the
    www. (PC) / m. (H5) host pattern and queried via ExShenCe.reg_pui;
    the returned pv/uv/ip numbers are written into the page buckets.
    """
    for p in self.pd:
        regex = self.pd[p]['pc']['regex']
        # An empty regex means the page has no ShenCe mapping.
        if regex != '':
            pc_re = '(.*)(www\.)' + self.pd[p]['pc']['regex']
            h5_re = '(.*)(m\.)' + self.pd[p]['h5']['regex']
            pc_sc = ExShenCe('').reg_pui(pc_re)
            h5_sc = ExShenCe('').reg_pui(h5_re)
            self.pd[p]['pc']['pv_count'] = pc_sc['pv']
            self.pd[p]['pc']['uv_count'] = pc_sc['uv']
            self.pd[p]['pc']['ip_count'] = pc_sc['ip']
            self.pd[p]['h5']['pv_count'] = h5_sc['pv']
            self.pd[p]['h5']['uv_count'] = h5_sc['uv']
            self.pd[p]['h5']['ip_count'] = h5_sc['ip']
"""
计算登录注册数据
data: 数据
d_key: 需要解析的key
p_key: 平台key
r_key: 结果字典key
"""
def trans_user_rl(self, data, d_key, p_key, r_key):
    """Count register/login events per page from remark strings.

    :param data: rows with 'user_id', a remark field and a platform field.
    :param d_key: key of the remark field holding the ptag.
    :param p_key: key of the platform field (1 = PC, else H5).
    :param r_key: counter name to increment in self.pd.
    """
    ul = []
    for row in data:
        mk = row[d_key]
        pf = row[p_key]
        ud = row['user_id']
        if ud not in ul:  # count each user once
            ul.append(ud)
            obj = re.match(r'(.*)ptag=([\w\-\.]*)($|,(.*))', mk, re.M | re.I)
            # Take the last dot-separated segment of the ptag.
            pt = self.trans_ptag(obj, 2, 1)
            if pt != '':  # only attribute rows that carried a ptag
                self.trans_ptag_value(pt, pf, r_key)
"""
处理ptag
mk: ptag未解析值
"""
def trans_ptag(self, obj, o_ix, o_ax):
    """Extract a ptag segment from a regex match.

    :param obj: re match object or None.
    :param o_ix: group index holding the raw ptag string.
    :param o_ax: 1-based position counted from the end of the
                 dot-separated segments.
    :return: the selected segment, the whole value when it has no
             dots, or '' when obj is falsy.
    """
    if not obj:
        return ''
    segment = obj.group(o_ix)
    if '.' not in segment:
        return segment
    parts = segment.split('.')
    return parts[-o_ax]
"""
处理ptag累加值
ptag: page_tag
pf: 平台值
v: 字典值
"""
def trans_ptag_value(self, ptag, pf, v):
    """Increment counter *v* on the first page bucket whose key occurs
    in *ptag*; unmatched ptags fall back to the '-' bucket.

    :param ptag: resolved page tag string.
    :param pf: platform code (1 = PC, anything else = H5).
    :param v: counter name inside the page bucket.
    """
    platform = 'pc' if pf == 1 else 'h5'
    for page_key in self.pd:
        if page_key in ptag and page_key != '-':
            self.pd[page_key][platform][v] += 1
            break
    else:
        # No page matched: attribute to the catch-all '-' bucket.
        self.pd['-'][platform][v] += 1
"""
计算人数
"""
def trans_ptag_people(self, ptag, pf, v, ip):
    """Append *ip* to people-list *v* of the first page key found in
    *ptag*.  Unlike trans_ptag_value there is no '-' fallback, and the
    caller is responsible for de-duplicating IPs.

    :param pf: platform key, already resolved to 'pc' or 'h5'.
    """
    for page_key in self.pd:
        if page_key != '-' and page_key in ptag:
            self.pd[page_key][pf][v].append(ip)
            break
"""
处理行为数据
data: 行为数据
test_ip: 测试ip
"""
def trans_be_times(self, data, test_ip):
    """Attribute behavior events (search, add-cart, buy, QQ clicks,
    checkout...) to page buckets, skipping internal test IPs.

    :param data: behavior rows with 'ptag', 'platform', 'scene', 'ip'
                 and 'behavior' (numeric event code).
    :param test_ip: iterable of IPs to exclude.
    """
    for be in data:
        pt = be['ptag']
        pf = be['platform']
        sc = be['scene']
        ip = be['ip']
        bh = be['behavior']
        if ip not in test_ip:
            if pt.strip() != '':
                obj = re.match(r'(.*)', pt, re.I | re.M)
                # Default page tag: last dot-segment of the raw ptag.
                r_pt = self.trans_ptag(obj, 1, 1)
                # Product-detail pages use the second-to-last segment.
                if re.match(r'(.*)\.detail', pt, re.M | re.I):
                    self.trans_ptag_value(self.trans_ptag(obj, 1, 2), pf, 'detail_count')
                    r_pt = self.trans_ptag(obj, 1, 2)
                # Add to cart.
                if re.match(r'(.*)addcart(.*|$)', sc, re.I | re.M):
                    self.trans_ptag_value(r_pt, pf, 'addcart_count')
                # Buy now.
                if re.match(r'(.*)buy(.*|$)', sc, re.I | re.M):
                    self.trans_ptag_value(r_pt, pf, 'buy_count')
                # Search.
                if sc == 'search':
                    self.trans_ptag_value(r_pt, pf, 'search_count')
                # Customer-service entry points, by ptag fragment.
                # NOTE(review): 'inquiery' is spelled as in the tracked
                # tag -- do not "fix" without checking the front end.
                if re.match(r'(.*)inquiery(.*|$)', pt, re.M | re.I):
                    self.trans_ptag_value(r_pt, pf, 'result_qq')
                if re.match(r'(.*)noresult(.*|$)', pt, re.M | re.I):
                    self.trans_ptag_value(r_pt, pf, 'no_result_qq')
                if re.match(r'(.*)sidenav(.*|$)', pt, re.M | re.I):
                    self.trans_ptag_value(r_pt, pf, 'right_qq')
                if re.match(r'(.*)layer(.*|$)', pt, re.M | re.I):
                    self.trans_ptag_value(r_pt, pf, 'top_qq')
                # Checkout confirmation event.
                if bh == 6:
                    self.trans_ptag_value(r_pt, pf, 'confirm_count')
                # People counts (IP-deduplicated per page bucket).
                if pf == 1:
                    pf_type = 'pc'
                else:
                    pf_type = 'h5'
                for key in self.pd:
                    if key in r_pt and key != '-':
                        if bh == 11 and ip not in self.pd[key][pf_type]['search_people_count']:
                            self.trans_ptag_people(pt, pf_type, 'search_people_count', ip)
                        elif bh == 2 and ip not in self.pd[key][pf_type]['reg_people_count']:
                            self.trans_ptag_people(pt, pf_type, 'reg_people_count', ip)
                        elif bh == 3 and ip not in self.pd[key][pf_type]['log_people_count']:
                            self.trans_ptag_people(pt, pf_type, 'log_people_count', ip)
                        elif bh == 4 and ip not in self.pd[key][pf_type]['addcart_people_count']:
                            self.trans_ptag_people(pt, pf_type, 'addcart_people_count', ip)
                        elif bh == 5 and ip not in self.pd[key][pf_type]['buy_people_count']:
                            self.trans_ptag_people(pt, pf_type, 'buy_people_count', ip)
                        elif bh == 8 and ip not in self.pd[key][pf_type]['qq_people_count']:
                            self.trans_ptag_people(pt, pf_type, 'qq_people_count', ip)
                        break
"""
处理订单数据
data: 订单数据
"""
def trans_order(self, data):
    """Attribute orders to page buckets and persist page/order links.

    Each order's ptag is parsed from order_source; order_count (and
    order_pay_count for status > 2) is incremented on the matching
    page, and a row linking the order to its page_id is written into
    lie_page_order.

    :param data: order rows with 'order_source', 'status', 'order_id'.
    """
    for order in data:
        source = order['order_source']
        status = order['status']
        od = order['order_id']
        sobj = re.match(r'(.*)ptag=([\w\-\.]*)($|,(.*))', source, re.M | re.I)
        pobj = re.match(r'(.*)pf=([\w\-\.]*)($|,(.*))', source, re.M | re.I)
        if pobj:
            pf = int(pobj.group(2))
        else:
            pf = 1  # default to PC when no pf= tag is present
        # Negative pf codes (back office) are skipped.
        if pf > 0:
            ptag = self.trans_ptag(sobj, 2, 1)
            self.trans_ptag_value(ptag, pf, 'order_count')
            # status > 2 means the order has been paid.
            if status > 2:
                self.trans_ptag_value(ptag, pf, 'order_pay_count')
            # Resolve the page_id for the order link row (0 = unmatched).
            pf_type = 'pc' if pf == 1 else 'h5'
            pgd = 0
            for key in self.pd:
                if key in ptag and key != '-':
                    pgd = self.pd[key][pf_type]['page_id']
                    break
            col = ['order_id', 'page_id', 'platform', 'cal_ts', 'insert_time']
            ld = [{'order_id': od, 'page_id': pgd, 'platform': pf,
                   'cal_ts': DateHandler.now_date(1, 1), 'insert_time': DateHandler.now_datetime()}]
            LoadMysql.simple_dict_load(col, 'lie_page_order', ld, db=ConnList.Dashboard(), cal_time=False)
from config.conn_list import ConnList
from utils.db_handler import DBHandler
from extract.ex_base import Base
from translate.ts_base import TsBase
class TsPurchase(TsBase):
@staticmethod
def trans_sku_wms(data):
    """Enrich each sku entry in *data* (in place) with WMS/purchase info.

    Per sku: latest purchase supplier and creator name, safe-stock
    columns, and purchase aggregates (total picked, min/max price,
    total put away).

    :param data: {sku_id: dict} -- mutated in place.
    """
    wms = ConnList.Wms()
    ic = ConnList.IcData()
    for sku_id in data:
        # Latest purchase record for this sku.
        sql = "SELECT supplier_name, create_uid \
        FROM lie_purchase \
        WHERE picking_id = \
        (SELECT picking_id FROM lie_purchase_items WHERE sku_id = %d ORDER BY picking_id DESC LIMIT 1)" \
        % sku_id
        # Supplier and creator of the latest purchase.
        purchase_data = DBHandler.read(wms, sql)
        supplier_name = purchase_data[0][0] if len(purchase_data) > 0 else ''
        create_uid = purchase_data[0][1] if len(purchase_data) > 0 else 0
        # Resolve the creator's display name from the IC database.
        sql = "SELECT name FROM user_info WHERE userId = %d" % create_uid
        ic_data = DBHandler.read(ic, sql)
        data[sku_id]['supplier_name'] = supplier_name
        data[sku_id]['creater'] = ic_data[0][0] if len(ic_data) > 0 else ''
        # Copy safe-stock columns onto the sku entry.
        col = ['goods_name', 'class_name', 'brand_name', 'encap', 'mpq', 'goods_status',
               'goods_type', 'purchase_num', 'in_num', 'lock_stock', 'wait_stock', 'stock',
               '(stock+wait_stock) as actual_stock']
        col_to_str = Base.col_to_str(col)
        sql = "SELECT %s \
        FROM lie_safe_stock \
        WHERE sku_id = %d" % (col_to_str, sku_id)
        wms_data = DBHandler.read(wms, sql)
        # NOTE(review): assumes lie_safe_stock always has a row for the
        # sku -- an empty result would raise IndexError here.
        index = 0
        for c in col:
            # The computed column is stored under its alias.
            if c == '(stock+wait_stock) as actual_stock':
                c = 'actual_stock'
            data[sku_id][c] = wms_data[0][index]
            index += 1
        # Purchase aggregates: totals and price extremes.
        sql = "SELECT sum(picking_number), max(picking_price), min(picking_price), sum(putaway_number)\
        FROM lie_purchase_items WHERE sku_id = %d" % sku_id
        wms_data = DBHandler.read(wms, sql)
        for w in wms_data:
            data[sku_id]['sum_picking_number'] = w[0] if w[0] is not None else 0
            data[sku_id]['max_picking_price'] = '%.2f' % w[1] if w[1] is not None else 0.0
            data[sku_id]['min_picking_price'] = '%.2f' % w[2] if w[2] is not None else 0.0
            data[sku_id]['sum_putaway_number'] = w[3] if w[3] is not None else 0
    wms.close()
    ic.close()
"""
去除下架商品
"""
@staticmethod
def trans_rid_soldout_goods(data):
    """Filter out SKUs whose goods record is off-shelf (status = 3).

    :param data: {sku_id: info} mapping.
    :return: new dict containing only the SKUs still on the shelf.
    """
    kept = {}
    db = ConnList.lxData()
    for sku_id in data:
        query = "SELECT 1 FROM lie_goods WHERE goods_id = %d AND status = 3" % sku_id
        # An empty result means the goods row is NOT marked off-shelf.
        if not DBHandler.read(db, query):
            kept[sku_id] = data[sku_id]
    db.close()
    return kept
from utils.excel_handler import ExcelHandler
from config.conn_list import ConnList
from utils.db_handler import DBHandler
from translate.ts_base import TsBase
import time
class TsRate(TsBase):
"""
获取复购人员名单
"""
@staticmethod
def trans_re_put_account(ex_user, ex_order):
    """Export repeat-purchase user accounts to Excel and tally their
    order metrics over a fixed window (2018-12-03 .. 2019-01-01).

    :param ex_user: extractor providing re_pur_user(where).
    :param ex_order: extractor providing all_order(where).
    """
    # Excel title / column keys.
    title = ['账号', '注册时间']
    content = ['mobile', 'create_time']
    # Result rows for the export.
    rd = []
    # Orders placed by repeat buyers.
    order_count = 0
    # NOTE(review): order_people is never updated below -- dead counter.
    order_people = 0
    # Amount ordered by repeat buyers.
    order_amount_ex = 0
    # Paid orders / amount of repeat buyers.
    order_paid_count = 0
    order_paid_amount_ex = 0
    # Totals over ALL orders in the window.
    order_sum_count = 0
    order_sum_paid_count = 0
    user_list = []
    all_user = []
    # Fixed window: unix timestamps for 2018-12-03 and 2019-01-01.
    where_user = {'start_time': 1543766400, 'count': 2}
    where_order = {'start_time': 1543766400, 'end_time': 1546272000, 'condition': ['order_id > 0']}
    # Repeat-purchase users (>= 2 orders since start_time).
    user_data = ex_user.re_pur_user(where_user)
    # All orders in the window.
    order_data = ex_order.all_order(where_order)
    for row in order_data:
        user_id = row['user_id']
        order_status = row['status']
        order_currency = row['currency']
        # USD -> RMB at a fixed 6.95 rate.
        order_amount = float(row['order_amount']) * 6.95 if order_currency == 2 else float(row['order_amount'])
        order_sum_count += 1
        # status > 2 means the order has been paid.
        if order_status > 2:
            order_sum_paid_count += 1
        # Distinct users over all orders.
        if user_id not in all_user:
            all_user.append(user_id)
        # Accumulate only orders belonging to repeat buyers.
        if user_id in user_data:
            order_count += 1
            order_amount_ex += order_amount
            if order_status > 2:
                order_paid_count += 1
                order_paid_amount_ex += order_amount
            # De-duplicate repeat buyers for the export.
            if user_id not in user_list:
                user_list.append(user_id)
    for user_id in user_list:
        db = ConnList.Order()
        sql = 'select mobile,email,create_time from lie_user_main where user_id = %d' % user_id
        results = DBHandler.read(db=db, sql=sql)
        mobile = results[0][0]
        email = results[0][1]
        create_time = results[0][2]
        # Prefer mobile, fall back to email as the account identifier.
        rd.append({'mobile': mobile if mobile != '' else email,
                   'create_time': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(create_time))})
    ExcelHandler.write_to_excel(title, content, rd, "result", result_type=2)
"""
计算首购、复购指标数据
"""
def trans_fb_data(self, exchange):
    """Compute first-purchase and repeat-purchase metrics.

    For every user: track the earliest (first) paid order, the total
    amount and order count, and aggregate users with >= 2 orders into
    the repeat-purchase summary.  USD amounts are converted via
    *exchange*.  Repeat buyers are printed as before.

    Improvements over the original:
    - the large commented-out Excel-export block was removed (dead code);
    - the computed structures are now returned (previously the method
      returned None and the results were unreachable) -- backward
      compatible, since callers ignored the old None return.

    :param exchange: USD -> RMB conversion rate.
    :return: {'first': {user_id: first-order info},
              'users': {user_id: totals}, 'repeat': {'user', 'amount'}}
    """
    ul = {}
    fb_ul = {}
    rb_ul = {'user': 0, 'amount': 0}
    for row in self.data:
        user_id = row['user_id']
        sale_id = row['sale_id']
        tax_title = row['tax_title']
        nike_name = row['nike_name']
        pay_time = row['pay_time']
        # Convert USD to RMB.
        order_amount = float(row['order_amount'] if row['currency'] == 1 else float(row['order_amount']) * exchange)
        # First-purchase tracking: keep the order with the earliest pay_time.
        if user_id not in fb_ul:
            fb_ul[user_id] = {'order_amount': order_amount, 'pay_time': pay_time, 'sale_id': sale_id,
                              'tax_title': tax_title + nike_name}
        elif pay_time < fb_ul[user_id]['pay_time']:
            fb_ul[user_id]['pay_time'] = pay_time
            fb_ul[user_id]['order_amount'] = order_amount
        # Per-user totals for repeat-purchase detection.
        if user_id not in ul:
            ul[user_id] = {'order_amount': order_amount, 'count': 1, 'sale_id': sale_id,
                           'tax_title': tax_title + nike_name}
        else:
            ul[user_id]['order_amount'] += order_amount
            ul[user_id]['count'] += 1
    # Aggregate repeat buyers (>= 2 orders).
    for user in ul:
        if ul[user]['count'] >= 2:
            rb_ul['user'] += 1
            rb_ul['amount'] += ul[user]['order_amount']
            print(user, ul[user]['count'], ul[user]['order_amount'])
    return {'first': fb_ul, 'users': ul, 'repeat': rb_ul}
"""
累计实付
"""
def trans_total_paid(self, exchange=6.8):
    """Accumulate lifetime paid amount per user and export to Excel.

    Rows are grouped by user_id; each row's order_amount is added
    (USD converted via *exchange*).  The export lists the account
    (mobile, or email when mobile is empty) and the total.

    Improvement over the original: the USD -> RMB rate is a parameter
    (default 6.8, the previously hard-coded value).

    :param exchange: USD -> RMB conversion rate (default 6.8).
    """
    totals = {}
    for row in self.data:
        user_id = row['user_id']
        # Convert USD to RMB.
        order_amount = float(row['order_amount'] if row['currency'] == 1 else float(row['order_amount']) * exchange)
        if user_id not in totals:
            totals[user_id] = {'email': row['email'], 'mobile': row['mobile'], 'order_amount': order_amount}
        else:
            totals[user_id]['order_amount'] += order_amount
    sheet = []
    title = ['账号', '累计实付金额']
    content = ['account', 'order_amount']
    for user_id in totals:
        info = totals[user_id]
        sheet.append({'account': info['mobile'] if info['mobile'] != '' else info['email'],
                      'order_amount': info['order_amount']})
    ExcelHandler.write_to_excel(title, content, sheet, "result", result_type=2)
from translate.ts_base import TsBase
from utils.date_handler import DateHandler
from load.load_mysql import LoadMysql
from config.conn_list import ConnList
import re
import json
class TsTag(TsBase):
"""
初始化Adtag字典
"""
def trans_init_dict(self):
    """Build the per-adtag / per-platform counter skeleton.

    Two synthetic channels, 'direct' and 'others', are appended to
    self.data on purpose: later passes fall back to them for traffic
    without (or with unknown) adtags.  Channels with an empty name are
    skipped.

    :return: {adtag: {'pc'|'h5': counter dict}}
    """
    def _empty_bucket():
        # Fresh dict per bucket so pc/h5 never share mutable lists.
        return {'pv_count': 0, 'search_count': 0, 'search_people_count': 0,
                'log_count': 0, 'reg_count': 0, 'addcart_count': 0,
                'addcart_people_count': [], 'buy_count': 0,
                'buy_people_count': [], 'order_count': 0,
                'pv_radio': '0%', 'order_id': []}
    for synthetic in ('direct', 'others'):
        self.data.append({'one_level_channel_en': synthetic})
    channels = {}
    for row in self.data:
        name = row['one_level_channel_en']
        if name != '':
            channels[name] = {pf: _empty_bucket() for pf in ('pc', 'h5')}
    return channels
"""
处理adtag值
"""
def trans_bd_tag_value(self, bd_data, is_all):
    """Accumulate page views into the adtag dict and compute pv shares.

    Each URL row is classified as H5 (m. subdomain) or PC, its adtag is
    parsed, and pv_count is attributed via trans_tag_obj.  When is_all
    is not True, only self-operated pages (/v3/xianhuo or /item/) count.
    Afterwards every bucket's pv_radio is set to its share of the total.

    BUG FIX: the share loop previously wrote self.data[key]['pc'] for
    both platforms, so the h5 ratio overwrote the pc ratio; it now
    writes each platform's own bucket.

    :param bd_data: rows with 'url' and 'pv_count'.
    :param is_all: True to count every page, otherwise self-run only.
    """
    sum_pv = 0
    for row in bd_data:
        url = row['url']
        pv_count = row['pv_count']
        sum_pv += pv_count
        # m. subdomain -> H5, otherwise PC.
        h5_obj = re.match(r'(.*)://m\.(.*)', url, re.M | re.I)
        pf = 'h5' if h5_obj else 'pc'
        adtag_obj = re.match(r'(.*)?adtag=([\w]*)(\.|$|&|(.*))', url, re.M | re.I)
        if is_all is not True:
            # Restrict to self-operated pages.
            zy_obj = re.match(r'(.*)(/v3/xianhuo|/item/)(.*)', url, re.M | re.I)
            if zy_obj:
                self.trans_tag_obj(pf, adtag_obj, 2, 'pv_count', pv_count)
        else:
            self.trans_tag_obj(pf, adtag_obj, 2, 'pv_count', pv_count)
    # Per-bucket pv share of the grand total.
    if sum_pv > 0:
        for key in self.data:
            for pf in ['pc', 'h5']:
                self.data[key][pf]['pv_radio'] = \
                    str(round(self.data[key][pf]['pv_count'] / sum_pv * 100, 2)) + '%'
"""
处理自营行为adtag
"""
def trans_be_tag_value(self, be_data, order_data, expose_data, reg_data, log_data, test_ip, is_all):
    """Attribute behavior, order, exposure, register and login events to
    adtag buckets.

    :param be_data: behavior rows ('ip', 'adtag', 'param', 'platform',
                    'behavior'); 4 = add-cart, 5 = buy.
    :param order_data: order rows ('order_source', 'order_id').
    :param expose_data: JSON strings of exposure events.
    :param reg_data: registration rows ('reg_remark', 'create_device').
    :param log_data: login rows ('user_id', 'login_remark', 'platform').
    :param test_ip: internal IPs to exclude from behavior counting.
    :param is_all: False restricts behavior rows to self-operated items.
    """
    # Behavior events.
    for row in be_data:
        ip = row['ip']
        adtag = row['adtag']
        param = row['param']
        pf = 'pc' if row['platform'] == 1 else 'h5'
        behavior = row['behavior']
        if ip not in test_ip:
            adtag_obj = re.match(r'((\w)+)(\.|$|&|(.*))', adtag, re.M | re.I)
            if not is_all:  # self-operated subset only
                param_obj = re.match(r'(.*)supplier_name=猎芯自营(\.|-|$|&|(.*))', param, re.M | re.I)
                # The event belongs to a self-operated item.
                if param_obj:
                    if behavior == 4:  # add to cart
                        self.trans_tag_ip_obj(pf, adtag_obj, 1, 'addcart_count', 'addcart_people_count', ip)
                    elif behavior == 5:  # buy now
                        self.trans_tag_ip_obj(pf, adtag_obj, 1, 'buy_count', 'buy_people_count', ip)
            else:
                if behavior == 4:  # add to cart, all items
                    self.trans_tag_ip_obj(pf, adtag_obj, 1, 'addcart_count', 'addcart_people_count', ip)
                elif behavior == 5:  # buy now, all items
                    self.trans_tag_ip_obj(pf, adtag_obj, 1, 'buy_count', 'buy_people_count', ip)
    # Orders: platform from pf=, channel from adtag=.
    for row in order_data:
        order_source = row['order_source']
        order_id = row['order_id']
        pf_obj = re.match(r'(.*)pf=((-|)[0-9])(\.|$|&|(.*))', order_source, re.M | re.I)
        adtag_obj = re.match(r'(.*)adtag=([\w]*)(\.|$|&|(.*))', order_source, re.M | re.I)
        # Only rows with an explicit pf tag are attributed.
        if pf_obj:
            pf = 'h5' if int(pf_obj.group(2)) == 2 else 'pc'
            self.trans_tag_order_obj(pf, adtag_obj, 2, 'order_count', 1, order_id)
    # Exposure (search-result) events: self-operated suppliers only.
    for row in expose_data:
        row = json.loads(row)
        supplier_type = row['supplier_type']
        adtag = row['adtag'] if 'adtag' in row else ''
        if supplier_type == '猎芯自营':
            adtag_obj = re.match(r'((\w)+)(\.|$|&|(.*))', adtag, re.M | re.I)
            # Exposure data carries no platform; counted as PC.
            self.trans_tag_obj('pc', adtag_obj, 1, 'search_count', 1)
    # Registrations: adtag from the remark, platform from the device.
    for row in reg_data:
        rm = row['reg_remark']
        cd = row['create_device']
        adtag_obj = re.match(r'(.*)adtag=([\w]*)(\.|$|&|(.*))', rm, re.M | re.I)
        pf = 'pc' if cd == 1 else 'h5'
        self.trans_tag_obj(pf, adtag_obj, 2, 'reg_count', 1)
    # Logins: one count per distinct user.
    ul = []
    for row in log_data:
        ud = row['user_id']
        lr = row['login_remark']
        pf = row['platform']
        if ud not in ul:
            ul.append(ud)
            adtag_obj = re.match(r'(.*)adtag=([\w]*)(\.|$|&|(.*))', lr, re.M | re.I)
            pf = 'pc' if pf == 1 else 'h5'
            self.trans_tag_obj(pf, adtag_obj, 2, 'log_count', 1)
"""
处理正则匹配结果 (不统计IP)
"""
def trans_tag_obj(self, pf, adtag_obj, adtag_index, key, value):
    """Add *value* to counter *key* on the bucket selected by the adtag
    match: no match -> 'direct', unknown tag -> 'others'.

    :param pf: platform key, 'pc' or 'h5'.
    :param adtag_obj: re match object (or None) holding the adtag.
    :param adtag_index: group index of the adtag within the match.
    """
    if not adtag_obj:
        bucket = 'direct'
    else:
        candidate = adtag_obj.group(adtag_index)
        bucket = candidate if candidate in self.data else 'others'
    self.data[bucket][pf][key] += value
"""
处理正则订单结果
"""
def trans_tag_order_obj(self, pf, adtag_obj, adtag_index, key, value, order_id):
    """Like trans_tag_obj, and additionally record *order_id* on the
    selected bucket's 'order_id' list (no de-duplication).

    Bucket selection: no match -> 'direct', unknown tag -> 'others'.
    """
    if not adtag_obj:
        bucket = 'direct'
    else:
        candidate = adtag_obj.group(adtag_index)
        bucket = candidate if candidate in self.data else 'others'
    slot = self.data[bucket][pf]
    slot[key] += value
    slot['order_id'].append(order_id)
"""
处理正则匹配结果 (统计IP)
key1: 次数
key2: 人数
"""
def trans_tag_ip_obj(self, pf, adtag_obj, adtag_index, key1, key2, ip):
    """Count an event (*key1*, once per call) and track distinct IPs
    (*key2*, de-duplicated) on the bucket selected by the adtag match.

    Bucket selection: no match -> 'direct', unknown tag -> 'others'.
    """
    if not adtag_obj:
        bucket = 'direct'
    else:
        candidate = adtag_obj.group(adtag_index)
        bucket = candidate if candidate in self.data else 'others'
    slot = self.data[bucket][pf]
    # People list keeps each IP once.
    if ip not in slot[key2]:
        slot[key2].append(ip)
    slot[key1] += 1
"""
处理结果
"""
def trans_result(self, result, table):
    """Flatten the adtag dict into DB-ready rows and persist order links.

    Mutates the per-bucket dicts in place (people lists become counts,
    adtag/platform/timestamps are added), appends the pc and h5 rows to
    the result list, and writes any collected order_ids into *table*.

    :param result: {adtag: {'pc': bucket, 'h5': bucket}}.
    :param table: target table name for the order-link rows.
    :return: flat list of bucket dicts (pc and h5 per adtag).
    """
    rd = []
    for row in result:
        # In-place conversion of each bucket into a loadable row.
        dt = result[row]
        dt_pc = dt['pc']
        dt_h5 = dt['h5']
        dt_h5['adtag'] = row
        dt_pc['adtag'] = row
        # People lists -> distinct counts.
        dt_pc['addcart_people_count'] = len(dt_pc['addcart_people_count'])
        dt_h5['addcart_people_count'] = len(dt_h5['addcart_people_count'])
        dt_pc['buy_people_count'] = len(dt_pc['buy_people_count'])
        dt_h5['buy_people_count'] = len(dt_h5['buy_people_count'])
        dt_pc['platform'] = '1'
        dt_h5['platform'] = '2'
        dt_pc['insert_time'] = DateHandler.now_datetime()
        dt_h5['insert_time'] = DateHandler.now_datetime()
        dt_pc['cal_ts'] = DateHandler.now_date(1, 1)
        dt_h5['cal_ts'] = DateHandler.now_date(1, 1)
        rd.append(dt_pc)
        rd.append(dt_h5)
        # Collect order-link rows for this adtag.
        order_list = []
        if len(dt_pc['order_id']) > 0:
            for order_id in dt_pc['order_id']:
                order_list.append({'order_id': order_id, 'adtag': row, 'platform': 1,
                                   'insert_time': DateHandler.now_datetime(), 'cal_ts': DateHandler.now_date(1, 1)})
        if len(dt_h5['order_id']) > 0:
            for order_id in dt_h5['order_id']:
                order_list.append({'order_id': order_id, 'adtag': row, 'platform': 2,
                                   'insert_time': DateHandler.now_datetime(), 'cal_ts': DateHandler.now_date(1, 1)})
        # Persist only when there is something to write.
        if len(order_list) > 0:
            load_col = ['order_id', 'adtag', 'platform', 'cal_ts', 'insert_time']
            LoadMysql.simple_dict_load(load_col, table, order_list,
                                       db=ConnList.Dashboard(), cal_time=False)
    return rd
from translate.ts_base import Base
from translate.ts_base import TsBase
from utils.excel_handler import ExcelHandler
from config.conn_list import ConnList
from utils.db_handler import DBHandler
from utils.utils_handler import UtilsHandler
import time
import re
class TsUser(Base):
class TsUser(TsBase):
"""
获取一定时间内未登录用户
"""
def trans_user_without_login(self, day):
def trans_user_without_login(self, day, log_start_time, log_end_time):
# Excel标题、内容
title = ['账号', '注册时间']
......@@ -26,7 +28,6 @@ class TsUser(Base):
# 获取登录时间
db = ConnList.Order()
sql = 'select last_login_time from lie_user_login_log where user_id = %d' % user_id
print(user_id)
results = DBHandler.read(db=db, sql=sql)
# 标记位
index = 0
......@@ -34,17 +35,384 @@ class TsUser(Base):
for r in results:
last_login_time = r[0]
# 时间间隔
dur_time = last_login_time - create_time
# dur_time = last_login_time - create_time
# 下次登录时间大于一天且小于X天
if 86400 < dur_time < 60 * 60 * 24 * day:
break
# if 86400 < dur_time < 60 * 60 * 24 * day:
# break
# 标记位自增
if log_start_time <= last_login_time <= log_end_time:
break
index += 1
if index == len(results):
rd.append({'mobile': mobile if mobile != '' else email,
'create_time': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(create_time))})
print(len(rd))
# 生成Excel
ExcelHandler.write_to_excel(title, content, rd, "result", result_type=2)
"""
指定时间内未登录用户
"""
def trans_user_without_login_time(self, start_time):
    """Export users in self.data to Excel.

    NOTE(review): the DB check that filtered users by "no login since
    *start_time*" is commented out below, so *start_time* is currently
    unused and EVERY user row is exported -- confirm whether the filter
    should be re-enabled.

    :param start_time: unix timestamp (currently unused, see note).
    """
    # Excel title / column keys.
    title = ['user_id', '账号', '注册时间']
    content = ['user_id', 'mobile', 'create_time']
    # Result rows.
    rd = []
    index = 0
    for row in self.data:
        mobile = row['mobile']
        email = row['email']
        create_time = row['create_time']
        user_id = row['user_id']
        # Progress indicator for large exports.
        if index % 1000 == 0:
            print(index)
        index += 1
        # # Fetch login history (disabled filter, see docstring).
        # db = ConnList.Order()
        # sql = 'select 1 from lie_user_login_log where user_id = %d AND last_login_time >= %d limit 1' % (user_id, start_time)
        # results = DBHandler.read(db=db, sql=sql)
        # if len(results) == 0:
        rd.append({'user_id': user_id,
                   'mobile': mobile if mobile != '' else email,
                   'create_time': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(create_time))})
    # Write the Excel file.
    ExcelHandler.write_to_excel(title, content, rd, "result", result_type=2)
"""
获取订单会员信息
"""
def trans_user_order(self):
    """Export the account (mobile, or email when mobile is empty) of
    every distinct user found in self.data."""
    title = ['账号']
    content = ['mobile']
    seen = []   # user_ids already exported, in first-seen order
    rd = []     # rows handed to the Excel writer
    for row in self.data:
        uid = row['user_id']
        if uid in seen:
            continue
        seen.append(uid)
        # A fresh connection per lookup -- DBHandler.read closes the
        # connection it is handed.
        db = ConnList.Order()
        sql = 'select mobile,email from lie_user_main where user_id = %d' % uid
        hit = DBHandler.read(db=db, sql=sql)
        mobile, email = hit[0][0], hit[0][1]
        rd.append({'mobile': mobile if mobile != '' else email})
    # Dump to Excel.
    ExcelHandler.write_to_excel(title, content, rd, "result", result_type=2)
"""
统计搜索词
"""
def trans_search_keyword(self, test_ip, pf='NULL'):
    """Count normalised search keywords.

    test_ip -- iterable of IP addresses to ignore (internal testers)
    pf      -- optional platform filter; 'NULL' means "all platforms"
    Returns a list of (keyword, count) tuples sorted by count descending.
    """
    counts = {}
    for rec in self.data:
        if rec['ip'] in test_ip:
            continue
        # Normalise in place (matching the original's mutation of the
        # record): trim surrounding whitespace, uppercase.
        keyword = rec['keyword'].strip().upper()
        rec['keyword'] = keyword
        # Skip rows polluted with embedded anchor-tag markup.
        if '</A>' in keyword or '<A' in keyword:
            continue
        # Honour the optional platform filter (was a duplicated branch).
        if pf != 'NULL' and pf != rec['platform']:
            continue
        counts[keyword] = counts.get(keyword, 0) + 1
    # dict.items() already yields (key, count) pairs -- no helper needed.
    return sorted(counts.items(), key=lambda kv: kv[1], reverse=True)
"""
判断新老用户并导出
"""
def trans_no_user(self, start_time, exchange):
    """Split order rows into new vs returning users, total their ordered
    and paid amounts, and export one Excel row per user.

    start_time -- UNIX cutoff: any order before it marks the user as old
    exchange   -- RMB rate applied to currency == 2 rows
    """
    # Excel header captions and the dict keys backing each column.
    title = ['账号', '用户类型', '实付金额']
    content = ['account', 'user_type', 'order_amount']
    # Distinct-user lists, split new/old and ordered/paid.
    new_ul = []
    new_paid_ul = []
    old_ul = []
    old_paid_ul = []
    rd = {}
    # Amount accumulators, same split.
    new_amount = 0
    new_paid_amount = 0
    old_amount = 0
    old_paid_amount = 0
    db = ConnList.Order()
    for row in self.data:
        user_id = row['user_id']
        order_amount = float(row['goods_price'] * row['goods_number'])
        status = row['status']
        currency = row['currency']
        mobile = row['mobile']
        email = row['email']
        # Currency conversion (2 -> multiply by the supplied rate).
        if currency == 2:
            order_amount *= exchange
        # New vs returning: any order strictly before start_time -> old.
        sql = "SELECT user_id FROM lie_order WHERE user_id = %d AND create_time < %d" % (user_id, start_time)
        result = DBHandler.read(db, sql)
        user_type = '老用户' if len(result) > 0 else '新用户'
        # Ordered totals (any status).
        if user_type == '新用户':
            if user_id not in new_ul:
                new_ul.append(user_id)
            new_amount += order_amount
        else:
            if user_id not in old_ul:
                old_ul.append(user_id)
            old_amount += order_amount
        # Paid totals -- NOTE(review): status > 2 presumably means "paid";
        # confirm against the order status enum.
        if status > 2:
            if user_type == '新用户':
                if user_id not in new_paid_ul:
                    new_paid_ul.append(user_id)
                new_paid_amount += order_amount
            else:
                if user_id not in old_paid_ul:
                    old_paid_ul.append(user_id)
                old_paid_amount += order_amount
        # Excel dict: one entry per user, order_amount accumulated.
        if user_id not in rd:
            rd[user_id] = {'account': mobile if mobile != '' else email,
                           'order_amount': order_amount, 'user_type': user_type}
        else:
            rd[user_id]['order_amount'] += order_amount
    print('新:', len(new_ul), new_amount, len(new_paid_ul), new_paid_amount)
    print('旧:', len(old_ul), old_amount, len(old_paid_ul), old_paid_amount)
    ExcelHandler.write_to_excel(title, content, rd, "result", result_type=1)
"""
计算PC、H5平台用户数
"""
def trans_pf_user(self, keyword):
    """Count distinct users per platform.

    keyword -- name of the field whose value 1 means PC (anything else H5).
    Returns {'pc': n, 'h5': n, 'sum': n} with distinct-user counts.
    """
    # Sets give O(1) membership; the original list scans were O(n^2).
    pc_users = set()
    h5_users = set()
    for row in self.data:
        bucket = pc_users if row[keyword] == 1 else h5_users
        bucket.add(row['user_id'])
    # 'sum' is the distinct-user count across both platforms.
    return {'pc': len(pc_users), 'h5': len(h5_users),
            'sum': len(pc_users | h5_users)}
"""
正则计算PC、H5平台次数
"""
def trans_pf_regex_user(self, keyword):
    """Count rows per platform by extracting ``pf=<digit>`` from a source
    string via regex (pf=1 -> PC, any other digit -> H5).

    keyword -- name of the field holding the source/adtag string.
    Returns {'pc': n, 'h5': n, 'sum': n}.
    """
    # Compile once, outside the loop, instead of re-parsing every row.
    pf_pattern = re.compile(r'(.*)pf=([0-9])(,|$|&|(.*))', re.M | re.I)
    counters = {'pc': 0, 'h5': 0}
    for row in self.data:
        match = pf_pattern.match(row[keyword])
        if match is None:
            continue
        try:
            platform = 'pc' if int(match.group(2)) == 1 else 'h5'
        except ValueError:  # narrowed from bare except; group is one digit
            continue
        counters[platform] += 1
    return {'pc': counters['pc'], 'h5': counters['h5'],
            'sum': counters['pc'] + counters['h5']}
"""
"""
def trans_user_id(self):
    """Resolve each (account, create_time) row of self.data to a user_id
    via lie_user_main (by mobile or email) and export to Excel; missing
    accounts get user_id -1."""
    out = []
    conn = ConnList.Order()
    title = ['user_id', '账号', '创建时间']
    content = ['user_id', 'account', 'create_time']
    for counter, row in enumerate(self.data):
        account, created = row[0], row[1]
        # Excel numeric cells come back as floats -- normalise to str.
        if type(account) == float:
            account = str(int(account))
        sql = "SELECT user_id FROM lie_user_main WHERE mobile = \'%s\' OR email = \'%s\'" % (account, account)
        hits = DBHandler.read(db=conn, sql=sql)
        uid = hits[0][0] if len(hits) > 0 else -1
        out.append({'user_id': uid, 'account': account, 'create_time': created})
        if counter % 1000 == 0:
            print(counter)  # progress heartbeat
    conn.close()
    ExcelHandler.write_to_excel(title, content, out, "result", result_type=2)
"""
"""
def trans_user_info(self):
    """For each (account, sale_man) row, look up company name and
    shipping contact and export everything to Excel."""
    rd = []
    db = ConnList.Order()
    # Excel captions / backing keys.
    title = ['会员账号', '推送业务员', '公司名', '收货人', '收货手机/号码']
    content = ['account', 'sale_man', 'com_name', 'consignee', 'mobile']
    for row in self.data:
        account = row[0]
        sale_man = row[1]
        # Emails arrive as str; mobiles arrive as Excel floats.
        if type(account) == str:
            account_type = 'u.email'
            account = str(account)
        else:
            account_type = 'u.mobile'
            account = int(account)
        # Company name from lie_user_company, contact from lie_user_address.
        sql = "SELECT com_name, consignee, a.mobile FROM lie_user_main u \
               LEFT JOIN lie_user_company c ON u.user_id = c.user_id \
               LEFT JOIN lie_user_address a ON u.user_id = a.user_id \
               WHERE %s = \'%s\'" % (account_type, account)
        result = DBHandler.read(db, sql)
        # Robustness fix: the original indexed result[0] unconditionally
        # and crashed with IndexError when the account had no match.
        if len(result) > 0:
            com_name, consignee, mobile = result[0][0], result[0][1], result[0][2]
        else:
            com_name = consignee = mobile = ''
        rd.append({'account': account, 'sale_man': sale_man, 'com_name': com_name,
                   'consignee': consignee, 'mobile': mobile})
    # Close the connection before writing the workbook.
    db.close()
    ExcelHandler.write_to_excel(title, content, rd, "result", result_type=2)
"""
"""
def trans_user_info_2(self):
    """For each (account, _, sale_man) row: resolve the user, total the
    user's payments inside a hard-coded window, and export account /
    company / salesman / payment stats to Excel."""
    rd = []
    db = ConnList.Order()
    # Excel header captions.
    title = ['会员账号', '公司名', '推送业务员', '付款时间', '累计付款金额', '付款笔数', '订单来源']
    # Dict keys backing each column.
    content = ['account', 'com_name', 'sale_man', 'pay_time', 'pay_amount', 'pay_count', 'pf']
    for row in self.data:
        account = row[0]
        sale_man = row[2]
        pf = '无'
        # Emails arrive as str; mobiles arrive as Excel numbers.
        if type(account) == str:
            account_type = 'u.email'
            account = str(account)
        else:
            account_type = 'u.mobile'
            account = int(account)
        # LEFT JOIN for company name (lie_user_company) and address mobile.
        sql = "SELECT com_name,a.mobile,u.user_id FROM lie_user_main u \
               LEFT JOIN lie_user_company c ON u.user_id = c.user_id \
               LEFT JOIN lie_user_address a ON u.user_id = a.user_id \
               WHERE %s = \'%s\'" % (account_type, account)
        result = DBHandler.read(db, sql)
        if len(result) > 0:
            com_name = result[0][0]
            user_id = result[0][2]
        else:
            com_name = ''
            user_id = -1
        # Payment accumulator for this user.
        cal = {'pay_time': 0, 'pay_amount': 0, 'pay_count': []}
        # NOTE(review): the payment window timestamps and the 6.7 USD->RMB
        # rate below are hard-coded -- confirm before reuse.
        sql = "SELECT p.pay_time, p.pay_amount, o.currency, p.order_id, o.order_source FROM lie_pay_log p LEFT JOIN lie_order o ON p.order_id = o.order_id WHERE p.user_id = %d AND p.pay_time BETWEEN %d AND %d" % (user_id, 1554998400, 1556640000)
        result = DBHandler.read(db, sql)
        for r in result:
            pay_time = r[0]
            currency = r[2]
            order_id = r[3]
            order_source = r[4]
            # Non-RMB payments converted at a fixed 6.7 rate.
            pay_amount = float(r[1]) if currency == 1 else float(r[1]) * 6.7
            # Platform derived from the order_source string (last row wins).
            if 'pf=-1' in order_source:
                pf = '后台'
            elif 'pf=1' in order_source:
                pf = 'PC端'
            else:
                pf = '其他'
            cal['pay_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(pay_time))
            cal['pay_amount'] += pay_amount
            # Count each order only once.
            if order_id not in cal['pay_count']:
                cal['pay_count'].append(order_id)
        rd.append({'account': account, 'com_name': com_name, 'sale_man': sale_man, 'pay_time': cal['pay_time'], 'pay_amount': cal['pay_amount'], 'pay_count': len(cal['pay_count']), 'pf': pf})
    # Close the connection before writing the workbook.
    db.close()
    ExcelHandler.write_to_excel(title, content, rd, "result", result_type=2)
"""
"""
def trans_user_info_3(self):
    """Mark each (user_id, account, adtag) row with whether the user has
    any key behavior (behavior 8 or 11) logged, then export to Excel."""
    db = ConnList.Behavior()
    title = ['user_id', '账号', '外部渠道来源', '是否有关键行为']
    content = ['user_id', 'account', 'adtag', 'is_be']
    rows = []
    for record in self.data:
        user_id, account, adtag = record[0], record[1], record[2]
        sql = "SELECT 1 FROM lie_behavior_log WHERE behavior IN (8,11) AND user_id = %d" % user_id
        hit = DBHandler.read(db, sql)
        rows.append({'user_id': user_id,
                     'account': account,
                     'adtag': adtag,
                     'is_be': '是' if len(hit) > 0 else '否'})
    ExcelHandler.write_to_excel(title, content, rows, "result", result_type=2)
    db.close()
db.close()
from translate.ts_base import TsBase
from config.conn_list import ConnList
from extract.ex_erp import ExERP
from utils.date_handler import DateHandler
from utils.db_handler import DBHandler
from load.load_mysql import LoadMysql
import time
import copy
class TsWallet(TsBase):
"""
钱包金额汇总
"""
def trans_pay_order(self):
    """Aggregate paid orders per user into a wallet dict.

    Returns {user_id: {mobile, email, invite_uid, receiver_type,
    safe_mobile, order_list: {order_sn: {...}}}}.  Repeat payments for
    the same order_sn accumulate pay_amount and keep the latest pay_time.
    """
    wallet_dict = {}
    for row in self.data:
        user_id = row['user_id']
        order_sn = row['order_sn']
        pay_time = row['pay_time']
        pay_amount = float(row['pay_amount'])
        # Convert currency 2 (foreign) to RMB at the ERP rate of the pay date.
        if row['currency'] == 2:
            exchange = ExERP(date=DateHandler.unix_to_date(pay_time, fmt="%Y-%m-%d")).get_erp_exchange()
            pay_amount *= exchange
        # First sighting of this user: create the wallet entry.
        if user_id not in wallet_dict:
            wallet_dict[user_id] = {
                'mobile': row['mobile'],
                'email': row['email'],
                'invite_uid': row['invite_uid'],
                'receiver_type': 'invitee',
                'safe_mobile': row['safe_mobile'] if row['safe_mobile'] is not None else '',
                'order_list': {},
            }
        order_list = wallet_dict[user_id]['order_list']
        if order_sn in order_list:
            # Deposit followed by balance payment: accumulate on the order.
            order_list[order_sn]['pay_time'] = pay_time
            order_list[order_sn]['pay_amount'] += pay_amount
        else:
            # One shared construction path (was duplicated in two branches).
            order_list[order_sn] = {
                'user_id': user_id,
                'order_id': row['order_id'],
                'order_sn': order_sn,
                'pay_time': pay_time,
                'pay_amount': pay_amount,
            }
    return wallet_dict
"""
仅邀请人
"""
def trans_only_inviter(self):
    """Re-key the wallet dict from invitee to inviter.

    For each invitee entry in self.data, look up the inviter's account
    and collect the invitee's orders under the inviter.  Returns
    {invite_uid: {mobile, email, receiver_type, safe_mobile, order_list}}.
    """
    invite_dict = {}
    db = ConnList.Order()
    for user_id in self.data:
        invite_uid = self.data[user_id]['invite_uid']
        order_list = self.data[user_id]['order_list']
        if invite_uid not in invite_dict:
            sql = "SELECT mobile,email,safe_mobile FROM lie_user_main u LEFT JOIN lie_user_info i ON u.user_id = i.user_id WHERE u.user_id = %d" % invite_uid
            result = DBHandler.read(db, sql)
            mobile = result[0][0]
            email = result[0][1]
            safe_mobile = result[0][2] if result[0][2] is not None else ''
            # NOTE(review): order_list is shared by reference here, while the
            # sibling trans_inviter_and_invitee deep-copies it -- mutating the
            # inviter's orders also mutates the invitee's; confirm intended.
            invite_dict[invite_uid] = {'mobile': mobile, 'email': email, 'receiver_type': 'inviter', 'safe_mobile': safe_mobile, 'order_list': order_list}
        else:
            # Merge; orders already collected for this inviter win on
            # duplicate order_sn keys.
            invite_dict[invite_uid]['order_list'] = dict(order_list, **invite_dict[invite_uid]['order_list'])
    db.close()
    return invite_dict
"""
邀请人和被邀请人
"""
def trans_inviter_and_invitee(self):
    """Fold inviters into self.data alongside their invitees.

    For every invitee already in self.data, ensure the inviter also has
    an entry whose order_list contains deep copies of the invitee's
    orders; existing inviter entries win on order_sn collisions.
    """
    # db = ConnList.Local()
    db = ConnList.Order()
    # Snapshot the keys -- inviters are inserted while iterating.
    for invitee_id in list(self.data.keys()):
        inviter_id = self.data[invitee_id]['invite_uid']
        invitee_orders = self.data[invitee_id]['order_list']
        if inviter_id in self.data:
            # Merge: entries already under the inviter take precedence.
            merged = dict(copy.deepcopy(invitee_orders), **self.data[inviter_id]['order_list'])
            self.data[inviter_id]['order_list'] = merged
        else:
            sql = "SELECT mobile, email, safe_mobile FROM lie_user_main u LEFT JOIN lie_user_info i ON u.user_id = i.user_id WHERE u.user_id = %d" % inviter_id
            hit = DBHandler.read(db, sql)
            self.data[inviter_id] = {
                'mobile': hit[0][0],
                'email': hit[0][1],
                'receiver_type': 'inviter',
                'safe_mobile': hit[0][2] if hit[0][2] is not None else '',
                'order_list': copy.deepcopy(invitee_orders),
            }
    db.close()
"""
金额返现明细计算
"""
def trans_detail_receiver(self, wallet):
    """Compute per-order cashback and write it into every order.

    wallet supplies the rule: a minimum qualifying pay amount and a
    cashback scale, each with separate inviter/invitee values.  Sets
    order['receiver_amount'] for every order of every user in self.data
    (0 when the order does not reach the threshold).
    """
    inviter_require_amount = float(wallet['inviter_require_amount'])
    invitee_require_amount = float(wallet['invitee_require_amount'])
    inviter_cashback_scale = float(wallet['inviter_cashback_scale'])
    invitee_cashback_scale = float(wallet['invitee_cashback_scale'])
    for user_id in self.data:
        order_list = self.data[user_id]['order_list']
        # Pick threshold and scale once per user instead of re-branching
        # on receiver_type inside the order loop (was duplicated logic).
        if self.data[user_id]['receiver_type'] == 'inviter':
            require_amount = inviter_require_amount
            cashback_scale = inviter_cashback_scale
        else:
            require_amount = invitee_require_amount
            cashback_scale = invitee_cashback_scale
        for order_sn in order_list:
            pay_amount = order_list[order_sn]['pay_amount']
            if pay_amount >= require_amount:
                order_list[order_sn]['receiver_amount'] = round(pay_amount * cashback_scale, 2)
            else:
                order_list[order_sn]['receiver_amount'] = 0
"""
金额汇总返现计算
"""
def trans_summary_receiver(self, wallet):
    """Sum per-order cashback and payments for each user, clamping the
    cashback at the rule's per-role ceiling.

    Writes self.data[uid]['pay_amount'] and ['receiver_amount'].
    """
    inviter_cap = float(wallet['inviter_max_preferential_amount'])
    invitee_cap = float(wallet['invitee_max_preferential_amount'])
    for user_id in self.data:
        entry = self.data[user_id]
        cap = inviter_cap if entry['receiver_type'] == 'inviter' else invitee_cap
        orders = entry['order_list'].values()
        # sum() replaces the manual accumulator loops.
        total_cashback = sum(o['receiver_amount'] for o in orders)
        total_paid = sum(o['pay_amount'] for o in orders)
        entry['pay_amount'] = total_paid
        # min() replaces the original if/else clamp.
        entry['receiver_amount'] = min(total_cashback, cap)
"""
写入DB
"""
def trans_wallet_load(self, wallet):
    """Persist computed cashback into the DB.

    For each user with a positive cashback: create a new summary row plus
    detail rows when the user has no summary yet for this rule; otherwise
    append detail rows to the existing summary and re-clamp its total.
    """
    wpr_id = wallet['wpr_id']
    inviter_max_preferential_amount = float(wallet['inviter_max_preferential_amount'])
    invitee_max_preferential_amount = float(wallet['invitee_max_preferential_amount'])
    # db = ConnList.Local()
    db = ConnList.WriteOrder()
    # Next free summary id: max + 1, or 1 on an empty table.
    sql_cb = "SELECT max(cb_id) + 1 FROM lie_wallet_cashback"
    result_cb = DBHandler.read(db, sql_cb)
    cb_id = 1 if (len(result_cb) == 0 or result_cb[0][0] is None) else result_cb[0][0]
    for user_id in self.data:
        # Summary values prepared by trans_summary_receiver.
        receiver_amount = self.data[user_id]['receiver_amount']
        receiver_type = self.data[user_id]['receiver_type']
        if receiver_amount > 0:
            sql = "SELECT user_id, cb_id FROM lie_wallet_cashback WHERE wpr_id = %d AND user_id = %d LIMIT 1" % (wpr_id, user_id)
            result = DBHandler.read(db, sql)
            # No summary row yet for this user/rule.
            if len(result) == 0 or result[0][0] is None:
                # Summary row.
                self.load_all(self.data[user_id], cb_id, user_id, wallet)
                db.commit()
                # Detail rows.
                self.load_detail(self.data[user_id]['order_list'], cb_id, wallet)
                db.commit()
                cb_id += 1
            else:
                max_amount = inviter_max_preferential_amount if receiver_type == 'inviter' else invitee_max_preferential_amount
                # Append detail rows under the existing summary id.
                self.load_detail(self.data[user_id]['order_list'], result[0][1], wallet)
                db.commit()
                # Re-clamp the summary total after adding the new details.
                self.update_summary(result[0][1], db, max_amount)
                db.commit()
"""
更新取消订单
"""
def trans_cancel_order(self, wallet):
    """Drop cashback detail rows whose orders were cancelled
    (o.status == -1) and recompute the affected summary rows."""
    wpr_id = wallet['wpr_id']
    inviter_max_preferential_amount = float(wallet['inviter_max_preferential_amount'])
    invitee_max_preferential_amount = float(wallet['invitee_max_preferential_amount'])
    # db = ConnList.Local()
    db = ConnList.WriteOrder()
    sql = "SELECT cb_id,w.order_id FROM lie_wallet_cashback_detail w LEFT JOIN lie_order o ON w.order_id = o.order_id WHERE wpr_id = %d AND status = -1" % wpr_id
    result = DBHandler.read(db, sql)
    cancel_list = []  # distinct cancelled order_ids
    cb_list = []      # summary ids touched by those orders
    for row in result:
        cb_list.append(row[0])
        if row[1] not in cancel_list:
            cancel_list.append(row[1])
    if len(cancel_list) > 0:
        # Delete the detail rows of the cancelled orders.
        sql = "DELETE FROM lie_wallet_cashback_detail WHERE order_id IN (%s)" % ",".join(map(str, cancel_list))
        DBHandler.delete(db, sql)
        db.commit()
        # Placeholder (original comment "占个坑"): the inviter ceiling is
        # used for every summary, even invitee ones -- TODO confirm/fix.
        max_amount = inviter_max_preferential_amount
        # Re-clamp and persist each affected summary.
        for cb_id in cb_list:
            self.update_summary(cb_id, db, max_amount)
"""
写入汇总
"""
def load_all(self, data, cb_id, user_id, wallet):
    """Insert one summary row into lie_wallet_cashback.

    data    -- per-user wallet entry (account, totals)
    cb_id   -- summary primary key to assign
    user_id -- owner of the summary
    wallet  -- cashback rule (supplies wpr_id / rule_name)
    """
    ls = [{
        'cb_id': cb_id,
        'user_id': user_id,
        # Account falls back to email when mobile is empty.
        'user_account': data['mobile'] if data['mobile'] != '' else data['email'],
        # 'safe_mobile': data['safe_mobile'],
        'total_cashback_amount': data['receiver_amount'],
        'total_pay_amount': data['pay_amount'],
        'total_order_amount': data['pay_amount'],
        'wpr_id': wallet['wpr_id'],
        'rule_name': wallet['rule_name'],
        # NOTE(review): -1 presumably means "awaiting review" (reviewer /
        # issuer fields start zeroed) -- confirm against the schema.
        'status': -1,
        'reviewer_uid': 0,
        'reviewer_name': '',
        'review_time': 0,
        'issuer_uid': 0,
        'issuer_name': '',
        'issue_time': 0,
        'create_time': DateHandler.now_datetime(),
        'update_time': DateHandler.now_datetime()
    }]
    col = ['cb_id', 'user_id', 'user_account', 'total_cashback_amount', 'total_pay_amount',
           'total_order_amount',
           'wpr_id', 'rule_name', 'status', 'reviewer_uid', 'reviewer_name', 'review_time', 'issuer_uid',
           'issuer_name', 'issue_time',
           'create_time', 'update_time']
    # LoadMysql.simple_dict_load(col, 'lie_wallet_cashback', ls, db=ConnList.Local(), cal_time=False)
    LoadMysql.simple_dict_load(col, 'lie_wallet_cashback', ls, db=ConnList.WriteOrder(), cal_time=False)
"""
写入明细
"""
def load_detail(self, order_list, cb_id, wallet):
    """Persist per-order cashback rows (only orders with positive
    cashback) into lie_wallet_cashback_detail under the given cb_id."""
    rows = []
    for order_sn in order_list:
        entry = order_list[order_sn]
        if entry['receiver_amount'] <= 0:
            continue
        rows.append({
            'cb_id': cb_id,
            'wpr_id': wallet['wpr_id'],
            'order_id': entry['order_id'],
            'order_sn': entry['order_sn'],
            'order_amount': entry['pay_amount'],
            'pay_amount': entry['pay_amount'],
            'pay_time': entry['pay_time'],
            'cashback_amount': entry['receiver_amount'],
            'user_id': entry['user_id'],
        })
    cols = ['cb_id', 'wpr_id', 'order_id', 'order_sn', 'order_amount', 'pay_amount', 'pay_time', 'cashback_amount',
            'user_id']
    # LoadMysql.simple_dict_load(cols, 'lie_wallet_cashback_detail', rows, db=ConnList.Local(), cal_time=False)
    LoadMysql.simple_dict_load(cols, 'lie_wallet_cashback_detail', rows, db=ConnList.WriteOrder(), cal_time=False)
"""
更新汇总金额
"""
def update_summary(self, cb_id, db, max_amount):
    """Recompute the summary cashback for ``cb_id`` from its detail rows,
    clamp it at ``max_amount``, and persist with a fresh update_time."""
    sql = "SELECT SUM(cashback_amount) FROM lie_wallet_cashback_detail WHERE cb_id = %d" % cb_id
    result = DBHandler.read(db, sql)
    if len(result) > 0:
        # SUM() yields NULL (None) when no detail rows remain.
        total = result[0][0] if result[0][0] is not None else 0
        # Clamp at the rule ceiling.
        total = min(total, max_amount)
        sql = "UPDATE lie_wallet_cashback SET total_cashback_amount = %f, update_time = %d WHERE cb_id = %d" % (total, DateHandler.now_datetime(), cb_id)
        DBHandler.update(db, sql)
        db.commit()
No preview for this file type
import time
import datetime
from dateutil.relativedelta import relativedelta
class DateHandler:
......@@ -49,3 +51,54 @@ class DateHandler:
minute = (end - start) / 60
return round(minute, 2)
"""
获取今天星期几
"""
@staticmethod
def weekday():
    """Return today's weekday as an int (Monday == 0 ... Sunday == 6)."""
    now = datetime.datetime.now()
    return now.weekday()
"""
UNIX时间戳转换为时间格式
"""
@staticmethod
def unix_to_date(timestamp, fmt="%Y-%m-%d %H:%M:%S"):
    """Format a UNIX timestamp as a local-time string using ``fmt``."""
    local_tm = time.localtime(timestamp)
    return time.strftime(fmt, local_tm)
"""
字符串时间转化为UNIX时间戳
"""
@staticmethod
def str_to_unix(str, fmt="%Y-%m-%d %H:%M:%S"):
    """Parse a local-time string with ``fmt`` into a UNIX timestamp (int).

    NOTE(review): the parameter shadows the builtin ``str``; the name is
    kept for backward compatibility with keyword callers.
    """
    parsed = time.strptime(str, fmt)
    return int(time.mktime(parsed))
"""
UNIX后推时间
"""
@staticmethod
def unix_after_days(unix_time, days):
    """Shift a UNIX timestamp forward by ``days`` days (negative -> back)."""
    seconds_per_day = 86400
    return unix_time + days * seconds_per_day
"""
返回今天之前之后的日期
months: +前推 -后推
返回格式:2019-01-01
"""
@staticmethod
def today_between_months(months, fmt="%Y-%m-%d"):
    """Date ``months`` months before today (negative -> after), formatted
    with ``fmt`` (default '2019-01-01' style)."""
    anchor = datetime.date.today()
    return (anchor - relativedelta(months=months)).strftime(fmt)
"""
返回指定时间之前之后的日期
months: +前推 -后推
返回格式:2019-01-01
"""
@staticmethod
def day_between_months(year, month, day, months, fmt="%Y-%m-%d"):
    """Date ``months`` months before (year, month, day) (negative -> after),
    formatted with ``fmt``.

    Bug fix: the original anchored on day 1 of the month, silently
    ignoring the ``day`` argument.
    """
    anchor = datetime.datetime(year, month, day)
    return (anchor - relativedelta(months=months)).strftime(fmt)
import traceback
import requests
import random
import hashlib
import string
from hdfs import Client
from urllib import parse
from utils.date_handler import DateHandler
class DBHandler:
"""
MySQL读取数据
"""
@staticmethod
def read(db, sql):
......@@ -14,8 +24,139 @@ class DBHandler:
results = cursor.fetchall()
except:
db.rollback()
traceback.print_exc()
finally:
db.close()
print(traceback.print_exc())
return results
"""
MySQL更新数据
"""
@staticmethod
def update(db, sql):
    """Execute an UPDATE statement and commit.

    On failure the transaction is rolled back and the traceback plus the
    offending SQL are printed (best-effort semantics preserved).
    """
    cursor = db.cursor()
    try:
        cursor.execute(sql)
        db.commit()
    # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
    # are no longer swallowed.
    except Exception:
        db.rollback()
        traceback.print_exc()
        print(sql)
"""
MySQL插入数据
"""
@staticmethod
def insert(db, sql):
    """Execute an INSERT statement and commit.

    On failure the transaction is rolled back and the traceback printed.
    """
    cursor = db.cursor()
    try:
        cursor.execute(sql)
        db.commit()
    # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
    # are no longer swallowed.
    except Exception:
        db.rollback()
        traceback.print_exc()
"""
MySQL删除数据
"""
@staticmethod
def delete(db, sql):
    """Execute a DELETE statement and commit.

    On failure the transaction is rolled back and the traceback plus the
    offending SQL are printed (best-effort semantics preserved).
    """
    cursor = db.cursor()
    try:
        cursor.execute(sql)
        db.commit()
    # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
    # are no longer swallowed.
    except Exception:
        db.rollback()
        traceback.print_exc()
        print(sql)
"""
读取HDFS数据
"""
@staticmethod
def hdfs_read(file):
    """Read an HDFS file and return its contents split into lines.

    file -- absolute HDFS path
    """
    # NOTE(review): namenode address is hard-coded -- consider config.
    client = Client("http://172.18.137.35:50170", root="/", timeout=100, session=False)
    with client.read(file) as reader:
        result = reader.read().splitlines()
    return result
"""
上传HDFS数据
upload(hdfs_path, local_path, overwrite=False, n_threads=1, temp_dir=None,
chunk_size=65536,progress=None, cleanup=True, **kwargs)
overwrite:是否是覆盖性上传文件
n_threads:启动的线程数目
temp_dir:当overwrite=true时,远程文件一旦存在,则会在上传完之后进行交换
chunk_size:文件上传的大小区间
progress:回调函数来跟踪进度,为每一chunk_size字节。它将传递两个参数,
文件上传的路径和传输的字节数。一旦完成,-1将作为第二个参数
cleanup:如果在上传任何文件时发生错误,则删除该文件
"""
@staticmethod
def hdfs_upload(hdfs_path, local_path):
    """Upload a local file to HDFS.

    hdfs_path  -- target directory/path on HDFS
    local_path -- local file to upload
    """
    # NOTE(review): namenode address is hard-coded -- consider config.
    client = Client("http://172.18.137.35:50170", root="/", timeout=100, session=False)
    client.upload(hdfs_path=hdfs_path, local_path=local_path)
"""
翻页读取ES数据
"""
@staticmethod
def scroll_read(url, body, key):
    """Page through a scrolling search endpoint.

    Posts ``body`` to ``url``, then follows the returned scroll_id for
    every additional page of up to 1000 hits.

    key -- name of the record list inside the response's 'data' envelope
    Returns the concatenated records from all pages.
    """
    first = requests.post(url, data=body)
    payload = first.json()['data']
    total = payload['total']
    final_result = payload[key]
    scroll_id = payload['scroll_id']
    if total > 1000:
        pages = int(total / 1000)
        for _ in range(pages):
            # Every follow-up request replays the same scroll_id.
            page_resp = requests.post(url, data={"scroll_id": scroll_id})
            # extend() replaces the original element-by-element append
            # loop, which also shadowed the response variable ``r``.
            final_result.extend(page_resp.json()['data'][key])
    return final_result
"""
es加密数据获取
"""
@staticmethod
def esEncryptData(key, url):
    """Query a signed ES endpoint and return its 'data' payload.

    key -- shared secret used to build the md5 sign
    url -- endpoint to POST to
    """
    # Current timestamp for the signature window.
    now_timestamp = DateHandler.now_datetime()
    # 4-char random lowercase salt.
    ran_str = ''.join(random.sample(string.ascii_letters + string.digits, 4)).lower()
    # Parameters covered by the signature.
    params_dict = {'check_time': now_timestamp, 'salt': ran_str}
    # sign = md5(key + urlencoded(params).lower() + salt).
    sign = parse.urlencode(params_dict).lower()
    # key = 'djdj93ichuntj56dksisearchdj45eieapi'
    sign = key + sign + str(ran_str)
    sign = hashlib.md5(sign.encode(encoding='UTF-8')).hexdigest()
    # Search endpoint example:
    # requestUrl = "http://so12.ichunt.com/search/ServerApi/index"
    # Signed request body.
    search_body = {"check_time": now_timestamp, "salt": ran_str, "sign": sign}
    # POST and unwrap the 'data' envelope.
    r = requests.post(url, data=search_body)
    result = r.json()['data']
    return result
import xlwt
import xlrd
# workbook相关
from openpyxl.workbook import Workbook
# ExcelWriter,封装了很强大的excel写的功能
from openpyxl.writer.excel import ExcelWriter
from openpyxl.utils import get_column_letter
class ExcelHandler:
......@@ -38,3 +44,76 @@ class ExcelHandler:
# 结果保存
wb.save(file_name + '.xls')
"""
function:
读出*.xlsx中的每一条记录,把它保存在data_dic中返回
Param:
records: 要保存的,一个包含每一条记录的list
save_excel_name: 保存为的文件名
head_row_stu_arrive_star:
Return:
data_dic: 返回的记录的dict
"""
@staticmethod
def write_to_excel_with_openpyxl(title, content, result, file_name):
    """Write a result mapping to an .xlsx file via openpyxl.

    title     -- header captions for row 1
    content   -- keys, in column order, read from each result entry
    result    -- mapping whose values are row dicts keyed by ``content``
    file_name -- output file path
    NOTE(review): uses the legacy openpyxl ``ws.cell('A1')`` call style,
    which only works on old openpyxl versions.
    """
    # New workbook + writer.
    wb = Workbook()
    ew = ExcelWriter(workbook=wb)
    # First worksheet.
    ws = wb.worksheets[0]
    # Sheet name.
    ws.title = "name"
    # Header row.
    print(title)
    for h_x in range(1, len(title) + 1):
        h_col = get_column_letter(h_x)
        ws.cell('%s%s' % (h_col, 1)).value = '%s' % (title[h_x - 1])
    # Data rows start at row 2.
    i = 2
    for r in result:
        col_index = 1
        for c in content:
            col = get_column_letter(col_index)
            try:
                ws.cell('%s%s' % (col, i)).value = '%s' % str(result[r][c])
            except:
                # Fall back to empty string on values that fail to write.
                ws.cell('%s%s' % (col, i)).value = '%s' % ''
                print(result[r][c])
            col_index += 1
        i += 1
    # Persist the workbook.
    ew.save(filename=file_name)
"""
sheet_name:sheet下数据
data_type:1:以行为基础的list 2:以列为基础的list
"""
@staticmethod
def read_to_excel(file_name, sheet_name, data_type):
    """Load a worksheet into nested lists.

    data_type 1 -> one inner list per row;
    data_type 2 -> one inner list per column.
    Any other value yields an empty list.
    """
    book = xlrd.open_workbook(file_name)
    # Locate the requested sheet and its dimensions.
    sheet = book.sheet_by_name(sheet_name)
    n_rows, n_cols = sheet.nrows, sheet.ncols
    if data_type == 1:
        return [[sheet.cell_value(r, c) for c in range(n_cols)]
                for r in range(n_rows)]
    if data_type == 2:
        return [[sheet.cell_value(r, c) for r in range(n_rows)]
                for c in range(n_cols)]
    return []
import time
import json
import logging
class LogHandler:
    """Lightweight helpers for ELK-style file logging and stdlib loggers."""

    @staticmethod
    def elk_log(msg, msg_code, file_name, app):
        """Append one ELK-style JSON log record to ``file_name``.

        msg, msg_code -- message text and numeric code
        file_name     -- target log file (also recorded in the payload)
        app           -- application identifier
        """
        # Single clock read so 'ts' and 'dateStr' always agree.
        now = int(time.time())
        data = json.dumps({
            'msg': msg,
            'msgCode': msg_code,
            'ts': now,
            # Bug fix: format was '%Y-%m-%d %H:%M%S' (missing colon
            # between minutes and seconds).
            'dateStr': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(now)),
            'app': app,
            'serverIp': '127.0.0.1',
            'fileName': file_name,
            'lineNo': "",
            'method': ""
        })
        # 'with' guarantees the handle closes even if write() raises.
        with open(file_name, 'a') as f:
            f.write(data + '\n')

    @staticmethod
    def logger(level=logging.INFO, filepath='', filename='', logger='',
               formatter='%(asctime)s - %(name)s - %(levelname)s - %(message)s'):
        """Attach a file handler (filepath + filename) to ``logger``.

        level     -- handler log level
        formatter -- record format string
        logger    -- a logging.Logger instance to attach the handler to
        """
        handler = logging.FileHandler(filepath + filename)
        handler.setLevel(level)
        # Bug fix: setFormatter() requires a logging.Formatter; passing
        # the bare format string made every record emit the literal
        # format string instead of the message.
        handler.setFormatter(logging.Formatter(formatter))
        logger.addHandler(handler)
from urllib import request, parse
import requests
import time
import hashlib
import random
import datetime
import string
import json
# DingTalk webhook base URL; a robot token is appended per call.
robot_api = 'https://oapi.dingtalk.com/robot/send?access_token='
# Default robot token.  NOTE(review): the first assignment is dead -- the
# second immediately overrides it (old robot kept for reference?); confirm
# which robot is intended before cleanup.
access_token = '030f5d480c8f3a7c199fdcfc91e0e5d3e54149c12d51c4a5a3c0bc07a8be308a'
access_token = 'acb26b0510e79df7e029498c0820c35eb49dc033dc97a633fb1add665852f668'
class MsgHandler:
"""
发送钉钉消息
msg:消息内容
robot:指定机器人
"""
@staticmethod
def send_dd_msg(msg, robot=access_token):
data = {
......@@ -15,4 +26,74 @@ class MsgHandler:
"content": msg
}
}
requests.post(robot_api + robot, json=data)
res = requests.post(robot_api + robot, json=data)
print(res)
"""
调用消息接口发送邮件
data:发送数据内容
email:发送邮件列表
keyword:发送邮件模板
"""
@staticmethod
def send_email(data, email, keyword):
    """Send a templated e-mail through the message service.

    data    -- template payload (JSON-serialised)
    email   -- list of recipient addresses
    keyword -- message-template identifier
    """
    # Auth handshake: k2 = md5(md5(k1) + auth_key).
    k1 = int(time.time())
    pf = -1
    # NOTE(review): shared secret hard-coded in source -- consider config.
    auth_key = 'fh6y5t4rr351d2c3bryi'
    encrypt = hashlib.md5(str(k1).encode(encoding='UTF-8')).hexdigest()
    encrypt = encrypt + auth_key
    k2 = hashlib.md5(encrypt.encode(encoding='UTF-8')).hexdigest()
    # Message-service endpoint.
    url = "http://api.ichunt.com/msg/sendMessageByAuto"
    # Whether to ignore the recipient's site-membership status.
    is_ignore = 'FALSE'
    # Request payload.
    body = {"data": json.dumps(data, ensure_ascii=False), "touser": json.dumps(email, ensure_ascii=False),
            "keyword": keyword, "is_ignore": is_ignore, "k1": k1, "k2": k2, "pf": pf}
    # Fire the request; the response is intentionally ignored (best effort).
    requests.post(url, data=body)
"""
针对ES加密的接口返回参数
"""
@staticmethod
def get_encrypt_msg(key, url):
    """Fetch yesterday's records from a signed ES-style endpoint.

    key -- shared secret used in the md5 signature
    url -- endpoint to POST to
    Returns the 'data' field of the JSON response.
    """
    # Current timestamp for the signature.
    now_timestamp = int(time.time())
    # 4-char random lowercase salt.
    ran_str = ''.join(random.sample(string.ascii_letters + string.digits, 4)).lower()
    # Business window: [midnight yesterday, midnight today).
    today = datetime.date.today()
    yesterday = today - datetime.timedelta(days=1)
    yesterday_timestamp = int(time.mktime(time.strptime(str(yesterday), '%Y-%m-%d')))
    today_timestamp = int(time.mktime(time.strptime(str(today), '%Y-%m-%d')))
    start_time = yesterday_timestamp
    end_time = today_timestamp
    size = 20000
    # Parameters covered by the signature.
    params_dict = {'check_time': now_timestamp, 'end_time': end_time, 'salt': ran_str,
                   'size': size, 'start_time': start_time}
    # sign = md5(key + urlencoded(params).lower() + salt).
    sign = parse.urlencode(params_dict).lower()
    sign = key + sign + str(ran_str)
    sign = hashlib.md5(sign.encode(encoding='UTF-8')).hexdigest()
    # Signed request body.
    search_body = {"start_time": start_time, "end_time": end_time, "size": size, "check_time": now_timestamp,
                   "salt": ran_str, "sign": sign}
    # POST and unwrap the 'data' envelope.
    r = requests.post(url, data=search_body)
    result = r.json()['data']
    return result
# !/usr/bin/env python
# -*- coding:utf-8 -*-
from threading import Thread
import pika
class Base(object):
    """Thin wrapper around a blocking pika connection/channel.

    Declares an exchange when both ``exchange`` and ``exchange_type``
    are provided.
    """

    def __init__(self, user, pwd, host, exchange=None, exchange_type=None):
        credentials = pika.PlainCredentials(user, pwd)
        self.conn = pika.BlockingConnection(pika.ConnectionParameters(host, credentials=credentials))  # connection
        self.ch = self.conn.channel()  # channel
        self.exchange = exchange
        if exchange and exchange_type:
            self.ch.exchange_declare(exchange=exchange, exchange_type=exchange_type)

    def send_task_fanout(self, body):
        # Publish a persistent message (delivery_mode=2) to the exchange
        # with an empty routing key (broadcast-style).
        if self.exchange:
            self.ch.basic_publish(exchange=self.exchange, routing_key='',
                                  properties=pika.BasicProperties(delivery_mode=2),
                                  body=body)

    def __del__(self):
        # Best-effort teardown: channel/connection may already be closed.
        try:
            self.ch.close()
            self.conn.close()
        except Exception:
            pass
class _Task(Thread):
    """Worker thread that runs a consumer callback and, when
    acknowledgements are enabled, acks the delivery after it finishes."""

    def __init__(self, target, args, no_ack, ch, method, properties):
        super().__init__(target=target, args=args)
        self.no_ack = no_ack        # True disables the post-run ack
        self.ch = ch                # channel the delivery arrived on
        self.method = method        # carries the delivery_tag
        self.properties = properties

    def run(self):
        # Execute the callback on this thread first.
        super().run()
        # Then acknowledge, but only when acks are enabled.
        if self.no_ack is False:
            self.ch.basic_ack(delivery_tag=self.method.delivery_tag)
class Customer(Base):
    """Consumer side: declares the task queue (plus optional store
    queues), applies QoS, and dispatches deliveries to a callback."""

    def __init__(self, user, pwd, host, task_queue, *store_queues, exchange=None, exchange_type=None, prefetch_count=1,
                 durable=True, no_ack=False):
        super().__init__(user, pwd, host, exchange=exchange, exchange_type=exchange_type)
        self.task_queue = task_queue
        self.store_queues = store_queues
        self.prefetch_count = prefetch_count  # at most N unacked messages per worker at a time
        self.durable = durable  # declare queues as durable
        self.no_ack = no_ack  # message acknowledgement; True disables acks
        self.exchange = exchange
        self.exchange_type = exchange_type

    def send_task(self, body):
        # Persistent publish straight to the task queue (default exchange).
        self.ch.basic_publish(exchange='', routing_key=self.task_queue,
                              properties=pika.BasicProperties(delivery_mode=2), body=body)

    def store_data(self, data, rk=None):
        # Persist processed data to a store queue (first one by default).
        routing_key = rk if rk else self.store_queues[0]
        self.ch.basic_publish(exchange='', routing_key=routing_key,
                              properties=pika.BasicProperties(delivery_mode=2), body=data)

    def server_forever(self, func):
        """Declare queues, apply QoS, and consume forever with ``func``."""
        if self.task_queue is not None:
            self.ch.queue_declare(queue=self.task_queue, durable=self.durable)
            if self.exchange and self.exchange_type:
                self.ch.queue_bind(exchange=self.exchange, queue=self.task_queue)
        if self.store_queues:
            for sq in self.store_queues:
                if sq:
                    self.ch.queue_declare(queue=sq, durable=self.durable)
        self.ch.basic_qos(prefetch_count=self.prefetch_count)
        # Legacy pika (<1.0) basic_consume signature.
        self.ch.basic_consume(func, queue=self.task_queue, no_ack=self.no_ack)
        self.ch.start_consuming()
class Producer(Base):
    """Publisher side: optionally declares a durable task queue and
    pushes persistent messages onto it."""

    def __init__(self, user, pwd, host, task_queue=None, durable=True, exchange=None, exchange_type=None):
        super().__init__(user, pwd, host, exchange=exchange, exchange_type=exchange_type)
        self.task_queue = task_queue
        self.durable = durable  # declare the queue as durable
        self.exchange = exchange

    def send_task(self, body):
        # delivery_mode=2 marks the message persistent.
        props = pika.BasicProperties(delivery_mode=2)
        self.ch.basic_publish(exchange='', routing_key=self.task_queue,
                              properties=props, body=body)
        # self.ch.close()

    def produce(self, func):
        # Declare the queue lazily, then hand control to the caller.
        if self.task_queue is not None:
            self.ch.queue_declare(queue=self.task_queue, durable=self.durable)
        func(self)
class UtilsHandler:
    """Small generic helpers."""

    @staticmethod
    def dict2list(dic: dict):
        """Return the dict's (key, value) pairs as a list of tuples.

        Idiom fix: dict.items() already yields the pairs; the original
        re-zipped keys() with values() by hand.
        """
        return list(dic.items())
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment