岳巧源 / data_server
Commit a70c40f8, authored Aug 27, 2024 by 岳巧源
modify script
parent 38009ae5
Showing 1 changed file with 13 additions and 6 deletions
handle.py
 import hashlib
 import json
 import os
+import logging
 import pandas as pd
 import pika
 import requests
 import config
+log_path = "/data/golang/src/europa-erp-go/scripts/data_server/py_mq.log"
 title_arr = [
     "原厂型号",
     "品牌",
...
@@ -23,6 +23,11 @@ title_arr = [
     "封装"
 ]
+logging.basicConfig(level=logging.INFO, filename=log_path, format='%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s')
 class Producer:
     def __init__(self):
         credentials = pika.PlainCredentials(username=config.rabbit_mq_user, password=config.rabbit_mq_password)
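
The main addition in this hunk is the file-based logging setup. A minimal standalone sketch of what that configuration produces, using a hypothetical local path in place of the production py_mq.log:

import logging

# Hypothetical local stand-in for the production py_mq.log path.
log_path = "./py_mq_local.log"

# Same configuration as the added line above: INFO level, file output,
# records formatted as "time - file[line:N] - LEVEL: message".
logging.basicConfig(level=logging.INFO, filename=log_path,
                    format='%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s')

logging.info("rabbitmq producer initialised")
# Appends a line such as
#   2024-08-27 10:15:42,123 - example.py[line:12] - INFO: rabbitmq producer initialised
# to the log file; nothing is written to stdout.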
...
@@ -57,12 +62,13 @@ def resolve(file_name: str, supplier_id: int):
         brand_name = str(df.loc[i, "品牌"])
         if goods_name.strip() == "" or brand_name.strip() == "" or goods_name.strip() == "nan" or brand_name.strip() == "nan":
             continue
-        stock = str(df.loc[i, "库存"])
+        stock = str(int(str(df.loc[i, "库存"])))
-        moq = str(df.loc[i, "起订量"])
+        moq = str(int(str(df.loc[i, "起订量"])))
+        mpq = str(int(str(df.loc[i, "MPQ(最小包装数量)"])))
         price_is_us = False
         supp_id = int(supplier_id)
         supplier_name = ""
-        multiple = str(df.loc[i, "增量"])
+        multiple = str(int(str(df.loc[i, "增量"])))
         batch_sn = {
             str(df.loc[i, "批次"]): int(df.loc[i, "库存"]),
         }
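
The stock, moq, and multiple assignments (and the new mpq one) all switch to a str(int(str(...))) round trip. A toy sketch with made-up cell values of what that produces; note the pattern only accepts cells whose string form is an integer literal, so a cell rendering as "100.0" would raise ValueError here:

import pandas as pd

# One made-up row; the column names mirror the ones read in handle.py.
df = pd.DataFrame({"库存": ["1200"], "起订量": [10], "增量": [" 5 "]})
i = 0

stock = str(int(str(df.loc[i, "库存"])))     # "1200"  (string cell -> clean digit string)
moq = str(int(str(df.loc[i, "起订量"])))     # "10"    (int64 cell -> "10")
multiple = str(int(str(df.loc[i, "增量"])))  # "5"     (surrounding whitespace stripped by int())
print(stock, moq, multiple)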
...
@@ -82,6 +88,7 @@ def resolve(file_name: str, supplier_id: int):
             "batch_sn": batch_sn,
             "ladder_price": [],
             "goods_sn": "",
+            "mpq": mpq
         }
         normal_text = (goods_name + brand_name + str(supplier_id) + str(df.loc[i, "封装"]) + str(df.loc[i, "批次"])).lower()
         md = hashlib.md5(normal_text.encode())
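
The unchanged context lines above build a lowercase fingerprint of the row (model + brand + supplier + package + batch) and hash it with md5. A toy illustration with invented values; how the digest is used downstream is not visible in this diff:

import hashlib

# Invented example values; the real ones come from the spreadsheet row and supplier_id.
goods_name, brand_name, supplier_id = "STM32F103C8T6", "ST", 42
package, batch = "LQFP48", "2024+"

# Same recipe as the context lines: concatenate, lowercase, md5.
normal_text = (goods_name + brand_name + str(supplier_id) + package + batch).lower()
md = hashlib.md5(normal_text.encode())
print(md.hexdigest())  # hex fingerprint of this goods/brand/supplier/package/batch combination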
...
@@ -98,7 +105,7 @@ def resolve(file_name: str, supplier_id: int):
                 item_map["price_cn"] = float(price_data)
             table["ladder_price"].append(item_map)
         ans = json.dumps(table, ensure_ascii=False)
-        print(ans)
+        logging.info(ans)
         producer.push(ans)
     producer.close()
...
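
The last hunk only reroutes the serialized payload from stdout to the log file (print(ans) becomes logging.info(ans)). The unchanged ensure_ascii=False is what keeps the Chinese field values readable in that log; a toy illustration with invented values:

import json

# Toy payload; the real fields are assembled from the spreadsheet row.
table = {"goods_name": "电阻", "brand_name": "国巨", "ladder_price": [], "mpq": "3000"}

print(json.dumps(table))                      # {"goods_name": "\u7535\u963b", ...}  escaped
print(json.dumps(table, ensure_ascii=False))  # {"goods_name": "电阻", "brand_name": "国巨", ...}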