Skip to content
Toggle navigation
P
Projects
G
Groups
S
Snippets
Help
岳巧源
/
python_resolve_data
This project
Loading...
Sign in
Toggle navigation
Go to a project
Project
Repository
Issues
0
Merge Requests
0
Pipelines
Wiki
Snippets
Settings
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Commit
7d4c2187
authored
Aug 28, 2024
by
岳巧源
Browse files
Options
_('Browse Files')
Download
Email Patches
Plain Diff
add script
parent
c5059f77
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
104 additions
and
7 deletions
chip1stop.py
chip1stop.py
View file @
7d4c2187
import
csv
import
datetime
import
datetime
import
ftplib
import
ftplib
import
gzip
import
gzip
import
json
import
os
import
os
import
pika
import
pika
...
@@ -9,11 +11,17 @@ host = "ansetchip1.gotoftp11.com"
...
@@ -9,11 +11,17 @@ host = "ansetchip1.gotoftp11.com"
# FTP connection settings.
# NOTE(review): credentials are hard-coded in source — consider moving them
# to environment variables or a secrets store.
user = "ansetchip1"
password = "nf6l2g17"
port = 21

# File-name pattern on the FTP server: CSV files carry the upload date in
# the name, TXT files use a fixed name.  This script runs nightly at 23:40
# to parse the files uploaded that day.
ftp_remote_path_csv = (
    "/Chip1Stop_TI_included_"
    + datetime.datetime.now().strftime("%Y%m%d")
    + "_ETCHIPS.csv.gz"
)
ftp_remote_path_txt = "/Chip1StopStockList RMB_Anjie_US Stock.txt.gz"

# Local download paths (temporarily set to the current directory).
# file_local_path_csv = "." + ftp_remote_path_csv
# NOTE(review): the CSV path below is pinned to the 2024-08-26 file —
# presumably a debug leftover; confirm before re-enabling the download.
file_local_path_csv = "./Chip1Stop_TI_included_20240826_ETCHIPS.csv"
file_local_path_txt = "." + ftp_remote_path_txt

ENV = "test"  # ENV is either "test" or "prod"
...
@@ -40,6 +48,27 @@ config_prod = {
...
@@ -40,6 +48,27 @@ config_prod = {
"rabbit_mq_routing_key"
:
"europa_erp_sku_routing"
,
"rabbit_mq_routing_key"
:
"europa_erp_sku_routing"
,
}
}
# Expected column order of the Chip1stop CSV export; row values are mapped
# onto output fields by position against this list.
csv_title = (
    ['Mfr', 'C1S Part#', 'Mfr Part#', 'MOQ', 'Avail Qty', 'Days to Ship']
    + ['Break' + str(k) for k in range(1, 8)]
    + ['Price' + str(k) for k in range(1, 8)]
    + ['Date code']
)
"""判断传入的字符串是不是数字"""
def
is_number
(
s
:
str
):
try
:
float
(
s
)
return
True
except
ValueError
:
pass
try
:
import
unicodedata
unicodedata
.
numeric
(
s
)
return
True
except
(
TypeError
,
ValueError
):
pass
return
False
class
DataUtil
:
class
DataUtil
:
...
@@ -63,8 +92,75 @@ class DataUtil:
...
@@ -63,8 +92,75 @@ class DataUtil:
g_file
.
close
()
g_file
.
close
()
os
.
remove
(
file_path
)
os
.
remove
(
file_path
)
def
handle_csv
(
self
):
def
handle_csv
(
self
,
file_path
):
pass
if
not
os
.
path
.
exists
(
file_path
):
return
with
open
(
file_path
,
errors
=
'ignore'
)
as
f
:
res
=
[]
csv_reader
=
csv
.
reader
(
f
)
for
index
,
row
in
enumerate
(
csv_reader
):
if
index
>
10
:
break
if
index
==
0
:
continue
table
=
dict
()
"""csv文件对应的是美金"""
table
[
"price_is_us"
]
=
True
table
[
"supplier_name"
]
=
"Chip1stop"
table
[
"ladder_price"
]
=
[]
ladder_purchases
=
dict
()
ladder_prices
=
dict
()
for
i
in
range
(
len
(
row
)):
if
csv_title
[
i
]
==
"C1S Part#"
:
table
[
"goods_sn"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Mfr Part#"
:
table
[
"goods_name"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Mfr"
:
table
[
"brand_name"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Avail Qty"
:
table
[
"stock"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"MOQ"
:
table
[
"moq"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Break1"
and
row
[
i
]
!=
""
:
ladder_purchases
[
"Break1"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Break2"
and
row
[
i
]
!=
""
:
ladder_purchases
[
"Break2"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Break3"
and
row
[
i
]
!=
""
:
ladder_purchases
[
"Break3"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Break4"
and
row
[
i
]
!=
""
:
ladder_purchases
[
"Break4"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Break5"
and
row
[
i
]
!=
""
:
ladder_purchases
[
"Break5"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Break6"
and
row
[
i
]
!=
""
:
ladder_purchases
[
"Break6"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Break7"
and
row
[
i
]
!=
""
:
ladder_purchases
[
"Break7"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Price1"
and
row
[
i
]
!=
""
:
ladder_prices
[
"Price1"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Price2"
and
row
[
i
]
!=
""
:
ladder_prices
[
"Price2"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Price3"
and
row
[
i
]
!=
""
:
ladder_prices
[
"Price3"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Price4"
and
row
[
i
]
!=
""
:
ladder_prices
[
"Price4"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Price5"
and
row
[
i
]
!=
""
:
ladder_prices
[
"Price5"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Price6"
and
row
[
i
]
!=
""
:
ladder_prices
[
"Price6"
]
=
row
[
i
]
if
csv_title
[
i
]
==
"Price7"
and
row
[
i
]
!=
""
:
ladder_prices
[
"Price7"
]
=
row
[
i
]
for
k
in
range
(
1
,
8
):
if
"Break"
+
str
(
k
)
in
ladder_purchases
\
and
"Price"
+
str
(
k
)
in
ladder_prices
\
and
is_number
(
ladder_purchases
[
"Break"
+
str
(
k
)])
\
and
is_number
(
ladder_prices
[
"Price"
+
str
(
k
)]):
item
=
dict
()
item
[
"purchases"
]
=
int
(
ladder_purchases
[
"Break"
+
str
(
k
)])
item
[
"price_us"
]
=
float
(
ladder_prices
[
"Price"
+
str
(
k
)])
item
[
"price_cn"
]
=
float
(
0
)
table
[
"ladder_price"
]
.
append
(
item
)
ans
=
json
.
dumps
(
table
)
print
(
ans
)
    def handle_txt(self):
        """Parse the downloaded TXT stock list file.

        Not implemented yet — placeholder for the fixed-name
        "Chip1StopStockList ..." TXT file handling.
        """
        pass
...
@@ -103,7 +199,8 @@ if __name__ == '__main__':
...
@@ -103,7 +199,8 @@ if __name__ == '__main__':
elif
ENV
==
"prod"
:
elif
ENV
==
"prod"
:
config
=
config_prod
config
=
config_prod
u
=
DataUtil
()
u
=
DataUtil
()
u
.
download
(
ftp_remote_path_csv
,
file_local_path_csv
)
# u.download(ftp_remote_path_csv, file_local_path_csv)
u
.
ungz
(
file_local_path_csv
)
# u.ungz(file_local_path_csv)
u
.
download
(
ftp_remote_path_txt
,
file_local_path_txt
)
# u.download(ftp_remote_path_txt, file_local_path_txt)
u
.
ungz
(
file_local_path_txt
)
# u.ungz(file_local_path_txt)
u
.
handle_csv
(
file_local_path_csv
)
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment