ZGC_INDEX / 重点类信息提取 / Commits / 21de2e36

Commit 21de2e36 authored Jun 29, 2021 by rico.liu
update
parent 197a1c03
Showing 1 changed file with 15 additions and 3 deletions
公共代码/校验参数数据.py  +15 -3  (view file @ 21de2e36)
@@ -85,7 +85,7 @@ def check_data(category_name,path):
     process_index = 0
     index_ = Index()
+    count = 0
     for index, row in df.iterrows():
         try:
@@ -95,9 +95,13 @@ def check_data(category_name,path):
             process_index += 1
+            count += 1
             id_ = str(row['id'])
             eg_brand_name = row['产品品牌']
             eg_product_name = row['产品名称']
             for db_param in db_params['subtitle'].tolist():
                 if db_param in ['产品型号','CPU属性']: #CPU属性为衍生属性,需要特殊处理
                     continue
@@ -146,7 +150,14 @@ def check_data(category_name,path):
                 dict_id = cursor_zi_new.fetchone()[0]
                 dict_id_list.append(dict_id)
+                if count % 10 == 0:
+                    cursor_zi_new.close()
+                    conn_zi_new.close()
+                    conn_zi_new = pymssql.connect(host='123.56.115.207',user='zgcprice3311',password='zgcprice20200628',database='ZI_NEW',autocommit=True)
+                    cursor_zi_new = conn_zi_new.cursor()
             if flag:
                 if len(dict_id_list) == 1:
                     _id = dict_id_list[0]
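Note: the block added above recycles the pymssql connection every 10 processed rows (count % 10 == 0), presumably so a long row-by-row run does not hold one connection open until it times out. A minimal, self-contained sketch of that pattern follows; the connection settings and the rows iterable are placeholders, not values from this repository:

import pymssql

# Placeholder connection settings -- not the project's real credentials.
DB = dict(host="db.example.invalid", user="user", password="secret",
          database="ZI_NEW", autocommit=True)
RECONNECT_EVERY = 10  # mirrors the "count % 10 == 0" threshold in the diff

def open_cursor():
    conn = pymssql.connect(**DB)
    return conn, conn.cursor()

def process(rows):
    # Run one lookup per row, dropping and reopening the connection every N rows.
    conn, cursor = open_cursor()
    for count, row in enumerate(rows, start=1):
        cursor.execute("select 1")          # stand-in for the real per-row queries
        if count % RECONNECT_EVERY == 0:
            cursor.close()
            conn.close()
            conn, cursor = open_cursor()
    cursor.close()
    conn.close()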
@@ -154,6 +165,7 @@ def check_data(category_name,path):
                 else:
                     cursor_zi_new.execute("select * from ShuJuZiDian_Cfg where stdvalue is Null and id in (%s)" % ','.join(['%s']*len(dict_id_list)),tuple(dict_id_list))
                     fill_shujuzidian_df = pd.DataFrame(cursor_zi_new.fetchall(),columns=[tuple[0] for tuple in cursor_zi_new.description])
+                    category_name = category_name.replace("/","_")
                     fill_shujuzidian_df.to_excel(f"{category_name}数据字典补充.xlsx")
             else:
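For reference, the unchanged execute call just above builds its IN (...) clause by joining one %s placeholder per id and passing the ids separately as a tuple, then uses cursor.description to name the DataFrame columns. A small sketch of that pattern (the function name is an assumption; the loop variable is renamed so it no longer shadows the built-in tuple, as the original list comprehension does):

import pandas as pd

def fetch_missing_stdvalue(cursor, dict_id_list):
    # One %s placeholder per id; the values themselves go in as query parameters.
    placeholders = ",".join(["%s"] * len(dict_id_list))
    sql = "select * from ShuJuZiDian_Cfg where stdvalue is Null and id in (%s)" % placeholders
    cursor.execute(sql, tuple(dict_id_list))
    # Column names come from the cursor metadata rather than being hard-coded.
    columns = [col[0] for col in cursor.description]
    return pd.DataFrame(cursor.fetchall(), columns=columns)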
@@ -192,8 +204,8 @@ def check_data(category_name,path):
         print(f"{category_name}数据通过校验,可更新库内参数")
-category_name = '一体电脑'
-path = '/Users/rico/Work Space/1_Project/Company/中电中采/数据处理项目_重点类信息提取/一体电脑/一体电脑参数确认3.xlsx'
+category_name = '硒鼓/墨粉'
+path = '/Users/rico/Downloads/硒鼓墨粉汇总-库内数据补参.xlsx'
 check_data(category_name,path)
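The new default category '硒鼓/墨粉' contains a '/', which would otherwise be read as a path separator when the f"{category_name}数据字典补充.xlsx" file is written; that appears to be what the category_name.replace("/","_") line added in the previous hunk guards against. A slightly more general sketch, with a hypothetical helper name:

import re

def safe_filename(name):
    # Replace path separators and other characters that commonly break file names.
    return re.sub(r'[\\/:*?"<>|]', "_", name)

# e.g. safe_filename("硒鼓/墨粉") == "硒鼓_墨粉"
# fill_shujuzidian_df.to_excel(f"{safe_filename(category_name)}数据字典补充.xlsx")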