smartpush 1.4.0__tar.gz → 1.4.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {smartpush-1.4.0 → smartpush-1.4.1}/PKG-INFO +2 -2
- smartpush-1.4.1/README.md +115 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/setup.py +3 -3
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush/flow/MockFlow.py +31 -16
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush/test.py +21 -13
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush.egg-info/PKG-INFO +2 -2
- smartpush-1.4.0/README.md +0 -44
- {smartpush-1.4.0 → smartpush-1.4.1}/setup.cfg +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush/__init__.py +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush/export/__init__.py +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush/export/basic/ExcelExportChecker.py +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush/export/basic/GetOssUrl.py +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush/export/basic/ReadExcel.py +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush/export/basic/__init__.py +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush/flow/__init__.py +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush/get_jira_info.py +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush/utils/DataTypeUtils.py +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush/utils/EmailUtlis.py +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush/utils/ListDictUtils.py +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush/utils/StringUtils.py +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush/utils/__init__.py +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush.egg-info/SOURCES.txt +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush.egg-info/dependency_links.txt +0 -0
- {smartpush-1.4.0 → smartpush-1.4.1}/smartpush.egg-info/top_level.txt +0 -0
smartpush-1.4.1/README.md
ADDED
@@ -0,0 +1,115 @@
+# SmartPush_AutoTest
+
+
+
+## Getting started
+
+## Packaging/upload dependencies (skip if already installed)
+```sh
+pip install wheel
+pip install twine
+```
+
+## 1. Clean the local build folders
+
+```sh
+#!/bin/bash
+
+# Directories and file patterns to clean
+BUILD_DIR="build"
+DIST_DIR="dist"
+EGG_INFO_PATTERN="*.egg-info"
+
+# Remove the build directory
+if [ -d "$BUILD_DIR" ]; then
+    rm -rf "$BUILD_DIR"
+    echo "成功删除 $BUILD_DIR 目录"
+else
+    echo "$BUILD_DIR 目录不存在"
+fi
+
+# Remove the dist directory
+if [ -d "$DIST_DIR" ]; then
+    rm -rf "$DIST_DIR"
+    echo "成功删除 $DIST_DIR 目录"
+else
+    echo "$DIST_DIR 目录不存在"
+fi
+
+# Find and delete all .egg-info files or directories
+find . -name "$EGG_INFO_PATTERN" -exec rm -rf {} +
+echo "已删除所有 $EGG_INFO_PATTERN 文件或目录"
+
+```
+
+## 2. Bump the version number
+```sh
+#!/bin/bash
+# Extract the current version from setup.py
+version=$(grep "version=" setup.py | sed -E "s/.*version=['\"]([^'\"]+)['\"].*/\1/")
+# Split the version into an array
+version_parts=($(echo "$version" | awk -F. '{for(i=1;i<=NF;i++) print $i}'))
+# Length of the version array
+len=${#version_parts[@]}
+# Print the split array for debugging
+echo "拆分后的版本号数组: ${version_parts[@]}"
+# Increment the last version component
+last_index=$((len - 1))
+((version_parts[$last_index]++))
+# Handle carry-over
+for ((i = last_index; i > 0; i--)); do
+    if [ ${version_parts[$i]} -ge 10 ]; then
+        version_parts[$i]=0
+        ((version_parts[$i - 1]++))
+    else
+        break
+    fi
+done
+# Reassemble the version number
+new_version=$(IFS=. ; echo "${version_parts[*]}")
+# Use the sed syntax appropriate for the OS
+if [[ "$(uname)" == "Darwin" ]]; then
+    sed -i '' "s/version=['\"][^'\"]*['\"]/version='$new_version'/" setup.py
+else
+    sed -i "s/version=['\"][^'\"]*['\"]/version='$new_version'/" setup.py
+fi
+echo "版本号已从 $version 更新为 $new_version"
+```
+
+
+## 3. Build the package
+```sh
+python setup.py bdist_wheel
+if [ $? -eq 0 ]; then
+    echo "bdist_wheel 执行成功"
+else
+    echo "bdist_wheel 执行失败"
+fi
+```
+
+
+## 4. Upload to PyPI
+```sh
+twine upload dist/*
+```
+
+# Platform usage demo
+```
+import json  # keep imports at the start of the line
+from smartpush.export.basic import ExcelExportChecker
+from smartpush.export.basic import GetOssUrl
+oss = GetOssUrl.get_oss_address_with_retry(vars['queryOssId'], "${em_host}", json.loads(requestHeaders))
+result = ExcelExportChecker.check_excel_all(expected_oss=oss, actual_oss=vars['exportedOss'], ignore_sort=True)
+assert result
+```
+## Optional parameters supported by check_excel_all()
+1. check_type = "including": pass this when the expected result only needs to be contained in the actual one, e.g. contact exports; use it together with flow exports
+2. ignore_sort = 0: pass this to ignore row ordering inside a sheet, e.g. the email hotspot-click export has no fixed order; the value is the column index to sort by, 0 being the first column
+3. ignore_sort_sheet_name = "url点击": used together with ignore_sort to choose which sheet ignores ordering; if omitted, all sheets are sorted; the value is case-insensitive (url点击 matches URL点击)
+4. skiprows = 1: pass 1 to skip the first row, e.g. flow exports whose dynamic header is not fixed can skip reading the first row
+
+## get_oss_address_with_retry(target_id, url, requestHeader, requestParam=None, is_import=False, **kwargs)
+1. is_import: pass True to verify that an import succeeded; by default everything is treated as an export
+2. **kwargs supports retry settings:
+tries = 30  # number of retries
+delay = 2   # delay between retries, in seconds
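The extended parameters described in the new README are easiest to see in one place. The following is a minimal sketch that combines them; the OSS id, host, headers, and exported file reference are placeholders for values that normally come from the calling platform's context (`vars`, `requestHeaders`, `${em_host}` in the demo), not values taken from this release.

```python
import json

from smartpush.export.basic import ExcelExportChecker
from smartpush.export.basic import GetOssUrl

# Placeholder request context; on the platform this comes from requestHeaders.
request_headers = json.loads('{"cookie": "<session cookie>"}')

# Retry behaviour is tuned through **kwargs (tries / delay, as documented above).
oss = GetOssUrl.get_oss_address_with_retry(
    "<queryOssId>",        # target_id of the export task (placeholder)
    "https://<em_host>",   # url, i.e. the ${em_host} of the environment (placeholder)
    request_headers,
    tries=30,              # number of retries
    delay=2,               # seconds between retries
)

# Extended comparison options documented above.
result = ExcelExportChecker.check_excel_all(
    expected_oss=oss,
    actual_oss="<exportedOss>",        # placeholder for the actual export
    check_type="including",            # expected content only needs to be contained
    ignore_sort=0,                     # ignore row order, sorting by the first column
    ignore_sort_sheet_name="url点击",   # only this sheet ignores ordering
    skiprows=1,                        # skip a dynamic header row
)
assert result
```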
{smartpush-1.4.0 → smartpush-1.4.1}/setup.py
@@ -2,12 +2,12 @@ from setuptools import setup, find_packages
 
 setup(
     name='smartpush',
-    version='1.4.0',
+    version='1.4.1',
     description='用于smartpush自动化测试工具包',
-    author='
+    author='lulu、felix、long',
     packages=find_packages(),
     install_requires=[
         # List your package's dependencies here
         # TODO
     ],
-)
+)
{smartpush-1.4.0 → smartpush-1.4.1}/smartpush/flow/MockFlow.py
@@ -5,8 +5,11 @@ import requests
 from smartpush.utils import ListDictUtils
 
 
-def get_current_flow(host_domain, cookies, flow_id):
-    # fetch all node data of the flow
+def get_current_flow(host_domain, cookies, flow_id, splits=None, **kwargs):
+    """Fetch the data of every node in a flow.
+    splits: list, fill in when asserting on split nodes; the branch order taken through each split, which must reach the end of the flow, e.g. ["false", "true"], i.e. first the branch that does not satisfy the split condition, then the one that does
+    get_email_content: bool, whether to also extract the email content
+    """
     _url = host_domain + "/flow/getFlowDetail"
     headers = {
         "cookie": cookies
@@ -19,8 +22,11 @@ def get_current_flow(host_domain, cookies, flow_id):
     result = json.loads(requests.request(method="get", url=_url, headers=headers, params=params).text)
     # store the counts keyed by node id
     node_counts = []
+    get_email_content = kwargs.get("get_email_content", False)
+    email_contents = []
 
-    def process_node(node):
+    def process_node(node, split_num=-1):
+        split_num += 1
         node_counts.append({node["id"]: {"completedCount": node["data"]["completedCount"],
                                          "skippedCount": node["data"]["skippedCount"],
                                          "openUserCount": node["data"]["openUserCount"],
@@ -29,22 +35,28 @@ def get_current_flow(host_domain, cookies, flow_id):
                                          }
                                          }
                            )
+        # extract the email content
+        if get_email_content and node["type"] == "sendLetter":
+            email_contents.append({node["data"]["sendLetter"]["emailName"]: {
+                "receiveAddress": node["data"]["sendLetter"]["receiveAddress"],
+                "sender": node["data"]["sendLetter"]["sender"],
+            }})
         # handle split nodes
         if "split" in node["data"].keys():
-
-
-
-
-        # handle abTesting nodes
-        elif "abTesting" in node["data"].keys():
-
-
-
-
+            if splits is not None:
+                split_branch = node['data']['split']['branches'][splits[split_num]]
+                for branch_node in split_branch:
+                    process_node(branch_node, split_num)
+        # # handle abTesting nodes
+        # elif "abTesting" in node["data"].keys():
+        #     for branch_node in node['data']['abTesting']['branches']["a"]:
+        #         process_node(branch_node)
+        #     for branch_node in node['data']['abTesting']['branches']["b"]:
+        #         process_node(branch_node)
 
     # process every top-level node
     for node in result['resultData']['nodes']:
-        process_node(node)
+        process_node(node=node)
     return node_counts, result["resultData"]["version"]
 
 
@@ -118,6 +130,7 @@ def check_flow(host_domain, cookies, mock_domain="", **kwargs):
     num: optional, defaults to 1 - used by compare_lists as the delta for the assertion
     all_key: optional, bool, defaults to False; when True, checks the 5 commonly used metric fields of a node
     check_key: optional, defaults to completedCount only; a list of the metric keys to check per node, e.g. completedCount, skippedCount, openRate, etc.
+    split_node: list, required when the flow contains split nodes; structure e.g. ["false", "true"], i.e. first the branch that does not satisfy the split condition, then the one that does
     """
     # todo: the email verification part is still missing and will be added later
     is_split_steps = kwargs.get("split_steps", "all")
@@ -125,7 +138,8 @@ def check_flow(host_domain, cookies, mock_domain="", **kwargs):
     if is_split_steps == "one" or is_split_steps == "all":
         # capture the flow data before the trigger for the later comparison
         old_flow_counts, old_versions = get_current_flow(host_domain=host_domain, cookies=cookies,
-                                                         flow_id=kwargs["flow_id"])
+                                                         flow_id=kwargs["flow_id"],
+                                                         splits=kwargs.get("split_node", None))
         kwargs["old_flow_counts"] = old_flow_counts
         # update the flow
         if kwargs.get("update_flow_params", False):
@@ -144,7 +158,8 @@ def check_flow(host_domain, cookies, mock_domain="", **kwargs):
         time.sleep(kwargs.get("sleep_time", 60))
         # capture the flow data after the trigger and assert on it
         new_flow_counts, new_versions = get_current_flow(host_domain=host_domain, cookies=cookies,
-                                                         flow_id=kwargs["flow_id"])
+                                                         flow_id=kwargs["flow_id"],
+                                                         splits=kwargs.get("split_node", None))
         # assertions
         result = ListDictUtils.compare_lists(temp1=kwargs.get("old_flow_counts"),
                                              temp2=new_flow_counts, num=kwargs.get("num", 1),
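Taken together, the new splits/split_node handling enables a two-step flow check around a split node. Below is a minimal usage sketch; the mock domain, host, cookies, flow id, and pulsar payload are placeholders (the test.py changes further down in this diff exercise the same two-step pattern with real values).

```python
from smartpush.flow import MockFlow

mock_domain = "http://<mock-domain>"                  # placeholder
host_domain = "https://<smartpush-env>/api-em-ec2"    # placeholder
cookies = "<session cookies>"                         # placeholder
params = {}  # pulsar payload used to trigger the flow (placeholder)

# Step 1: snapshot the node counts before the trigger; split_node=["true"]
# walks the branch that satisfies the split condition.
step1_counts, _ = MockFlow.check_flow(mock_domain=mock_domain, host_domain=host_domain, cookies=cookies,
                                      flow_id="<FLOW_ID>", pulsar=params,
                                      split_steps="one", split_node=["true"])

# Step 2: after the flow has had time to run, re-read the counts and assert
# them against the step-1 snapshot.
result = MockFlow.check_flow(mock_domain=mock_domain, host_domain=host_domain, cookies=cookies,
                             flow_id="<FLOW_ID>", old_flow_counts=step1_counts,
                             split_steps="two", split_node=["true"])
print(result)
```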
{smartpush-1.4.0 → smartpush-1.4.1}/smartpush/test.py
@@ -2,6 +2,7 @@
 # @Time :2025/2/20 00:27
 # @Author :luzebin
 import json
+import time
 
 import pandas as pd
 
@@ -41,10 +42,10 @@ if __name__ == '__main__':
     # errors = ExcelExportChecker.check_field_format(actual_oss=oss1, fileds={0: {5: "time"}}, skiprows=1)
     # ExcelExportChecker.check_excel_name(actual_oss=oss1, expected_oss=url)
 
-    # flow trigger scenario
+    # flow trigger scenario ------------------------------------------------------------------------------------------------------------------------
     _url = "http://sp-go-flow-test.inshopline.com"
     host_domain = "https://test.smartpushedm.com/api-em-ec2"
-    cookies = "
+    cookies = "_ga=GA1.1.88071637.1717860341; _ga_NE61JB8ZM6=GS1.1.1718954972.32.1.1718954972.0.0.0; _ga_Z8N3C69PPP=GS1.1.1723104149.2.0.1723104149.0.0.0; _ga_D2KXR23WN3=GS1.1.1735096783.3.1.1735096812.0.0.0; osudb_lang=; osudb_oar=#01#SID0000127BMP3Os96/37rp7Et7tYy+s7TyyN/AaKNkLtst/Ks9rPF/Co/OjyNJYL+Y4lPf+p9rzrSk9uJnxx4BFXI04BoU/fxhnvaMH2ac1DoeYo7Ll0eizFs+CGNscHjBENqjUacTEcDHSprmyG4TrNfYJkB; osudb_appid=SMARTPUSH; osudb_subappid=1; osudb_uid=4213785247; a_lang=zh-hant-tw; ecom_http_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3NDkyNjM1NzgsImp0aSI6IjIyYmQwNmE3LTVkYTItNDA4OS05NGViLThlNThlNzdhM2MyOSIsInVzZXJJbmZvIjp7ImlkIjowLCJ1c2VySWQiOiI0MjEzNzg1MjQ3IiwidXNlcm5hbWUiOiIiLCJlbWFpbCI6ImZlbGl4LnNoYW9Ac2hvcGxpbmVhcHAuY29tIiwidXNlclJvbGUiOiJvd25lciIsInBsYXRmb3JtVHlwZSI6Nywic3ViUGxhdGZvcm0iOjEsInBob25lIjoiIiwibGFuZ3VhZ2UiOiJ6aC1oYW50LXR3IiwiYXV0aFR5cGUiOiIiLCJhdHRyaWJ1dGVzIjp7ImNvdW50cnlDb2RlIjoiQ04iLCJjdXJyZW5jeSI6IkpQWSIsImN1cnJlbmN5U3ltYm9sIjoiSlDCpSIsImRvbWFpbiI6InNtYXJ0cHVzaDQubXlzaG9wbGluZXN0Zy5jb20iLCJsYW5ndWFnZSI6ImVuIiwibWVyY2hhbnRFbWFpbCI6ImZlbGl4LnNoYW9Ac2hvcGxpbmUuY29tIiwibWVyY2hhbnROYW1lIjoiU21hcnRQdXNoNF9lYzJf6Ieq5Yqo5YyW5bqX6ZO6IiwicGhvbmUiOiIiLCJzY29wZUNoYW5nZWQiOnRydWUsInN0YWZmTGFuZ3VhZ2UiOiJ6aC1oYW5zLWNuIiwic3RhdHVzIjowLCJ0aW1lem9uZSI6IkFzaWEvTWFjYW8ifSwic3RvcmVJZCI6IjE2NDQzOTU5MjA0NDQiLCJoYW5kbGUiOiJzbWFydHB1c2g0IiwiZW52IjoiQ04iLCJzdGUiOiIiLCJ2ZXJpZnkiOiIifSwibG9naW5UaW1lIjoxNzQ2NjcxNTc4OTU4LCJzY29wZSI6WyJlbWFpbC1tYXJrZXQiLCJjb29raWUiLCJzbC1lY29tLWVtYWlsLW1hcmtldC1uZXctdGVzdCIsImVtYWlsLW1hcmtldC1uZXctZGV2LWZzIiwiYXBpLXVjLWVjMiIsImFwaS1zdS1lYzIiLCJhcGktZW0tZWMyIiwiZmxvdy1wbHVnaW4iLCJhcGktc3AtbWFya2V0LWVjMiJdLCJjbGllbnRfaWQiOiJlbWFpbC1tYXJrZXQifQ.Ym_w8ftfdP3zxVXpK_ZlPbWRI-S3ie46_zMCVJZln_Y; JSESSIONID=00A6B90F793A7392A2398A80CA278359"
 
     params = {
         "abandonedOrderId": "c2c4a695a36373f56899b370d0f1b6f2",
@@ -140,17 +141,23 @@ if __name__ == '__main__':
                "clickDistinctUserRate": 0}, "id": "2503b475-ce3e-4906-ab04-0ebc387f0d7e"}],
               "showDataStartTime": 1745164800000, "showDataEndTime": 1745251199000}
     # mock_pulsar = MockFlow.check_flow(mock_domain=_url, host_domain=host_domain, cookies=cookies,
-    #                                   flow_id="
-    #
+    #                                   flow_id="FLOW6966717528141252274", pulsar=params,
+    #                                   split_node=["true"])
     # print(mock_pulsar)
 
-    #
-    #
-    #
-    #
-
-
-
+    # old_flow_counts, old_versions, email_contents = MockFlow.get_current_flow(host_domain=host_domain, cookies=cookies,
+    #                                                                           flow_id="FLOW6966717528141252274",
+    #                                                                           splits=["false", "false"], get_email_content=True)
+    # print(old_flow_counts, old_versions, email_contents)
+    mock_pulsar_step1, _ = MockFlow.check_flow(mock_domain=_url, host_domain=host_domain, cookies=cookies,
+                                               flow_id="FLOW6966717528141252274", pulsar=params,
+                                               split_steps="one", split_node=["true"])
+    time.sleep(60)
+    mock_pulsar_step2 = MockFlow.check_flow(mock_domain=_url, host_domain=host_domain, cookies=cookies,
+                                            flow_id="FLOW6966717528141252274", old_flow_counts=mock_pulsar_step1,
+                                            split_steps="two", split_node=["true"])
+    print(mock_pulsar_step1)
+    print(mock_pulsar_step2)
 
     # split_steps="two")
     # # node_counts, versions = MockFlow.get_current_flow(host_domain=host_domain, cookies=cookies,
@@ -191,8 +198,9 @@ if __name__ == '__main__':
             'receiveAddress': 'autotest-smartpushpro5@smartpush.com',
             'sender': '1SmartPush_Pro5_ec2自动化店铺 AutoTestName',
             'subtitle': '营销测试邮件-2025-04-24 10:20:29.560357-😈'}}]
-
-
+
+    # result = EmailUtlis.check_email_content(emailProperty=email_property, loginEmail=loginEmail, password=password)
+    # print(result)
 
     ccc = {
         "id": "FLOW6966717528141252274",
smartpush-1.4.0/README.md
DELETED
@@ -1,44 +0,0 @@
-# SmartPush_AutoTest
-
-
-
-## Getting started
-
-## Packaging/upload dependencies
-```
-pip install wheel
-pip install twine
-```
-
-
-## Build (remember to bump the version number before packaging)
-```
-python setup.py sdist bdist_wheel
-```
-
-
-## Upload to PyPI
-```
-twine upload dist/*
-```
-
-# Platform usage demo
-```
-import json  # keep imports at the start of the line
-from smartpush.export.basic import ExcelExportChecker
-from smartpush.export.basic import GetOssUrl
-oss = GetOssUrl.get_oss_address_with_retry(vars['queryOssId'], "${em_host}", json.loads(requestHeaders))
-result = ExcelExportChecker.check_excel_all(expected_oss=oss, actual_oss=vars['exportedOss'], ignore_sort=True)
-assert result
-```
-## Optional parameters supported by check_excel_all()
-1. check_type = "including": pass this when the expected result only needs to be contained in the actual one, e.g. contact exports; use it together with flow exports
-2. ignore_sort = 0: pass this to ignore row ordering inside a sheet, e.g. the email hotspot-click export has no fixed order; the value is the column index to sort by, 0 being the first column
-3. ignore_sort_sheet_name = "url点击": used together with ignore_sort to choose which sheet ignores ordering; if omitted, all sheets are sorted; the value is case-insensitive (url点击 matches URL点击)
-4. skiprows = 1: pass 1 to skip the first row, e.g. flow exports whose dynamic header is not fixed can skip reading the first row
-
-## get_oss_address_with_retry(target_id, url, requestHeader, requestParam=None, is_import=False, **kwargs)
-1. is_import: pass True to verify that an import succeeded; by default everything is treated as an export
-2. **kwargs supports retry settings:
-tries = 30  # number of retries
-delay = 2   # delay between retries, in seconds