kaq-quant-common 0.1.91__tar.gz → 0.1.92__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67) hide show
  1. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/PKG-INFO +1 -1
  2. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/resources/kaq_ddb_stream_init_resources.py +1 -1
  3. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/resources/kaq_ddb_stream_read_resources.py +1 -1
  4. kaq_quant_common-0.1.92/kaq_quant_common/resources/kaq_ddb_stream_write_resources.py +202 -0
  5. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/pyproject.toml +1 -1
  6. kaq_quant_common-0.1.91/kaq_quant_common/resources/kaq_ddb_stream_write_resources.py +0 -91
  7. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/README.md +0 -0
  8. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/__init__.py +0 -0
  9. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/__init__.py +0 -0
  10. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/common/__init__.py +0 -0
  11. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/common/api_interface.py +0 -0
  12. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/common/auth.py +0 -0
  13. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/rest/__init__.py +0 -0
  14. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/rest/api_client_base.py +0 -0
  15. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/rest/api_server_base.py +0 -0
  16. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/rest/instruction/helper/order_helper.py +0 -0
  17. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/rest/instruction/instruction_client.py +0 -0
  18. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/rest/instruction/instruction_server_base.py +0 -0
  19. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/rest/instruction/models/__init__.py +0 -0
  20. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/rest/instruction/models/account.py +0 -0
  21. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/rest/instruction/models/order.py +0 -0
  22. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/rest/instruction/models/position.py +0 -0
  23. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/rest/instruction/models/transfer.py +0 -0
  24. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/ws/__init__.py +0 -0
  25. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/ws/exchange/models.py +0 -0
  26. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/ws/exchange/ws_exchange_client.py +0 -0
  27. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/ws/exchange/ws_exchange_server.py +0 -0
  28. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/ws/instruction/__init__.py +0 -0
  29. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/ws/instruction/ws_instruction_client.py +0 -0
  30. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/ws/instruction/ws_instruction_server_base.py +0 -0
  31. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/ws/models.py +0 -0
  32. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/ws/ws_client_base.py +0 -0
  33. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/api/ws/ws_server_base.py +0 -0
  34. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/common/__init__.py +0 -0
  35. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/common/ddb_table_monitor.py +0 -0
  36. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/common/http_monitor.py +0 -0
  37. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/common/modules/limit_order_helper.py +0 -0
  38. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/common/modules/limit_order_symbol_monitor.py +0 -0
  39. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/common/modules/limit_order_symbol_monitor_group.py +0 -0
  40. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/common/monitor_base.py +0 -0
  41. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/common/monitor_group.py +0 -0
  42. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/common/redis_table_monitor.py +0 -0
  43. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/common/statistics/funding_rate_history_statistics.py +0 -0
  44. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/common/statistics/kline_history_statistics.py +0 -0
  45. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/common/ws_wrapper.py +0 -0
  46. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/config/config.yaml +0 -0
  47. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/resources/__init__.py +0 -0
  48. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/resources/kaq_ddb_pool_stream_read_resources.py +0 -0
  49. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/resources/kaq_mysql_init_resources.py +0 -0
  50. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/resources/kaq_mysql_resources.py +0 -0
  51. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/resources/kaq_postgresql_resources.py +0 -0
  52. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/resources/kaq_quant_hive_resources.py +0 -0
  53. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/resources/kaq_redis_resources.py +0 -0
  54. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/utils/__init__.py +0 -0
  55. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/utils/dagster_job_check_utils.py +0 -0
  56. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/utils/dagster_utils.py +0 -0
  57. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/utils/date_util.py +0 -0
  58. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/utils/enums_utils.py +0 -0
  59. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/utils/error_utils.py +0 -0
  60. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/utils/hash_utils.py +0 -0
  61. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/utils/log_time_utils.py +0 -0
  62. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/utils/logger_utils.py +0 -0
  63. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/utils/mytt_utils.py +0 -0
  64. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/utils/signal_utils.py +0 -0
  65. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/utils/sqlite_utils.py +0 -0
  66. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/utils/uuid_utils.py +0 -0
  67. {kaq_quant_common-0.1.91 → kaq_quant_common-0.1.92}/kaq_quant_common/utils/yml_utils.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: kaq_quant_common
3
- Version: 0.1.91
3
+ Version: 0.1.92
4
4
  Summary:
5
5
  Author: kevinfuture
6
6
  Author-email: liuenbofuture@foxmail.com
@@ -27,7 +27,7 @@ class KaqQuantDdbStreamInitRepository:
27
27
  '''
28
28
  self.session = ddb.session(enableASYNC=True)
29
29
  self.session.connect(host, port, user, passwd)
30
- self.session.enableStreaming()
30
+ # self.session.enableStreaming()
31
31
 
32
32
  '''
33
33
  创建流数据表 && 创建引擎
@@ -23,7 +23,7 @@ class KaqQuantDdbStreamReadRepository:
23
23
  self.session = ddb.session(enableASYNC=False)
24
24
  self.session.setTimeout(3600)
25
25
  self.session.connect(host, port, user, passwd, keepAliveTime=240, reconnect=True, tryReconnectNums=10)
26
- self.session.enableStreaming()
26
+ # self.session.enableStreaming()
27
27
 
28
28
  # 需要注意的是 fetchSize 取值不能小于 8192 (记录条数)
29
29
  self.size = 8192
@@ -0,0 +1,202 @@
1
+ import os
2
+ import queue
3
+ import time
4
+ import dolphindb as ddb
5
+ from kaq_quant_common.utils import yml_utils
6
+ import pandas as pd
7
+ import threading
8
+ from kaq_quant_common.utils.logger_utils import get_logger
9
+ import traceback
10
+ from typing_extensions import deprecated
11
+
12
# Module-level lock serializing DolphinDB session construction across the
# repository classes defined below.
mutex = threading.Lock()
13
+
14
class KaqQuantDdbStreamWriteRepository:
    '''
    Script-based writer for DolphinDB stream tables.

    Rows are rendered into `insert into <table> values (...)` scripts and
    executed on an asynchronous session (fire-and-forget writes).
    '''

    def __init__(self, host, port, user, passwd):
        '''
        Open an asynchronous DolphinDB session.

        :param host: DolphinDB server host
        :param port: DolphinDB server port
        :param user: login user name
        :param passwd: login password
        '''
        self.logger = get_logger(self)
        # Serialize session construction across threads; `with` replaces the
        # original manual acquire()/release() in try/finally.
        with mutex:
            try:
                self.session = ddb.session(enableASYNC=True)
                self.session.connect(host, port, user, passwd, tryReconnectNums=10, reconnect=True, keepAliveTime=1000, readTimeout=10, writeTimeout=5)
                # enableStreaming is only required for stream subscriptions,
                # not for writing, so it stays disabled here.
                # self.session.enableStreaming(threadCount=5)
                # self.pool = ddb.DBConnectionPool(host, port, userid=user, password=passwd, loadBalance=True, reConnect=True, tryReconnectNums=5, sqlStd=SqlStd.MySQL)

                # NOTE: fetchSize must not be smaller than 8192 records.
                self.size = 8192
            except Exception as e:
                self.logger.error(f'KaqQuantDdbStreamWriteRepository.__init__ is occured error: {str(e)} - {str(traceback.format_exc())}')

    def save2stream(self, ddb_table_name: str, df: pd.DataFrame):
        '''
        Insert `df` into the stream table one row at a time.

        String (object) columns are wrapped in single quotes before being
        embedded into the script. Fixed: quoting is now applied to a copy, so
        the caller's DataFrame is no longer mutated (the original quoted the
        columns in place, which double-quoted values on repeated calls).

        NOTE(review): values are interpolated unescaped; a string containing
        a single quote will break the generated statement.
        '''
        if df is None or df.empty:
            return
        quoted = df.copy()
        for column, dtype in quoted.dtypes.items():
            if dtype == 'object' or dtype == 'str':
                quoted[column] = '\'' + quoted[column] + '\''
        for _, row in quoted.iterrows():
            script = f"insert into {ddb_table_name} values({', '.join(str(x) for x in row.values)})"
            try:
                self.session.run(script, clearMemory=True)
            except Exception as e:
                self.logger.error(f'KaqQuantDdbStreamWriteRepository.save2stream is occured error: tableName is {ddb_table_name} - {str(e)} - {str(traceback.format_exc())}')

    def build_insert_values_fast(self, df):
        '''
        Render every row of `df` as a `(v1, v2, ...)` value-tuple string.

        Object/str columns are single-quoted; everything else goes through
        str(). Returns an empty list for an empty DataFrame.
        '''
        if df.empty:
            return []
        dtypes = df.dtypes.tolist()
        # Determine once which column indices need quoting.
        str_idx = {i for i, dt in enumerate(dtypes) if dt == object or dt == 'object' or dt == 'str'}
        # Work on the raw ndarray to minimise per-row pandas overhead.
        arr = df.to_numpy()
        rows = []
        for row in arr:
            parts = []
            for i, v in enumerate(row):
                if i in str_idx:
                    parts.append(f"'{v}'")
                else:
                    parts.append(str(v))
            rows.append("(" + ", ".join(parts) + ")")
        return rows

    def save2stream_batch(self, ddb_table_name: str, df: pd.DataFrame):
        '''
        Insert all rows of `df` with a single multi-value insert script.

        Fixed: an empty/None DataFrame now returns immediately instead of
        generating a malformed `insert into <table> values ` script.
        '''
        if df is None or df.empty:
            return
        try:
            start1 = time.monotonic_ns()
            rows = self.build_insert_values_fast(df)
            values = ', '.join(rows)
            script = f"insert into {ddb_table_name} values {values}"
            start2 = time.monotonic_ns()
            self.session.run(script, clearMemory=True)
            end = time.monotonic_ns()
            if "KAQ_QUANT_LOG" in os.environ:
                diff = end - start2
                # Fixed comment: 10_000_000 ns is 10 ms (the original comment
                # claimed 1 ms); the threshold value itself is unchanged.
                if diff > 10_000_000:  # over 10 ms
                    self.logger.warning(f'KaqQuantDdbStreamWriteRepository.save2stream cost time is only write : {end - start2} ns, save2stream_batch :{end - start1} ns, batch size is {len(df)}, tableName is {ddb_table_name}')
        except Exception as e:
            self.logger.error(f'KaqQuantDdbStreamWriteRepository.save2stream_batch is occured error: tableName is {ddb_table_name} - {str(e)} - {str(traceback.format_exc())}')
90
+
91
+
92
class DDBAsyncDFWriter:
    '''
    Background batching writer.

    DataFrames queued via add_df() are concatenated and flushed to a
    DolphinDB table appender by a daemon thread, whenever the buffered row
    count reaches `batch_size`, `flush_interval_ms` elapses, or the writer
    is being stopped.
    '''

    def __init__(self, appender, batch_size=1000, flush_interval_ms=80):
        '''
        :param appender: object exposing append(DataFrame), e.g. ddb.TableAppender
        :param batch_size: flush once this many rows are buffered
        :param flush_interval_ms: maximum time between flushes, in milliseconds
        '''
        # Fixed for consistency: every other class in this module passes
        # `self` to get_logger; the original called it with no argument.
        self.logger = get_logger(self)
        self.appender = appender
        self.batch_size = batch_size
        self.flush_interval = flush_interval_ms / 1000.0

        self.queue = queue.Queue(maxsize=10000)
        self.running = True

        self.thread = threading.Thread(target=self._worker, daemon=True)
        self.thread.start()

    def add_df(self, df):
        """Enqueue a DataFrame for asynchronous writing.

        None/empty frames are ignored; when the queue is full the frame is
        dropped and an error is logged (best-effort, never blocks).
        """
        if not self.running:
            return
        if df is None or df.empty:
            return
        try:
            self.queue.put(df, block=False)
        except queue.Full:
            self.logger.error("Warning: DDBAsyncDFWriter queue is full!")

    def _worker(self):
        # Accumulate frames until one of the flush conditions triggers.
        buffer = []
        current_rows = 0
        last_flush_time = time.time()

        while self.running or not self.queue.empty():
            try:
                # Short timeout so the loop reacts quickly to running=False.
                df = self.queue.get(timeout=0.01)
                buffer.append(df)
                current_rows += len(df)
            except queue.Empty:
                # During shutdown with nothing buffered we are done; otherwise
                # fall through so a pending buffer can still be flushed.
                if not self.running and not buffer:
                    break

            now = time.time()
            # Flush when enough rows accumulated, the interval elapsed,
            # or the writer is shutting down.
            if buffer and (current_rows >= self.batch_size or
                           (now - last_flush_time) >= self.flush_interval or
                           not self.running):
                self._do_flush(buffer)
                buffer = []
                current_rows = 0
                last_flush_time = now

    def _do_flush(self, buffer):
        # Concatenate the buffered frames and append them in one call;
        # failures are logged and the batch is dropped (best-effort).
        try:
            if not buffer:
                return
            final_df = pd.concat(buffer, ignore_index=True)
            self.appender.append(final_df)
        except Exception as e:
            self.logger.error(f"DolphinDB 写入异常: {e}")

    def stop(self):
        """
        Graceful shutdown:
        1. mark the writer as not running;
        2. wait for the worker thread to flush everything left in the queue.
        """
        self.logger.warning("正在停止 DDBAsyncDFWriter 并清空残留数据...")
        self.running = False
        self.thread.join()  # wait for the worker to process the last batch
        self.logger.info("DDBAsyncDFWriter 已安全停止。")
161
+
162
class KaqQuantDdbStreamWriteSyncRepository:
    '''
    Appender-based writer: rows are appended through a ddb.TableAppender,
    batched in the background by a DDBAsyncDFWriter.
    '''

    def __init__(self, host, port, user, passwd, tableName=None):
        '''
        Open a synchronous DolphinDB session and a background batch writer.

        :param host: DolphinDB server host
        :param port: DolphinDB server port
        :param user: login user name
        :param passwd: login password
        :param tableName: target table name for the appender (required)
        :raises ValueError: if tableName is not provided
        '''
        if tableName is None:
            raise ValueError(f'Error tableName, please set. tableName={tableName}')
        self.tableName = tableName
        self.logger = get_logger(self)
        # Serialize session construction, same as the other repositories.
        with mutex:
            try:
                self.session = ddb.session()
                self.session.connect(host, port, user, passwd, tryReconnectNums=10, reconnect=True, keepAliveTime=1000, readTimeout=10, writeTimeout=5)

                self.batch_writer = DDBAsyncDFWriter(ddb.TableAppender(table_name=self.tableName, conn=self.session))
                # NOTE: fetchSize must not be smaller than 8192 records.
                self.size = 8192
            except Exception as e:
                # Fixed: the original message named a non-existent class
                # ("KaqQuantDdbTableStreamWriteRepository").
                self.logger.error(f'KaqQuantDdbStreamWriteSyncRepository.__init__ is occured error: {str(e)} - {str(traceback.format_exc())}')

    @deprecated("请确保pandas数据与ddb表的数据类型一致.")
    def insert(self, df: pd.DataFrame):
        '''
        Queue a DataFrame for asynchronous append.

        Datetime-like columns must already match the DDB table schema, e.g.:
            df['create_time'] = pd.to_datetime(df['create_time'], unit='ms')
            df['event_time'] = pd.to_datetime(df['event_time'], unit='ms')
        '''
        try:
            self.batch_writer.add_df(df)
        except Exception as e:
            # Fixed: class name in the message corrected (see __init__).
            self.logger.error(f'KaqQuantDdbStreamWriteSyncRepository.insert is occured error: {str(e)} - {str(traceback.format_exc())}')
198
+
199
+
200
if __name__ == '__main__':
    # Manual smoke test: read DDB connection settings from the local YAML
    # config and construct a writer (no data is written here).
    host, port, user, passwd = yml_utils.get_ddb_info(os.getcwd())
    kaq = KaqQuantDdbStreamWriteRepository(host, port, user, passwd)
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "kaq_quant_common"
3
- version = "0.1.91"
3
+ version = "0.1.92"
4
4
  description = ""
5
5
  authors = [
6
6
  {name = "kevinfuture",email = "liuenbofuture@foxmail.com"}
@@ -1,91 +0,0 @@
1
- import os
2
- import time
3
- import dolphindb as ddb
4
- from kaq_quant_common.utils import yml_utils
5
- import pandas as pd
6
- import threading
7
- from kaq_quant_common.utils.logger_utils import get_logger
8
- import traceback
9
-
10
- mutex = threading.Lock()
11
-
12
- class KaqQuantDdbStreamWriteRepository:
13
- '''
14
- 定义 asof_join的级联方式, 合并数据到一起, 然后可以订阅判断
15
- '''
16
- def __init__(self, host, port, user, passwd):
17
- self.logger = get_logger(self)
18
- '''
19
- 创建ddb连接 && 添加ddb流数据表支持
20
- '''
21
- try:
22
- mutex.acquire()
23
- self.session = ddb.session(enableASYNC=True)
24
- self.session.connect(host, port, user, passwd, tryReconnectNums=10, reconnect=True, keepAliveTime=1000, readTimeout=10, writeTimeout=5)
25
- self.session.enableStreaming(threadCount=5)
26
- # self.pool = ddb.DBConnectionPool(host, port, userid=user, password=passwd, loadBalance=True, reConnect=True, tryReconnectNums=5, sqlStd=SqlStd.MySQL)
27
-
28
- # 需要注意的是 fetchSize 取值不能小于 8192 (记录条数)
29
- self.size = 8192
30
- except Exception as e:
31
- self.logger.error(f'KaqQuantDdbStreamWriteRepository.__init__ is occured error: {str(e)} - {str(traceback.format_exc())}')
32
- finally:
33
- mutex.release()
34
-
35
- def save2stream(self, ddb_table_name: str, df : pd.DataFrame):
36
- '''
37
- 调用此方法之前, 需要将dataframe中的字符串类型的值 ,添加引号
38
- '''
39
- # 遍历每列的数据类型
40
- for column, dtype in df.dtypes.items():
41
- if dtype == 'object' or dtype == 'str':
42
- df[column] = '\'' + df[column] + '\''
43
- for index, row in df.iterrows():
44
- script = f"insert into {ddb_table_name} values({', '.join(str(x) for x in row.values)})"
45
- try:
46
- self.session.run(script, clearMemory=True)
47
- except Exception as e:
48
- self.logger.error(f'KaqQuantDdbStreamWriteRepository.save2stream is occured error: tableName is {ddb_table_name} - {str(e)} - {str(traceback.format_exc())}')
49
-
50
- def build_insert_values_fast(self, df):
51
- if df.empty:
52
- return []
53
- dtypes = df.dtypes.tolist()
54
- # 提前确定哪些列需要加引号
55
- str_idx = {i for i, dt in enumerate(dtypes) if dt == object or dt == 'object' or dt == 'str'}
56
- # 转成 ndarray,减少 pandas 参与
57
- arr = df.to_numpy()
58
- rows = []
59
- for row in arr:
60
- parts = []
61
- for i, v in enumerate(row):
62
- if i in str_idx:
63
- parts.append(f"'{v}'") # 直接拼接最快
64
- else:
65
- parts.append(str(v))
66
- rows.append("(" + ", ".join(parts) + ")")
67
- return rows
68
-
69
- def save2stream_batch(self, ddb_table_name: str, df : pd.DataFrame):
70
- '''
71
- 调用此方法之前, 需要将dataframe中的字符串类型的值 ,添加引号
72
- '''
73
- try:
74
- start1 = time.monotonic_ns()
75
- row = self.build_insert_values_fast(df)
76
- values = ', '.join(row)
77
- script = f"insert into {ddb_table_name} values {values}"
78
- start2 = time.monotonic_ns()
79
- self.session.run(script, clearMemory=True)
80
- end = time.monotonic_ns()
81
- if "KAQ_QUANT_LOG" in os.environ:
82
- diff = end - start2
83
- if diff > 1_000_000_0: # 超过1毫秒
84
- self.logger.warning(f'KaqQuantDdbStreamWriteRepository.save2stream cost time is only write : {end - start2} ns, save2stream_batch :{end - start1} ns, batch size is {len(df)}, tableName is {ddb_table_name}')
85
- except Exception as e:
86
- self.logger.error(f'KaqQuantDdbStreamWriteRepository.save2stream_batch is occured error: tableName is {ddb_table_name} - {str(e)} - {str(traceback.format_exc())}')
87
-
88
-
89
- if __name__ == '__main__':
90
- host, port, user, passwd = yml_utils.get_ddb_info(os.getcwd())
91
- kaq = KaqQuantDdbStreamWriteRepository(host, port, user, passwd)