kaq-quant-common 0.1.82__tar.gz → 0.1.84__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66) hide show
  1. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/PKG-INFO +1 -1
  2. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/rest/instruction/helper/order_helper.py +9 -0
  3. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/rest/instruction/models/account.py +3 -1
  4. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/rest/instruction/models/order.py +3 -0
  5. kaq_quant_common-0.1.84/kaq_quant_common/common/statistics/funding_rate_history_statistics.py +208 -0
  6. kaq_quant_common-0.1.84/kaq_quant_common/common/statistics/kline_history_statistics.py +211 -0
  7. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/pyproject.toml +1 -1
  8. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/README.md +0 -0
  9. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/__init__.py +0 -0
  10. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/__init__.py +0 -0
  11. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/common/__init__.py +0 -0
  12. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/common/api_interface.py +0 -0
  13. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/common/auth.py +0 -0
  14. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/rest/__init__.py +0 -0
  15. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/rest/api_client_base.py +0 -0
  16. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/rest/api_server_base.py +0 -0
  17. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/rest/instruction/instruction_client.py +0 -0
  18. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/rest/instruction/instruction_server_base.py +0 -0
  19. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/rest/instruction/models/__init__.py +0 -0
  20. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/rest/instruction/models/position.py +0 -0
  21. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/rest/instruction/models/transfer.py +0 -0
  22. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/ws/__init__.py +0 -0
  23. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/ws/exchange/models.py +0 -0
  24. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/ws/exchange/ws_exchange_client.py +0 -0
  25. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/ws/exchange/ws_exchange_server.py +0 -0
  26. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/ws/instruction/__init__.py +0 -0
  27. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/ws/instruction/ws_instruction_client.py +0 -0
  28. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/ws/instruction/ws_instruction_server_base.py +0 -0
  29. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/ws/models.py +0 -0
  30. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/ws/ws_client_base.py +0 -0
  31. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/api/ws/ws_server_base.py +0 -0
  32. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/common/__init__.py +0 -0
  33. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/common/ddb_table_monitor.py +0 -0
  34. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/common/http_monitor.py +0 -0
  35. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/common/modules/limit_order_helper.py +0 -0
  36. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/common/modules/limit_order_symbol_monitor.py +0 -0
  37. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/common/modules/limit_order_symbol_monitor_group.py +0 -0
  38. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/common/monitor_base.py +0 -0
  39. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/common/monitor_group.py +0 -0
  40. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/common/redis_table_monitor.py +0 -0
  41. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/common/ws_wrapper.py +0 -0
  42. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/config/config.yaml +0 -0
  43. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/resources/__init__.py +0 -0
  44. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/resources/kaq_ddb_pool_stream_read_resources.py +0 -0
  45. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/resources/kaq_ddb_stream_init_resources.py +0 -0
  46. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/resources/kaq_ddb_stream_read_resources.py +0 -0
  47. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/resources/kaq_ddb_stream_write_resources.py +0 -0
  48. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/resources/kaq_mysql_init_resources.py +0 -0
  49. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/resources/kaq_mysql_resources.py +0 -0
  50. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/resources/kaq_postgresql_resources.py +0 -0
  51. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/resources/kaq_quant_hive_resources.py +0 -0
  52. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/resources/kaq_redis_resources.py +0 -0
  53. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/utils/__init__.py +0 -0
  54. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/utils/dagster_job_check_utils.py +0 -0
  55. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/utils/dagster_utils.py +0 -0
  56. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/utils/date_util.py +0 -0
  57. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/utils/enums_utils.py +0 -0
  58. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/utils/error_utils.py +0 -0
  59. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/utils/hash_utils.py +0 -0
  60. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/utils/log_time_utils.py +0 -0
  61. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/utils/logger_utils.py +0 -0
  62. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/utils/mytt_utils.py +0 -0
  63. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/utils/signal_utils.py +0 -0
  64. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/utils/sqlite_utils.py +0 -0
  65. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/utils/uuid_utils.py +0 -0
  66. {kaq_quant_common-0.1.82 → kaq_quant_common-0.1.84}/kaq_quant_common/utils/yml_utils.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: kaq_quant_common
3
- Version: 0.1.82
3
+ Version: 0.1.84
4
4
  Summary:
5
5
  Author: kevinfuture
6
6
  Author-email: liuenbofuture@foxmail.com
@@ -274,6 +274,15 @@ class OrderHelper:
274
274
  WHERE exchange = '{exchange}' AND symbol = '{symbol}' AND position_side = '{position_side.value}' AND status = '{PositionStatus.OPEN.value}'
275
275
  ORDER BY open_time ASC;
276
276
  """
277
+
278
+ # 如果有指定仓位id,就用指定的
279
+ if hasattr(order, "position_id") and order.position_id:
280
+ sql = f"""
281
+ SELECT * FROM {self._mysql_table_name_position}
282
+ WHERE id = '{order.position_id}' AND status = '{PositionStatus.OPEN.value}'
283
+ """
284
+ self._logger.info(f"{ins_id}_{exchange}_{symbol} get position by id {order.position_id}")
285
+
277
286
  execute_ret = mysql.execute_sql(sql)
278
287
  try:
279
288
  row = execute_ret.fetchone()
@@ -1,3 +1,5 @@
1
+ from typing import Optional
2
+
1
3
  from pydantic import BaseModel
2
4
 
3
5
  from . import InstructionRequestBase, InstructionResponseBase
@@ -14,7 +16,7 @@ class AssetsInfo(BaseModel):
14
16
  # ↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓ 查询合约账户余额
15
17
class ContractBalanceRequest(InstructionRequestBase):
    # Coin symbol to query; optional since 0.1.84 (None presumably means
    # "all coins" — TODO confirm against the server-side handler)
    coin: Optional[str] = None
18
20
 
19
21
 
20
22
  class ContractBalanceResponse(InstructionResponseBase):
@@ -87,6 +87,9 @@ class OrderInfo(BaseModel):
87
87
  # 策略类型
88
88
  strategy_type: Optional[str] = None
89
89
 
90
+ # 平仓用的,指定仓位
91
+ position_id: Optional[str] = None
92
+
90
93
  def __init__(self, **data):
91
94
  super().__init__(**data)
92
95
  if self.instruction_id is None:
@@ -0,0 +1,208 @@
1
+ import datetime
2
+ import json
3
+ from typing import Optional
4
+
5
+ import pandas as pd
6
+ from pydantic import BaseModel
7
+
8
+ from kaq_quant_common.common.statistics.kline_history_statistics import \
9
+ StatisticsInfo
10
+ from kaq_quant_common.utils import logger_utils
11
+
12
+ platforms = ["binance", "bitget", "bybit", "gate", "htx", "okx"]
13
+
14
+
15
# Statistics over futures funding-rate history across exchanges.
# Use only after the raw data for the window has been fully crawled.
class FuturesFundingRateHistoryStatistics:
    def __init__(
        self,
        begin_timestamp: int,
        end_timestamp: int,
        symbols: list[str],
        master: str,
        redis=None,
        mysql=None,
    ):
        """Prepare a statistics run over [begin_timestamp, end_timestamp).

        begin_timestamp / end_timestamp: window bounds in epoch milliseconds.
        symbols: trading pairs to analyse, e.g. ["BTCUSDT"].
        master: reference exchange the others are compared against.
        redis: client used to persist results via hset — TODO confirm type.
        mysql: client exposing fetch_data(sql) — TODO confirm type.
        """
        # Root Redis hash key under which per-symbol statistics are stored.
        self.redis_key = "kaq_futures_funding_rate_history_statistics"
        self.begin_timestamp = begin_timestamp
        self.end_timestamp = end_timestamp
        self.symbols = symbols
        # Whole days in the window (inputs are milliseconds).
        self.day_num = (end_timestamp - begin_timestamp) // (24 * 3600 * 1000)
        self._redis = redis
        self._mysql = mysql
        self.master = master
        self._logger = logger_utils.get_logger()
        self.table_name = "kaq_futures_funding_rate_history"

    def symbols_statistics(self):
        """Run the cross-exchange funding-rate statistics for every symbol."""
        for symbol in self.symbols:
            try:
                self.get_symbol_funding_rate_all_platform(symbol)
            except Exception as e:
                # One bad symbol must not abort the whole batch.
                self._logger.error(f"拉取{symbol}的资金数据出现异常: {e}")
                continue

    def get_symbol_funding_rate_all_platform(self, symbol: str):
        """Pull funding rates for *symbol* from every platform, compute
        per-platform difference statistics versus the master platform, and
        persist the result as JSON into Redis."""
        df_dict = {}
        # The master platform provides the reference series.
        master_df = self.query_symbol_funding_rate_data(symbol, self.master)

        # Row count of the master pull; every other platform must match it,
        # otherwise its data is considered incomplete and is skipped.
        master_df_len = len(master_df)

        # No master data at all: nothing to compare against.
        if master_df_len == 0:
            raise Exception(
                f"{self.master}平台拉取到的{symbol}的资金数据量为:{master_df_len}条,跳过后续处理"
            )

        # Pull the remaining exchanges.
        for platform in platforms:
            if platform == self.master:
                continue
            platform_df = self.query_symbol_funding_rate_data(symbol, platform)

            if len(platform_df) != master_df_len:
                self._logger.error(
                    f"{platform}平台拉取到的{symbol}的资金数据量与主平台不一致,主平台:{master_df_len},{platform}平台:{len(platform_df)},跳过该交易所数据"
                )
                continue

            df_dict[platform] = platform_df

        # Compute the per-platform difference statistics.
        symbol_difference_dict = self.calculate_funding_rate_difference(
            symbol, master_df, df_dict
        )

        # Persist: one Redis hash per master platform, one field per symbol.
        self._redis.hset(
            self.redis_key + ":" + self.master.upper(),
            symbol,
            json.dumps({k: v.model_dump() for k, v in symbol_difference_dict.items()}),
        )

    def calculate_funding_rate_difference(
        self, symbol: str, master_df: pd.DataFrame, df_dict: dict
    ):
        """Return {platform: StatisticsInfo} describing funding-rate behaviour.

        The master platform's entry is computed on its raw rate series; every
        other platform's entry is computed on (master_rate - platform_rate)
        over the rows both platforms share.
        """
        res = {}
        # Funding interval (hours) taken from the most recent master row.
        master_hour = master_df.iloc[0]["hour"]
        # The master platform gets absolute (not differential) statistics.
        master_series = master_df[f"{self.master}_rate"]
        master_quantile = master_series.quantile([0.25, 0.5, 0.75])

        res[self.master] = StatisticsInfo(
            platform=self.master,
            std=master_series.std(),
            mean=master_series.mean(),
            max=master_series.max(),
            # Pearson correlation of rate vs. time, i.e. a trend indicator.
            corr=master_series.corr(master_df["event_time_hour"]),
            min=master_series.min(),
            quantile={str(k): float(v) for k, v in master_quantile.to_dict().items()},
            period=master_hour,
        )

        for platform, platform_df in df_dict.items():
            platform_hour = platform_df.iloc[0]["hour"]
            # Inner-join on (symbol, hour) so only shared timestamps compare.
            merged_df = pd.merge(
                master_df, platform_df, on=["symbol", "event_time_hour"], how="inner"
            )

            # Signed spread: positive means the master's rate is higher.
            merged_df["rate_diff"] = (
                merged_df[f"{self.master}_rate"] - merged_df[f"{platform}_rate"]
            )
            diff_series = merged_df["rate_diff"]
            rate_quantile = diff_series.quantile([0.25, 0.5, 0.75])
            res[platform] = StatisticsInfo(
                platform=platform,
                std=diff_series.std(),
                mean=diff_series.mean(),
                max=diff_series.max(),
                # Trend of the spread over time (Pearson corr vs. timestamp).
                corr=diff_series.corr(merged_df["event_time_hour"]),
                min=diff_series.min(),
                # float() coercion for consistency with the master entry.
                quantile={str(k): float(v) for k, v in rate_quantile.to_dict().items()},
                period=platform_hour,
            )

        return res

    def query_symbol_funding_rate_data(self, symbol: str, platform: str):
        """Load funding-rate rows for one symbol/platform and reindex them
        onto a complete hourly grid (newest first), forward-filling gaps.

        Returns an empty DataFrame when the query yields no rows.
        """
        # NOTE(review): values are interpolated into the SQL string; this is
        # only safe while symbol/platform come from trusted config, never from
        # user input.
        sql = f"select exchange, symbol, rate as {platform}_rate, event_time from {self.table_name} where symbol = '{symbol}' and event_time >= {self.begin_timestamp} and event_time < {self.end_timestamp} and exchange='{platform}' order by event_time desc ;"
        result = self._mysql.fetch_data(sql)
        sql_result_df = pd.DataFrame(result)
        if sql_result_df.empty:
            return sql_result_df
        # Rates may arrive as strings/decimals; statistics need floats.
        sql_result_df[f"{platform}_rate"] = sql_result_df[f"{platform}_rate"].astype(
            float
        )
        # Some exchanges report event_time off the exact hour, so bucket by
        # epoch *seconds* (despite the "_hour" suffix, these values are epoch
        # seconds, not hours or milliseconds).
        sql_result_df["event_time"] = sql_result_df["event_time"].astype(int)
        sql_result_df["event_time_hour"] = sql_result_df["event_time"] // 1000
        # Funding interval: gap to the next (older) row, seconds -> hours.
        sql_result_df["period"] = sql_result_df["event_time_hour"].diff(periods=-1)
        sql_result_df["hour"] = sql_result_df["period"] // 3600

        sql_result_df = sql_result_df[::-1]  # oldest first

        start_ts = int(sql_result_df["event_time"].iloc[0])
        start_dt = pd.to_datetime(start_ts // 1000, unit="s")
        # Build a complete hourly timeline covering the whole window...
        temp_date_df = pd.date_range(
            start_dt,
            periods=self.day_num * 24,
            freq="h",
        )
        temp_date_df = temp_date_df[::-1]  # newest first

        # ...expressed as epoch seconds (ns // 10**9), matching event_time_hour.
        temp_time_df = pd.DataFrame(
            {"event_time_hour": temp_date_df.astype(int) // 10**9}
        )
        # Left-join so every grid hour is present even without a DB row.
        merged_df = temp_time_df.merge(sql_result_df, on="event_time_hour", how="left")
        # Forward-fill in chronological order so gaps inherit the last row.
        merged_df = merged_df.sort_values("event_time_hour").ffill()
        # Re-assert the grid timestamps (index-aligned; guards against ffill
        # having touched the key column).
        merged_df["event_time_hour"] = temp_time_df["event_time_hour"]
        merged_df = merged_df.sort_values(
            "event_time_hour", ascending=False
        ).reset_index(drop=True)
        return merged_df
202
+
203
+
204
if __name__ == "__main__":
    # Manual smoke test. The constructor also requires the master platform and
    # live redis/mysql clients; the previous call passed only three positional
    # arguments and raised TypeError before doing any work.
    funding_statistics = FuturesFundingRateHistoryStatistics(
        1765296000000,
        1765382400000,
        ["BTCUSDT", "ETHUSDT"],
        "binance",
        None,  # TODO: pass a real redis client
        None,  # TODO: pass a real mysql client
    )
    funding_statistics.symbols_statistics()
@@ -0,0 +1,211 @@
1
+ import datetime
2
+ import json
3
+ from typing import Optional
4
+
5
+ import pandas as pd
6
+ from pydantic import BaseModel
7
+
8
+ from kaq_quant_common.utils import logger_utils
9
+
10
+ platforms = ["binance", "bitget", "bybit", "gate", "htx", "okx"]
11
+
12
+
13
+ # 对比结果结构体
14
+ class StatisticsInfo(BaseModel):
15
+ # 对比平台
16
+ platform: str
17
+ # 标准差
18
+ std: float
19
+ # 平均值
20
+ mean: float
21
+ # 最大值
22
+ max: float
23
+ # 皮尔逊系数(斜率)
24
+ corr: float
25
+ # 最小值
26
+ min: float
27
+ # 4分位数
28
+ quantile: dict[str, float]
29
+ # 相隔时间(hour)
30
+ period: Optional[int] = 0
31
+
32
+
33
+ # 这个类是统计合约K线历史数据的,在确保已经抓取完数据后使用
34
+ class FuturesKlineHistoryStatistics:
35
+ def __init__(
36
+ self,
37
+ begin_timestamp: int,
38
+ end_timestamp: int,
39
+ symbols: list[str],
40
+ master: str,
41
+ redis: None,
42
+ mysql: None,
43
+ ):
44
+ self.redis_key = "kaq_futures_kline_history_statistics"
45
+ self.begin_timestamp = begin_timestamp
46
+ self.end_timestamp = end_timestamp
47
+ self.symbols = symbols
48
+ # 计算天数,每天都会有1440条数据
49
+ self.day_num = (end_timestamp - begin_timestamp) // (24 * 3600 * 1000)
50
+ self._redis = redis
51
+ self._mysql = mysql
52
+ self.master = master
53
+ self._logger = logger_utils.get_logger()
54
+
55
+ # 针对本平台的所有交易对每个进行对应统计
56
+ def symbols_statistics(self):
57
+ for symbol in self.symbols:
58
+ try:
59
+ self.get_symbol_kline_all_platform(symbol)
60
+ except Exception as e:
61
+ self._logger.error(f"拉取{symbol}的K线数据出现异常: {e}")
62
+ continue
63
+
64
+ # 对指定交易对进行全平台的K线拉取
65
+ def get_symbol_kline_all_platform(self, symbol: str):
66
+ df_dict = {}
67
+ # 先拉自己的吧
68
+ master_df = self.query_symbol_line_data(symbol, self.master)
69
+
70
+ # 不够数据也跳过
71
+ if len(master_df) < 1440 * self.day_num:
72
+ raise Exception(
73
+ f"{self.master}平台拉取到的{symbol}的K线数据量不足{1440 * self.day_num}条,跳过后续处理"
74
+ )
75
+
76
+ # 对其它交易所进行拉取
77
+ for platform in platforms:
78
+ if platform == self.master:
79
+ continue
80
+ platform_df = self.query_symbol_line_data(symbol, platform)
81
+
82
+ if len(platform_df) < 1440 * self.day_num:
83
+ self._logger.error(
84
+ f"{platform}平台拉取到的{symbol}的K线数据量不足{1440 * self.day_num}条,跳过该交易所数据"
85
+ )
86
+ continue
87
+
88
+ df_dict[platform] = platform_df
89
+
90
+ # 开始计算差异
91
+ symbol_diffrenence_dict = self.calculate_kline_difference(
92
+ symbol, master_df, df_dict
93
+ )
94
+
95
+ self._redis.hset(
96
+ self.redis_key + ":" + self.master.upper(),
97
+ symbol,
98
+ json.dumps({k: v.model_dump() for k, v in symbol_diffrenence_dict.items()}),
99
+ )
100
+
101
+ # 计算各个平台的K线差异
102
+ def calculate_kline_difference(
103
+ self, symbol: str, master_df: pd.DataFrame, df_dict: dict
104
+ ):
105
+ res = {}
106
+ # 自己也要计算差异
107
+ master_std = master_df[f"{self.master}_close"].std()
108
+ master_mean = master_df[f"{self.master}_close"].mean()
109
+ master_max = master_df[f"{self.master}_close"].max()
110
+ master_rate_corr = master_df[f"{self.master}_close"].corr(
111
+ master_df["event_time"]
112
+ )
113
+ master_min = master_df[f"{self.master}_close"].min()
114
+ master_quantile = master_df[f"{self.master}_close"].quantile([0.25, 0.5, 0.75])
115
+
116
+ res[self.master] = StatisticsInfo(
117
+ platform=self.master,
118
+ std=master_std,
119
+ mean=master_mean,
120
+ max=master_max,
121
+ corr=master_rate_corr,
122
+ min=master_min,
123
+ quantile={str(k): float(v) for k, v in master_quantile.to_dict().items()},
124
+ )
125
+ for platform, platform_df in df_dict.items():
126
+ # 合并数据,找出差异
127
+ merged_df = pd.merge(
128
+ master_df, platform_df, on=["symbol", "event_time"], how="inner"
129
+ )
130
+ # 计算差值
131
+ merged_df["close_diff"] = (
132
+ merged_df[f"{self.master}_close"] - merged_df[f"{platform}_close"]
133
+ )
134
+ # 标准差
135
+ close_std = merged_df["close_diff"].std()
136
+ # 平均值
137
+ close_mean = merged_df["close_diff"].mean()
138
+ # 最大值
139
+ close_max = merged_df["close_diff"].max()
140
+ # 皮尔逊系数(斜率)
141
+ close_corr = merged_df["close_diff"].corr(merged_df["event_time"])
142
+ # 最小值
143
+ close_min = merged_df["close_diff"].min()
144
+ # 4分位数
145
+ close_quantile = merged_df["close_diff"].quantile([0.25, 0.5, 0.75])
146
+ # self._logger.info(
147
+ # f"{self.master}与{platform}平台的{symbol}的K线差异统计: 标准差={close_std}, 平均值={close_mean}, 最大值={close_max}, 最小值={close_min}, 皮尔逊系数={close_corr}, 四分位数={close_quantile.to_dict()}"
148
+ # )
149
+ res[platform] = StatisticsInfo(
150
+ platform=platform,
151
+ std=close_std,
152
+ mean=close_mean,
153
+ max=close_max,
154
+ corr=close_corr,
155
+ min=close_min,
156
+ quantile={str(k): float(v) for k, v in close_quantile.to_dict().items()},
157
+ )
158
+
159
+ return res
160
+
161
+ # 拉指定时间指定symbol的k线数据
162
+ def query_symbol_line_data(self, symbol: str, platform: str):
163
+ sql_result_df = pd.DataFrame()
164
+ zero_timestamp_list = self.get_zero_timestamp_list()
165
+ # 表前缀
166
+ table_name_prefix = f"kaq_{platform}_futures_kline_history"
167
+ for ts in zero_timestamp_list:
168
+ # 先转成周一日期来定表名,因为数据表是按周来分表的
169
+ date_str = self.get_monday_time(ts)
170
+ table_name = f"{table_name_prefix}_{date_str}"
171
+ sql = f"select exchange, symbol, close as {platform}_close, event_time from {table_name} where symbol = '{symbol}' and event_time >= {ts} and event_time < {ts + 86400000} order by event_time desc ;"
172
+ result = self._mysql.fetch_data(sql)
173
+ sql_result_df = pd.concat([sql_result_df, result], ignore_index=True)
174
+
175
+ return sql_result_df
176
+
177
+ # 计算某个时间戳对应的周一日期字符串
178
+ def get_monday_time(self, timestamp):
179
+ dt = datetime.datetime.fromtimestamp(timestamp / 1000)
180
+ monday = dt - datetime.timedelta(days=dt.weekday())
181
+ monday = monday.replace(hour=0, minute=0, second=0, microsecond=0)
182
+ return monday.strftime("%Y%m%d")
183
+
184
+ # 计算开始到结束时间所有的0时时间戳
185
+ def get_zero_timestamp_list(self):
186
+ # 毫秒转日期
187
+ timestamp_list = []
188
+ begin_date = datetime.datetime.fromtimestamp(
189
+ self.begin_timestamp // 1000, datetime.UTC
190
+ ).date()
191
+ end_date = datetime.datetime.fromtimestamp(
192
+ self.end_timestamp // 1000, datetime.UTC
193
+ ).date()
194
+ cur = begin_date
195
+ while cur <= end_date:
196
+ # 0点时间戳(毫秒)
197
+ dt = datetime.datetime.combine(
198
+ cur, datetime.time(0, 0), tzinfo=datetime.UTC
199
+ )
200
+ ts = int(dt.timestamp() * 1000)
201
+ timestamp_list.append(ts)
202
+ cur += datetime.timedelta(days=1)
203
+
204
+ return timestamp_list
205
+
206
+
207
if __name__ == "__main__":
    # Manual smoke test. The constructor also requires the master platform and
    # live redis/mysql clients; the previous call passed only three positional
    # arguments and raised TypeError before doing any work.
    kline_statistics = FuturesKlineHistoryStatistics(
        1765296000000,
        1765382400000,
        ["BTCUSDT", "ETHUSDT"],
        "binance",
        None,  # TODO: pass a real redis client
        None,  # TODO: pass a real mysql client
    )
    kline_statistics.symbols_statistics()
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "kaq_quant_common"
3
- version = "0.1.82"
3
+ version = "0.1.84"
4
4
  description = ""
5
5
  authors = [
6
6
  {name = "kevinfuture",email = "liuenbofuture@foxmail.com"}