rquote-0.4.6-py3-none-any.whl → rquote-0.4.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rquote/config.py +2 -0
- rquote/markets/base.py +101 -40
- rquote/markets/cn_stock.py +134 -10
- {rquote-0.4.6.dist-info → rquote-0.4.7.dist-info}/METADATA +2 -2
- {rquote-0.4.6.dist-info → rquote-0.4.7.dist-info}/RECORD +7 -7
- {rquote-0.4.6.dist-info → rquote-0.4.7.dist-info}/WHEEL +0 -0
- {rquote-0.4.6.dist-info → rquote-0.4.7.dist-info}/top_level.txt +0 -0
rquote/config.py
CHANGED
@@ -30,6 +30,8 @@ class Config:
         return cls(
             http_timeout=int(os.getenv('RQUOTE_HTTP_TIMEOUT', '10')),
             http_retry_times=int(os.getenv('RQUOTE_RETRY_TIMES', '3')),
+            http_retry_delay=float(os.getenv('RQUOTE_HTTP_RETRY_DELAY', '1.0')),
+            http_pool_size=int(os.getenv('RQUOTE_HTTP_POOL_SIZE', '10')),
             cache_enabled=os.getenv('RQUOTE_CACHE_ENABLED', 'true').lower() == 'true',
             cache_ttl=int(os.getenv('RQUOTE_CACHE_TTL', '3600')),
             log_level=os.getenv('RQUOTE_LOG_LEVEL', 'INFO'),
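The two new settings slot in alongside the existing timeout and retry-count knobs. A minimal sketch of how they could be consumed, assuming a requests-based transport; rquote's own hget wrapper is not shown in this diff, so the helper name below is illustrative only:

import os
import time

import requests  # assumed transport; not confirmed by this diff

RETRY_TIMES = int(os.getenv('RQUOTE_RETRY_TIMES', '3'))
RETRY_DELAY = float(os.getenv('RQUOTE_HTTP_RETRY_DELAY', '1.0'))
POOL_SIZE = int(os.getenv('RQUOTE_HTTP_POOL_SIZE', '10'))
TIMEOUT = int(os.getenv('RQUOTE_HTTP_TIMEOUT', '10'))

# One pooled session shared across requests.
session = requests.Session()
adapter = requests.adapters.HTTPAdapter(pool_connections=POOL_SIZE, pool_maxsize=POOL_SIZE)
session.mount('https://', adapter)

def http_get_with_retry(url: str) -> requests.Response:
    # Retry up to RETRY_TIMES, sleeping RETRY_DELAY seconds between attempts.
    for attempt in range(1, RETRY_TIMES + 1):
        try:
            resp = session.get(url, timeout=TIMEOUT)
            resp.raise_for_status()
            return resp
        except requests.RequestException:
            if attempt == RETRY_TIMES:
                raise
            time.sleep(RETRY_DELAY)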
rquote/markets/base.py
CHANGED
@@ -174,63 +174,124 @@ class Market(ABC):
 
         need_extend_forward = False  # need forward extension (newer dates)
         need_extend_backward = False  # need backward extension (earlier dates)
+        need_extend_for_length = False  # need extension to meet the length requirement (>=60 rows)
         extend_sdate = sdate
         extend_edate = edate
 
-        #
+        # Logic 1: check whether forward extension is needed (requested edate is later than the latest cached date)
         if request_edate and request_edate > cached_latest:
             need_extend_forward = True
             # extend from the latest cached date + 1 day up to the requested edate
             extend_sdate = (cached_latest + pd.Timedelta(days=1)).strftime('%Y-%m-%d')
             extend_edate = edate
 
-        #
-
+        # Logic 2: if the cached data before edate has 60 rows or fewer, fetch over the network and merge it into the cache
+        elif request_edate:
+            # count the rows before edate
+            data_before_edate = cached_df[cached_df.index <= request_edate]
+            if len(data_before_edate) <= 60:
+                need_extend_for_length = True
+                # fetch from an earlier start so there is enough data before edate (>=60 rows)
+                # go back about 4 months (120 days) to cover enough trading days
+                target_sdate = request_edate - pd.Timedelta(days=120)
+                extend_sdate = target_sdate.strftime('%Y-%m-%d')
+                extend_edate = edate
+                logger.info(f"[PRICE EXTEND LENGTH] symbol={symbol}, rows before edate={len(data_before_edate)} <= 60, fetching from an earlier date, extend_sdate={extend_sdate}")
+
+        # Logic 3: the cache has data, but the new edate is earlier than the minimum cached date
+        elif request_edate and request_edate < cached_earliest:
+            need_extend_backward = True
+            # fetch backwards from the earliest cached date until edate is covered and more than 60 rows precede it
+            # first try going back enough days from edate (about 4 months)
+            target_sdate = request_edate - pd.Timedelta(days=120)
+            extend_sdate = target_sdate.strftime('%Y-%m-%d')
+            extend_edate = (cached_earliest - pd.Timedelta(days=1)).strftime('%Y-%m-%d')
+            logger.info(f"[PRICE EXTEND EARLY] symbol={symbol}, edate={request_edate} is earlier than the earliest cached date={cached_earliest}, fetching earlier data, extend_sdate={extend_sdate}, extend_edate={extend_edate}")
+
+        # check whether backward extension is needed (requested sdate is earlier than the earliest cached date, and this is not case 3)
+        if request_sdate and request_sdate < cached_earliest and not need_extend_backward:
             need_extend_backward = True
             # extend from the requested sdate up to the earliest cached date - 1 day
             extend_sdate = sdate
             extend_edate = (cached_earliest - pd.Timedelta(days=1)).strftime('%Y-%m-%d')
 
         # if extension is needed, fetch the missing data
-        if need_extend_forward or need_extend_backward:
-            logger.info(f"[PRICE EXTEND] extension needed, symbol={symbol}, extend_sdate={extend_sdate}, extend_edate={extend_edate}, need_forward={need_extend_forward}, need_backward={need_extend_backward}")
-            # fetch the extended data
-            extended_result = fetch_func(symbol, extend_sdate, extend_edate, freq, days, fq)
-            _, _, extended_df = extended_result
-            logger.info(f"[PRICE FETCH] fetched extended data from the network, rows={len(extended_df)}")
+        if need_extend_forward or need_extend_backward or need_extend_for_length:
+            logger.info(f"[PRICE EXTEND] extension needed, symbol={symbol}, extend_sdate={extend_sdate}, extend_edate={extend_edate}, need_forward={need_extend_forward}, need_backward={need_extend_backward}, need_length={need_extend_for_length}")
 
-
-
-
-
-
-
-
-
-
-
-
-            pass
-
-            # merge the data
-            merged_df = pd.concat([cached_df, extended_df])
-            merged_df = merged_df[~merged_df.index.duplicated(keep='last')]
-            merged_df = merged_df.sort_index()
-
-            # filter down to the requested date range
-            if request_sdate or request_edate:
-                if request_sdate and request_edate:
-                    mask = (merged_df.index >= request_sdate) & (merged_df.index <= request_edate)
-                elif request_sdate:
-                    mask = merged_df.index >= request_sdate
-                else:
-                    mask = merged_df.index <= request_edate
-                merged_df = merged_df[mask]
+            # for Logic 2 and Logic 3, fetching may have to loop until the condition is met
+            max_iterations = 5  # at most 5 iterations, to avoid an infinite loop
+            iteration = 0
+            current_merged_df = cached_df.copy()
+
+            while iteration < max_iterations:
+                iteration += 1
+                # fetch the extended data
+                extended_result = fetch_func(symbol, extend_sdate, extend_edate, freq, days, fq)
+                _, _, extended_df = extended_result
+                logger.info(f"[PRICE FETCH] fetched extended data from the network (iteration {iteration}), rows={len(extended_df)}")
 
-
-
-
-
+                if not extended_df.empty:
+                    # make sure both DataFrames have a DatetimeIndex
+                    if not isinstance(current_merged_df.index, pd.DatetimeIndex):
+                        try:
+                            current_merged_df.index = pd.to_datetime(current_merged_df.index)
+                        except (ValueError, TypeError):
+                            pass
+                    if not isinstance(extended_df.index, pd.DatetimeIndex):
+                        try:
+                            extended_df.index = pd.to_datetime(extended_df.index)
+                        except (ValueError, TypeError):
+                            pass
+
+                    # merge the data
+                    current_merged_df = pd.concat([current_merged_df, extended_df])
+                    current_merged_df = current_merged_df[~current_merged_df.index.duplicated(keep='last')]
+                    current_merged_df = current_merged_df.sort_index()
+
+                    # check whether the condition is met (Logic 2 and Logic 3 need the length check)
+                    if need_extend_for_length or need_extend_backward:
+                        if request_edate:
+                            data_before_edate = current_merged_df[current_merged_df.index <= request_edate]
+                            if len(data_before_edate) > 60:
+                                # condition met, exit the loop
+                                logger.info(f"[PRICE EXTEND] length requirement met, rows before edate={len(data_before_edate)}")
+                                break
+                            else:
+                                # still need to fetch earlier data
+                                current_earliest = current_merged_df.index.min()
+                                if current_earliest <= pd.to_datetime(extend_sdate):
+                                    # already reached the earliest available data, cannot go further back
+                                    logger.warning(f"[PRICE EXTEND] earliest data reached, but rows before edate={len(data_before_edate)} is still under 60")
+                                    break
+                                # keep pushing the start date back
+                                extend_sdate_dt = pd.to_datetime(extend_sdate) - pd.Timedelta(days=120)
+                                extend_sdate = extend_sdate_dt.strftime('%Y-%m-%d')
+                                logger.info(f"[PRICE EXTEND] fetching earlier data, new extend_sdate={extend_sdate}")
+                                continue
+
+                    # for Logic 1 (forward extension), no loop is needed, exit directly
+                    if need_extend_forward and not need_extend_for_length and not need_extend_backward:
+                        break
+                else:
+                    # fetch failed, exit the loop
+                    logger.warning(f"[PRICE EXTEND] fetched empty data, exiting loop")
+                    break
+
+            # filter down to the requested date range
+            if request_sdate or request_edate:
+                if request_sdate and request_edate:
+                    mask = (current_merged_df.index >= request_sdate) & (current_merged_df.index <= request_edate)
+                elif request_sdate:
+                    mask = current_merged_df.index >= request_sdate
+                else:
+                    mask = current_merged_df.index <= request_edate
+                current_merged_df = current_merged_df[mask]
+
+            result = (symbol, name, current_merged_df)
+            # update the cache (with the original key; PersistentCache merges intelligently)
+            self._put_cache(cache_key, result)
+            return result
 
         # no extension needed, return the cached data directly
         # note: PersistentCache.get() has already filtered by the requested date range,
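The replacement block turns a single fetch-and-merge into a bounded loop: for Logic 2 and Logic 3 it keeps widening the request window backwards in 120-day steps until more than 60 rows precede edate, giving up after five iterations. A standalone sketch of that pattern, with fetch_window standing in for the real fetch_func:

import pandas as pd

def extend_until_enough(cached_df, fetch_window, edate, min_rows=60,
                        step_days=120, max_iterations=5):
    # Widen the window backwards until more than min_rows rows precede edate.
    merged = cached_df.copy()
    request_edate = pd.to_datetime(edate)
    extend_sdate = request_edate - pd.Timedelta(days=step_days)
    for _ in range(max_iterations):
        chunk = fetch_window(extend_sdate.strftime('%Y-%m-%d'), edate)
        if chunk.empty:
            break  # nothing more to fetch
        merged = pd.concat([merged, chunk])
        merged = merged[~merged.index.duplicated(keep='last')].sort_index()
        if len(merged[merged.index <= request_edate]) > min_rows:
            break  # enough history before edate
        extend_sdate -= pd.Timedelta(days=step_days)  # widen and retry
    return merged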
rquote/markets/cn_stock.py
CHANGED
@@ -31,11 +31,10 @@ class CNStockMarket(Market):
         if symbol[:2] == 'BK':
             return self._get_bk_price(symbol)
 
-        #
-
-
-
-        # use the base class's cache logic
+        # PT codes also use the base class's cache logic (including the length check and extension logic)
+        # all markets apply these two rules through the base class:
+        # 1. if the cached data before edate has 60 rows or fewer, fetch over the network and merge it into the cache
+        # 2. if the cache has data but the new edate is earlier than the minimum cached date, fetch from an earlier date and merge
         return super().get_price(symbol, sdate, edate, freq, days, fq)
 
     def _fetch_price_data(self, symbol: str, sdate: str = '', edate: str = '',
@@ -97,33 +96,135 @@ class CNStockMarket(Market):
         # check the cache first (base_key format; dates are passed via parameters)
         base_key = f"{symbol}:{freq}:{fq}"
         cached = self._get_cached(base_key, sdate=sdate, edate=edate)
+        cached_df = None
+        need_fetch = False  # flag: whether a network fetch is needed
         if cached:
-            return cached
-
+            _, name, cached_df = cached
+            # check whether the cached data satisfies the requested edate
+            if not cached_df.empty and isinstance(cached_df.index, pd.DatetimeIndex):
+                cached_earliest = cached_df.index.min()
+                cached_latest = cached_df.index.max()
+                request_edate = pd.to_datetime(edate) if edate else None
+                request_sdate = pd.to_datetime(sdate) if sdate else None
+
+                # Logic 1: if the requested edate is later than the latest cached date, fetch new data from the network
+                if request_edate and request_edate > cached_latest:
+                    logger.info(f"[PT CACHE INCOMPLETE] symbol={symbol}, latest cached date={cached_latest}, requested date={request_edate}, extension needed")
+                    need_fetch = True
+                # Logic 2: if the cached data before edate has 60 rows or fewer, fetch over the network and merge it into the cache
+                elif request_edate:
+                    # count the rows before edate
+                    data_before_edate = cached_df[cached_df.index <= request_edate]
+                    if len(data_before_edate) <= 60:
+                        logger.info(f"[PT CACHE INSUFFICIENT] symbol={symbol}, rows before edate={len(data_before_edate)} <= 60, more history needed")
+                        need_fetch = True
+                # Logic 3: if the cache has data but the new edate is earlier than the minimum cached date, fetch from an earlier date
+                elif request_edate and request_edate < cached_earliest:
+                    logger.info(f"[PT CACHE EARLY] symbol={symbol}, requested edate={request_edate} is earlier than the earliest cached date={cached_earliest}, fetching from an earlier date")
+                    need_fetch = True
+                else:
+                    logger.info(f"[PT CACHE HIT] symbol={symbol}, returning cached data, date range={cached_df.index.min()} to {cached_df.index.max()}")
+                    return cached
+            else:
+                logger.info(f"[PT CACHE HIT] symbol={symbol}, returning cached data")
+                return cached
 
         try:
-            url = f'https://proxy.finance.qq.com/ifzqgtimg/appstock/app/newfqkline/get?_var=kline_dayqfq&param={symbol},{freq},{sdate},{edate},{days},{fq}'
+            # determine the date range to fetch
+            extend_sdate = sdate
+            extend_edate = edate
+            need_multiple_fetch = False  # whether multiple fetches may be needed to satisfy the length requirement
+
+            if cached and cached_df is not None and not cached_df.empty and isinstance(cached_df.index, pd.DatetimeIndex):
+                cached_earliest = cached_df.index.min()
+                cached_latest = cached_df.index.max()
+                request_edate = pd.to_datetime(edate) if edate else None
+                request_sdate = pd.to_datetime(sdate) if sdate else None
+
+                # Case 1: the requested edate is later than the latest cached date; fetch from the latest cached date + 1 day
+                if request_edate and request_edate > cached_latest:
+                    extend_sdate = (cached_latest + pd.Timedelta(days=1)).strftime('%Y-%m-%d')
+                    logger.info(f"[PT FETCH] fetching from just after the latest cached date, extend_sdate={extend_sdate}, edate={edate}")
+                # Case 2: 60 rows or fewer before edate; fetch more history
+                elif request_edate:
+                    data_before_edate = cached_df[cached_df.index <= request_edate]
+                    if len(data_before_edate) <= 60:
+                        # work out how many days of data are needed to reach 60+ rows
+                        # assuming data on every trading day, about 60 trading days (~3 months) are needed
+                        # go back from edate to make sure enough data is fetched
+                        target_sdate = request_edate - pd.Timedelta(days=120)  # back about 4 months, to cover enough trading days
+                        extend_sdate = target_sdate.strftime('%Y-%m-%d')
+                        extend_edate = edate
+                        logger.info(f"[PT FETCH] under 60 rows before edate, fetching from an earlier date, extend_sdate={extend_sdate}, extend_edate={extend_edate}")
+                        need_multiple_fetch = True  # may need multiple fetches
+                # Case 3: the requested edate is earlier than the earliest cached date; fetch backwards from the earliest cached date
+                elif request_edate and request_edate < cached_earliest:
+                    # fetch backwards from the earliest cached date until edate is covered and more than 60 rows precede it
+                    # first try going back enough days from edate
+                    target_sdate = request_edate - pd.Timedelta(days=120)  # back about 4 months
+                    extend_sdate = target_sdate.strftime('%Y-%m-%d')
+                    extend_edate = (cached_earliest - pd.Timedelta(days=1)).strftime('%Y-%m-%d')
+                    logger.info(f"[PT FETCH] edate is earlier than the earliest cached date, fetching from an earlier date, extend_sdate={extend_sdate}, extend_edate={extend_edate}")
+                    need_multiple_fetch = True  # may need multiple fetches
+
+            url = f'https://proxy.finance.qq.com/ifzqgtimg/appstock/app/newfqkline/get?_var=kline_dayqfq&param={symbol},{freq},{extend_sdate},{extend_edate},{days},{fq}'
             response = hget(url)
             if not response:
                 logger.warning(f'{symbol} hget failed')
+                # the network request failed; if cached data exists, return it
+                if cached:
+                    logger.info(f"[PT FALLBACK] network request failed, returning cached data")
+                    return cached
                 return symbol, 'None', pd.DataFrame([])
 
             response_text = response.text
             json_start = response_text.find('{')
             if json_start == -1:
                 logger.warning(f'{symbol} invalid response format')
+                # parsing failed; if cached data exists, return it
+                if cached:
+                    logger.info(f"[PT FALLBACK] parse failed, returning cached data")
+                    return cached
                 return symbol, 'None', pd.DataFrame([])
 
             data = json.loads(response_text[json_start:])
             if data.get('code') != 0:
                 logger.warning(f'{symbol} API returned error: {data.get("msg", "Unknown error")}')
+                # the API returned an error; if cached data exists, return it
+                if cached:
+                    logger.info(f"[PT FALLBACK] API error, returning cached data")
+                    return cached
                 return symbol, 'None', pd.DataFrame([])
 
             # use the parser
             try:
                 parser = KlineParser()
                 name, df = parser.parse_tencent_kline(data, symbol)
-                result = (symbol, name, df)
+
+                # if cached data exists, merge old and new data
+                if cached and cached_df is not None and not cached_df.empty and isinstance(cached_df.index, pd.DatetimeIndex):
+                    # make sure both DataFrames have a DatetimeIndex
+                    if not isinstance(df.index, pd.DatetimeIndex):
+                        try:
+                            df.index = pd.to_datetime(df.index)
+                        except (ValueError, TypeError):
+                            pass
+
+                    # merge the data
+                    merged_df = pd.concat([cached_df, df])
+                    merged_df = merged_df[~merged_df.index.duplicated(keep='last')]
+                    merged_df = merged_df.sort_index()
+
+                    # filter down to the requested date range
+                    if edate:
+                        request_edate = pd.to_datetime(edate)
+                        merged_df = merged_df[merged_df.index <= request_edate]
+
+                    result = (symbol, name, merged_df)
+                    logger.info(f"[PT MERGE] merged cached and new data, cached rows={len(cached_df)}, new rows={len(df)}, merged rows={len(merged_df)}")
+                else:
+                    result = (symbol, name, df)
+
                 self._put_cache(base_key, result)
                 return result
             except Exception as e:
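The Tencent endpoint answers in a JSONP-like wrapper (hence the _var=kline_dayqfq prefix in the URL), which is why the code scans for the first '{' before calling json.loads and then checks the code field. A minimal sketch of that parse-and-validate step; the sample payload is illustrative, and any fields beyond code and msg are assumptions:

import json

def parse_tencent_payload(response_text: str):
    # Strip the 'kline_dayqfq=' style prefix by locating the first '{'.
    json_start = response_text.find('{')
    if json_start == -1:
        return None  # not JSON-bearing; the caller falls back to the cache
    data = json.loads(response_text[json_start:])
    if data.get('code') != 0:
        return None  # API-level error; the caller falls back to the cache
    return data

sample = 'kline_dayqfq={"code": 0, "msg": "", "data": {}}'
assert parse_tencent_payload(sample) == {'code': 0, 'msg': '', 'data': {}}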
@@ -153,7 +254,30 @@ class CNStockMarket(Market):
                 for col in ['open', 'high', 'low', 'close', 'vol']:
                     df[col] = pd.to_numeric(df[col], errors='coerce')
 
-                result = (symbol, name, df)
+                # if cached data exists, merge old and new data
+                if cached and cached_df is not None and not cached_df.empty and isinstance(cached_df.index, pd.DatetimeIndex):
+                    # make sure both DataFrames have a DatetimeIndex
+                    if not isinstance(df.index, pd.DatetimeIndex):
+                        try:
+                            df.index = pd.to_datetime(df.index)
+                        except (ValueError, TypeError):
+                            pass
+
+                    # merge the data
+                    merged_df = pd.concat([cached_df, df])
+                    merged_df = merged_df[~merged_df.index.duplicated(keep='last')]
+                    merged_df = merged_df.sort_index()
+
+                    # filter down to the requested date range
+                    if edate:
+                        request_edate = pd.to_datetime(edate)
+                        merged_df = merged_df[merged_df.index <= request_edate]
+
+                    result = (symbol, name, merged_df)
+                    logger.info(f"[PT MERGE FALLBACK] merged cached and new data, cached rows={len(cached_df)}, new rows={len(df)}, merged rows={len(merged_df)}")
+                else:
+                    result = (symbol, name, df)
+
                 self._put_cache(base_key, result)
                 return result
             except Exception as e:
{rquote-0.4.6.dist-info → rquote-0.4.7.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: rquote
-Version: 0.4.6
+Version: 0.4.7
 Summary: Mostly day quotes of cn/hk/us/fund/future markets, side with quote list fetch
 Requires-Python: >=3.9.0
 Description-Content-Type: text/markdown
@@ -18,7 +18,7 @@ Requires-Dist: duckdb>=0.9.0; extra == "persistent"
 
 ## Version Info
 
-Current version: **0.4.6**
+Current version: **0.4.7**
 
 ## Key Features
 
{rquote-0.4.6.dist-info → rquote-0.4.7.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 rquote/__init__.py,sha256=VEZOnqv7Erc34qqllI17bG9cezoaf9uNoOtX9fmhtyw,2420
-rquote/config.py,sha256=
+rquote/config.py,sha256=WTdjXCT0FcxGW1ZFe4AfjyGvGQD68uzm0rr7PAN8UBQ,1263
 rquote/exceptions.py,sha256=lJH2GC5dDhMoW_OtlBc03wlUn684-7jNPyF1NjmfVIE,569
 rquote/plots.py,sha256=UQn4sjhIzVwagfhUDM738b2HHjKo4tRdU2UCs_1-FbY,2341
 rquote/utils.py,sha256=bH0ZFIo-ZelNztzPS6BXFShXE3yGA9USI_P9INN0Y-s,310
@@ -19,8 +19,8 @@ rquote/data_sources/tencent.py,sha256=ayt1O85pheLwzX3z5c6Qij1NrmUywcsz6YcSVzdDoM
 rquote/factors/__init__.py,sha256=_ZbH2XxYtXwCJpvRVdNvGncoPSpMqrtlYmf1_fMGIjM,116
 rquote/factors/technical.py,sha256=dPDs3pDEDRV9iQJBrSoKpGFLQMjOqyoBdN2rUntpOUU,4235
 rquote/markets/__init__.py,sha256=k4F8cZgb-phqemMqhZXFPdOKsR4P--DD3d5i21vKhbg,365
-rquote/markets/base.py,sha256=
-rquote/markets/cn_stock.py,sha256=
+rquote/markets/base.py,sha256=Oe-taKEdDCfaFGCnALB-gunSFMhe_OD736bVv7OBGHU,16433
+rquote/markets/cn_stock.py,sha256=MQj_mNADY50Hc3IPviQi51ixMI_XKZXiuYHo6k1PXCI,17150
 rquote/markets/factory.py,sha256=4Txpuok0LBOLT_vAiIU-NslwVnYF7sKHCdlacAboxpo,2875
 rquote/markets/future.py,sha256=yGMyUu9Fv75jbzPbvW6_36otEeebSij7vnzow_zyEn8,7358
 rquote/markets/hk_stock.py,sha256=AhRJpWp027ACew9ogxkVCJXbqbYQ1AkbFwDJccXbvAs,1183
@@ -33,7 +33,7 @@ rquote/utils/helpers.py,sha256=V07n9BtRS8bEJH023Kca78-unk7iD3B9hn2UjELetYs,354
 rquote/utils/http.py,sha256=X0Alhnu0CNqyQeOt6ivUWmh2XwrWxXd2lSpQOKDdnzw,3249
 rquote/utils/logging.py,sha256=fs2YF1Srux4LLTdk_Grjm5g1f4mzewI38VVSAI82goA,1471
 rquote/utils/web.py,sha256=I8_pcThW6VUvahuRHdtp32iZwr85hEt1hB6TgznMy_U,3854
-rquote-0.4.6.dist-info/METADATA,sha256=
-rquote-0.4.6.dist-info/WHEEL,sha256=
-rquote-0.4.6.dist-info/top_level.txt,sha256=
-rquote-0.4.6.dist-info/RECORD,,
+rquote-0.4.7.dist-info/METADATA,sha256=Ugxawwms3YT8209UMnAaPwxZ20L2_fc2SmIlHQPV4YA,14898
+rquote-0.4.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+rquote-0.4.7.dist-info/top_level.txt,sha256=CehAiaZx7Fo8HGoV2zd5GhILUW1jQEN8YS-cWMlrK9Y,7
+rquote-0.4.7.dist-info/RECORD,,
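For reference, each RECORD entry has the form path,sha256=<digest>,<size>, where the digest is the urlsafe-base64-encoded SHA-256 of the file with the trailing '=' padding removed, per the wheel spec. A quick sketch of how such a digest is computed:

import base64
import hashlib

def record_hash(data: bytes) -> str:
    # urlsafe base64 of the SHA-256 digest, '=' padding stripped (PEP 427 / PEP 376 style).
    digest = hashlib.sha256(data).digest()
    return 'sha256=' + base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')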
{rquote-0.4.6.dist-info → rquote-0.4.7.dist-info}/WHEEL
File without changes
{rquote-0.4.6.dist-info → rquote-0.4.7.dist-info}/top_level.txt
File without changes