analyser_hj3415 4.1.4__py3-none-any.whl → 4.1.6__py3-none-any.whl

This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
@@ -377,48 +377,9 @@ class Mil:
         return myredis.Base.fetch_and_cache_data(redis_name, refresh, fetch_generate_data, refresh, timer=expire_time)
 
     @classmethod
-    def bulk_generate_data(cls, codes: List[str], refresh: bool) -> Dict[str, MilData]:
-        # --- (1) Pipelined GET ---
-        pipe = myredis.Base.redis_client.pipeline()
-        redis_keys = [f"{code}_{cls.REDIS_MIL_DATA_SUFFIX}" for code in codes]
-        for redis_key in redis_keys:
-            pipe.get(redis_key)
-        results_from_redis = pipe.execute()  # [val1, val2, ...]
-
-        final_results = {}
-        missing_codes = []
-
-        # If refresh=True, ignore cached data and recompute, so treat every code as missing
-        if refresh:
-            missing_codes = codes[:]
-        else:
-            # If refresh=False, recompute only the codes whose Redis value is None
-            for code, val in zip(codes, results_from_redis):
-                if val is None:
-                    missing_codes.append(code)
-                else:
-                    # If Redis holds pickled data, unpickle it and collect it
-                    red_data = pickle.loads(val)
-                    final_results[code] = red_data
-
-        # --- (2) Compute only the codes that need it ---
-        newly_computed_data = {}
-        for code in missing_codes:
-            mylogger.debug(f"*** bulk_generate_data : {code}")
-            data = cls(code)._generate_data(refresh=True)
-            newly_computed_data[code] = data
-
-        # --- (3) Pipelined SET ---
-        if newly_computed_data:
-            pipe = myredis.Base.redis_client.pipeline()
-            for code, data in newly_computed_data.items():
-                redis_key = f"{code}_{cls.REDIS_MIL_DATA_SUFFIX}"
-                # Serialize the data object with pickle
-                pickled_data = pickle.dumps(data)
-                # SET + expire_time
-                pipe.setex(redis_key, expire_time, pickled_data)
-            pipe.execute()
-
-        # Final result dict (cache hits + newly computed entries)
-        final_results.update(newly_computed_data)
-        return final_results
+    def bulk_get_data(cls, codes: List[str], refresh: bool) -> Dict[str, MilData]:
+        return myredis.Base.bulk_get_or_compute(
+            [f"{code}_{cls.REDIS_MIL_DATA_SUFFIX}" for code in codes],
+            lambda key: cls(key[:6])._generate_data(refresh=True),
+            refresh=refresh
+        )
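
The body removed from Mil above, and the matching bodies removed from Red and MyProphet further down, were three near-identical copies of one pipelined get-compute-set routine. 4.1.6 replaces all of them with a single shared helper, myredis.Base.bulk_get_or_compute, which now lives in db_hj3415 (hence the db-hj3415>=4.5.2 pin bumped in METADATA below). The helper's implementation is not part of this diff; inferred from its call sites, it plausibly works like the following sketch, where the exact signature, the timer default, and keying the result dict by Redis key are assumptions rather than the released code:

    # Hypothetical sketch of myredis.Base.bulk_get_or_compute (db_hj3415>=4.5.2),
    # reconstructed from the call sites in this diff -- not the released code.
    import pickle
    from typing import Any, Callable, Dict, List

    def bulk_get_or_compute(redis_client, keys: List[str],
                            compute: Callable[[str], Any],
                            refresh: bool, timer: int = 3600) -> Dict[str, Any]:
        # (1) Pipelined GET: fetch every key in a single round trip.
        pipe = redis_client.pipeline()
        for key in keys:
            pipe.get(key)
        cached = pipe.execute()  # [val1, val2, ...]

        results: Dict[str, Any] = {}
        if refresh:
            missing = list(keys)  # ignore the cache and recompute everything
        else:
            missing = []
            for key, val in zip(keys, cached):
                if val is None:
                    missing.append(key)  # cache miss: recompute below
                else:
                    results[key] = pickle.loads(val)  # cache hit: unpickle

        # (2) Compute only the missing keys.
        computed = {key: compute(key) for key in missing}

        # (3) Pipelined SETEX: write recomputed values back with a TTL.
        if computed:
            pipe = redis_client.pipeline()
            for key, data in computed.items():
                pipe.setex(key, timer, pickle.dumps(data))
            pipe.execute()

        results.update(computed)  # cache hits + newly computed entries
        return results

Note how each caller adapts to the key-based compute callback: Mil and Red recover the six-digit stock code from the Redis key with key[:6], while bulk_get_latest_data in prophet.py passes bare tickers as the keys.
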
@@ -3,7 +3,6 @@ from collections import OrderedDict
 from dataclasses import dataclass
 from typing import Tuple, Dict, List
 import math
-import pickle
 
 from utils_hj3415 import tools, setup_logger
 from db_hj3415 import myredis
@@ -278,51 +277,12 @@ class Red:
         return myredis.Base.fetch_and_cache_data(redis_name, refresh, fetch_generate_data, refresh, timer=expire_time)
 
     @classmethod
-    def bulk_generate_data(cls, codes: List[str], expect_earn: float, refresh: bool) -> Dict[str, RedData]:
-        # --- (1) Pipelined GET ---
-        pipe = myredis.Base.redis_client.pipeline()
-        redis_keys = [f"{code}_{cls.REDIS_RED_DATA_SUFFIX}_{expect_earn}" for code in codes]
-        for redis_key in redis_keys:
-            pipe.get(redis_key)
-        results_from_redis = pipe.execute()  # [val1, val2, ...]
-
-        final_results = {}
-        missing_codes = []
-
-        # If refresh=True, ignore cached data and recompute, so treat every code as missing
-        if refresh:
-            missing_codes = codes[:]
-        else:
-            # If refresh=False, recompute only the codes whose Redis value is None
-            for code, val in zip(codes, results_from_redis):
-                if val is None:
-                    missing_codes.append(code)
-                else:
-                    # If Redis holds pickled data, unpickle it and collect it
-                    red_data = pickle.loads(val)
-                    final_results[code] = red_data
-
-        # --- (2) Compute only the codes that need it ---
-        newly_computed_data = {}
-        for code in missing_codes:
-            mylogger.debug(f"*** bulk_generate_data : {code}")
-            data = cls(code, expect_earn)._generate_data(refresh=True)
-            newly_computed_data[code] = data
-
-        # --- (3) Pipelined SET ---
-        if newly_computed_data:
-            pipe = myredis.Base.redis_client.pipeline()
-            for code, data in newly_computed_data.items():
-                redis_key = f"{code}_{cls.REDIS_RED_DATA_SUFFIX}_{expect_earn}"
-                # Serialize the data object with pickle
-                pickled_data = pickle.dumps(data)
-                # SET + expire_time
-                pipe.setex(redis_key, expire_time, pickled_data)
-            pipe.execute()
-
-        # Final result dict (cache hits + newly computed entries)
-        final_results.update(newly_computed_data)
-        return final_results
+    def bulk_get_data(cls, codes: List[str], expect_earn: float, refresh: bool) -> Dict[str, RedData]:
+        return myredis.Base.bulk_get_or_compute(
+            [f"{code}_{cls.REDIS_RED_DATA_SUFFIX}_{expect_earn}" for code in codes],
+            lambda key: cls(key[:6], expect_earn)._generate_data(refresh=True),
+            refresh=refresh
+        )
 
     @staticmethod
     def ranking(expect_earn: float = 0.06, refresh=False) -> OrderedDict:
@@ -2,7 +2,6 @@ from collections import OrderedDict
 from datetime import datetime, timedelta
 from typing import Tuple, List, Dict, Union
 
-import pickle
 import yfinance as yf
 import pandas as pd
 from prophet import Prophet
@@ -137,6 +136,7 @@ class MyProphet:
             df['ds'] = df['ds'].dt.tz_localize(None)
             # Normalize the additional variable
             df['volume_scaled'] = self.scaler.fit_transform(df[['volume']])
+
             mylogger.debug('_preprocessing_for_prophet')
             mylogger.debug(df)
             self.initialized = True
@@ -171,15 +171,20 @@ class MyProphet:
             mylogger.debug(forecast)
             return forecast
 
-        mylogger.debug("Initializing data for MyProphet")
+        mylogger.debug(f"{self.ticker} : Initializing data for MyProphet")
 
         self.scaler = StandardScaler()
         self.model = Prophet()
 
         self.raw_data = get_raw_data()
         mylogger.debug(self.raw_data)
-        self.df_real = preprocessing_for_prophet()
-        self.df_forecast = make_forecast()
+        try:
+            self.df_real = preprocessing_for_prophet()
+            self.df_forecast = make_forecast()
+        except ValueError as e:
+            mylogger.error(f"{self.ticker} : empty DataFrame...{e}")
+            self.df_real = pd.DataFrame()
+            self.df_forecast = pd.DataFrame()
 
     def _make_prophet_latest_data(self) -> ProphetLatestData:
         def scoring(price: float, yhat_lower: float, yhat_upper: float, method: str = 'sigmoid') -> Tuple[str, int]:
@@ -226,22 +231,32 @@ class MyProphet:
 
         if not self.initialized:
             self.initializing()
-        latest_row = self.df_real.iloc[-1]
-        latest_yhat = \
-            self.df_forecast.loc[
-                self.df_forecast['ds'] == latest_row['ds'], ['ds', 'yhat_lower', 'yhat_upper', 'yhat']].iloc[
-                0].to_dict()
-
-        data = ProphetLatestData(
-            ticker=self.ticker,
-            date=latest_row['ds'].date(),
-            price=latest_row['y'],
-            yhat=latest_yhat['yhat'],
-            yhat_lower=latest_yhat['yhat_lower'],
-            yhat_upper=latest_yhat['yhat_upper'],
-        )
-
-        data.trading_action, data.score = scoring(data.price, data.yhat_lower, data.yhat_upper)
+        try:
+            latest_row = self.df_real.iloc[-1]
+            latest_yhat = \
+                self.df_forecast.loc[
+                    self.df_forecast['ds'] == latest_row['ds'], ['ds', 'yhat_lower', 'yhat_upper', 'yhat']].iloc[
+                    0].to_dict()
+
+            data = ProphetLatestData(
+                ticker=self.ticker,
+                date=latest_row['ds'].date(),
+                price=latest_row['y'],
+                yhat=latest_yhat['yhat'],
+                yhat_lower=latest_yhat['yhat_lower'],
+                yhat_upper=latest_yhat['yhat_upper'],
+            )
+
+            data.trading_action, data.score = scoring(data.price, data.yhat_lower, data.yhat_upper)
+        except Exception:
+            data = ProphetLatestData(
+                ticker=self.ticker,
+                date=datetime.now().date(),
+                price=float('nan'),
+                yhat=float('nan'),
+                yhat_lower=float('nan'),
+                yhat_upper=float('nan'),
+            )
         return data
 
     def generate_latest_data(self, refresh: bool) -> ProphetLatestData:
@@ -257,7 +272,7 @@ class MyProphet:
         mylogger.debug("**** Start generate_data... ****")
         redis_name = f'{self.ticker}_{self.REDIS_LATEST_DATA_SUFFIX}'
 
-        mylogger.info(
+        mylogger.debug(
             f"redisname: '{redis_name}' / refresh : {refresh} / expire_time : {expire_time / 3600}h")
 
         prophet_data = myredis.Base.fetch_and_cache_data(redis_name, refresh, self._make_prophet_latest_data, timer=expire_time)
@@ -285,20 +300,31 @@ class MyProphet:
             if not self.initialized:
                 self.initializing()
 
-            # Merge on date (outer join)
-            merged_df = pd.merge(self.df_real, self.df_forecast, on="ds", how="outer")
-            # Sort by date
-            merged_df = merged_df.sort_values(by="ds").reset_index(drop=True)
-
-            data = ProphetChartData(
-                ticker=self.ticker,
-                labels=merged_df["ds"].tolist(),
-                prices=[{"x": ds, "y": y} for ds, y in zip(merged_df["ds"], merged_df["y"]) if pd.notna(y)],  # type: ignore
-                yhats=[{"x": ds, "y": yhat} for ds, yhat in zip(merged_df["ds"], merged_df["yhat"])],  # type: ignore
-                yhat_uppers=[{"x": ds, "y": yhat_upper} for ds, yhat_upper in zip(merged_df["ds"], merged_df["yhat_upper"])],  # type: ignore
-                yhat_lowers=[{"x": ds, "y": yhat_lower} for ds, yhat_lower in zip(merged_df["ds"], merged_df["yhat_lower"])],  # type: ignore
-                is_prophet_up=tsa.common.is_up_by_OLS(self.df_forecast.set_index('ds')['yhat'].to_dict()),
-            )
+            try:
+                # Merge on date (outer join)
+                merged_df = pd.merge(self.df_real, self.df_forecast, on="ds", how="outer")
+                # Sort by date
+                merged_df = merged_df.sort_values(by="ds").reset_index(drop=True)
+
+                data = ProphetChartData(
+                    ticker=self.ticker,
+                    labels=merged_df["ds"].tolist(),
+                    prices=[{"x": ds, "y": y} for ds, y in zip(merged_df["ds"], merged_df["y"]) if pd.notna(y)],  # type: ignore
+                    yhats=[{"x": ds, "y": yhat} for ds, yhat in zip(merged_df["ds"], merged_df["yhat"])],  # type: ignore
+                    yhat_uppers=[{"x": ds, "y": yhat_upper} for ds, yhat_upper in zip(merged_df["ds"], merged_df["yhat_upper"])],  # type: ignore
+                    yhat_lowers=[{"x": ds, "y": yhat_lower} for ds, yhat_lower in zip(merged_df["ds"], merged_df["yhat_lower"])],  # type: ignore
+                    is_prophet_up=tsa.common.is_up_by_OLS(self.df_forecast.set_index('ds')['yhat'].to_dict()),
+                )
+            except Exception:
+                data = ProphetChartData(
+                    ticker=self.ticker,
+                    labels=[],
+                    prices=[],
+                    yhats=[],
+                    yhat_uppers=[],
+                    yhat_lowers=[],
+                    is_prophet_up=False,
+                )
             return data
 
         prophet_chart_data = myredis.Base.fetch_and_cache_data(redis_name, refresh, fetch_generate_prophet_chart_data,
@@ -326,49 +352,11 @@ class MyProphet:
 
     @staticmethod
     def bulk_get_latest_data(tickers: List[str], refresh: bool) -> Dict[str, ProphetLatestData]:
-        # --- (1) Pipelined GET ---
-        pipe = myredis.Base.redis_client.pipeline()
-        redis_keys = [f"{ticker}_{MyProphet.REDIS_LATEST_DATA_SUFFIX}" for ticker in tickers]
-        for redis_key in redis_keys:
-            pipe.get(redis_key)
-        results_from_redis = pipe.execute()  # [val1, val2, ...]
-
-        final_results = {}
-        missing_tickers = []
-
-        # If refresh=True, ignore cached data and recompute, so treat every ticker as missing
-        if refresh:
-            missing_tickers = tickers[:]
-        else:
-            # If refresh=False, recompute only the tickers whose Redis value is None
-            for ticker, val in zip(tickers, results_from_redis):
-                if val is None:
-                    missing_tickers.append(ticker)
-                else:
-                    # If Redis holds pickled data, unpickle it and collect it
-                    prophet_data = pickle.loads(val)
-                    final_results[ticker] = prophet_data
-
-        # --- (2) Compute only the tickers that need it ---
-        newly_computed_data = {}
-        for ticker in missing_tickers:
-            data = MyProphet(ticker)._make_prophet_latest_data()
-            newly_computed_data[ticker] = data
-
-        # --- (3) Pipelined SET ---
-        if newly_computed_data:
-            pipe = myredis.Base.redis_client.pipeline()
-            for ticker, data in newly_computed_data.items():
-                redis_key = f"{ticker}_{MyProphet.REDIS_LATEST_DATA_SUFFIX}"
-                # Serialize the ProphetLatestData object with pickle
-                pickled_data = pickle.dumps(data)
-                # SET + expire_time
-                pipe.setex(redis_key, expire_time, pickled_data)
-            pipe.execute()
-
-        # Final result dict (cache hits + newly computed entries)
-        final_results.update(newly_computed_data)
-        return final_results
+        return myredis.Base.bulk_get_or_compute(
+            tickers,
+            lambda ticker: MyProphet(ticker)._make_prophet_latest_data(),
+            refresh=refresh,
+        )
 
 
 class CorpProphet(MyProphet):
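
With all three call sites on the shared helper, a caller can warm or read a whole batch of forecasts in two Redis round trips instead of one request per ticker. An illustrative use of the new API (the tickers are examples only, and attribute access assumes the cached objects were built by _make_prophet_latest_data):

    # Illustrative only: batch-fetch cached Prophet data, computing any misses.
    from analyser_hj3415.analyser.tsa.prophet import MyProphet

    latest = MyProphet.bulk_get_latest_data(["005930.KS", "000660.KS"], refresh=False)
    for ticker, data in latest.items():
        print(ticker, data.trading_action, data.score)
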
@@ -1,11 +1,11 @@
 Metadata-Version: 2.3
 Name: analyser_hj3415
-Version: 4.1.4
+Version: 4.1.6
 Summary: Stock analyser and database processing programs
 Requires-Python: >=3.6
 Description-Content-Type: text/markdown
 Requires-Dist: utils-hj3415>=3.0.10
-Requires-Dist: db-hj3415>=4.5.0
+Requires-Dist: db-hj3415>=4.5.2
 Requires-Dist: scikit-learn>=1.5.2
 Requires-Dist: plotly>=5.24.1
 Requires-Dist: yfinance>=0.2.44
@@ -5,13 +5,13 @@ analyser_hj3415/analyser/eval/__init__.py,sha256=IP1d0Q3nOCAD3zK1qxrC685MkJQfUh-
 analyser_hj3415/analyser/eval/blue.py,sha256=p5JPwkQYoO0dsOe3VnfMV3pOWLzNsAFvLCUK56f85Xo,10782
 analyser_hj3415/analyser/eval/common.py,sha256=sNXapoofShA43ww_SLjXmIjkrAr1AhAcezdaN_X_3Us,11443
 analyser_hj3415/analyser/eval/growth.py,sha256=tlHxLx4u5h7bNG0T8ViJujX20QllfrSaBl-TBqFNkEs,6362
-analyser_hj3415/analyser/eval/mil.py,sha256=aSEqlpytkI0A5_jZKOFLUbPgXc-WiKJUVDZuyI_L3fI,17147
-analyser_hj3415/analyser/eval/red.py,sha256=1wbZWFBWQjnnE4uQQVE34Wfl2YXePzJ2yqKmfLxicoc,13906
+analyser_hj3415/analyser/eval/mil.py,sha256=ZWGHVbSKjas094Lpz6DWihiqS-WQvr--4rUZZ75hZYE,15440
+analyser_hj3415/analyser/eval/red.py,sha256=cC3h9YSsaSq86D07f2pkyX-fdG2aQeFF9ZoqvCZy2RA,12170
 analyser_hj3415/analyser/tsa/__init__.py,sha256=pg20ZQRABedTdaIoOr5t043RNKtJ7ji_WmnZrD1IhPg,147
 analyser_hj3415/analyser/tsa/common.py,sha256=ZLUkifupOlLKsrPiqR3y6FaEN4M_loZhxCZXYxkX0us,1874
 analyser_hj3415/analyser/tsa/lstm.py,sha256=oENuJyyo6U9MMn4UF4ZauGai51_dJisMSUNBiH8udXo,28998
-analyser_hj3415/analyser/tsa/prophet.py,sha256=a8XPiikRFbqHjbDT7C3hK5RsqOp5JyAbY7lDFNQfBzM,17884
-analyser_hj3415-4.1.4.dist-info/entry_points.txt,sha256=ZfjPnJuH8SzvhE9vftIPMBIofsc65IAWYOhqOC_L5ck,65
-analyser_hj3415-4.1.4.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
-analyser_hj3415-4.1.4.dist-info/METADATA,sha256=UqtHmuR79l4cAs6pEa4YD4wiuoMPfgEmpTzsCiamFDs,6777
-analyser_hj3415-4.1.4.dist-info/RECORD,,
+analyser_hj3415/analyser/tsa/prophet.py,sha256=0xcZWk8wf-A7W_27dJErUZ9x3_Fei37KgDRwH0SUT_I,17142
+analyser_hj3415-4.1.6.dist-info/entry_points.txt,sha256=ZfjPnJuH8SzvhE9vftIPMBIofsc65IAWYOhqOC_L5ck,65
+analyser_hj3415-4.1.6.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+analyser_hj3415-4.1.6.dist-info/METADATA,sha256=E0RGpwa6JVSXsxygDGPiVOn1wxZOPYpJ8sJ5MaozStk,6777
+analyser_hj3415-4.1.6.dist-info/RECORD,,