analyser_hj3415 3.4.1__py3-none-any.whl → 4.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- analyser_hj3415/analyser/eval/red.py +66 -24
- analyser_hj3415/analyser/tsa/lstm.py +138 -169
- analyser_hj3415/analyser/tsa/prophet.py +213 -147
- analyser_hj3415/cli.py +27 -44
- {analyser_hj3415-3.4.1.dist-info → analyser_hj3415-4.0.0.dist-info}/METADATA +1 -1
- analyser_hj3415-4.0.0.dist-info/RECORD +17 -0
- analyser_hj3415/analyser/compile.py +0 -355
- analyser_hj3415/workroom/__init__.py +0 -0
- analyser_hj3415/workroom/mysklearn.py +0 -50
- analyser_hj3415/workroom/mysklearn2.py +0 -39
- analyser_hj3415/workroom/score.py +0 -342
- analyser_hj3415/workroom/trash.py +0 -289
- analyser_hj3415-3.4.1.dist-info/RECORD +0 -23
- {analyser_hj3415-3.4.1.dist-info → analyser_hj3415-4.0.0.dist-info}/WHEEL +0 -0
- {analyser_hj3415-3.4.1.dist-info → analyser_hj3415-4.0.0.dist-info}/entry_points.txt +0 -0
analyser_hj3415/analyser/tsa/prophet.py
CHANGED
@@ -1,12 +1,13 @@
+from collections import OrderedDict
 from datetime import datetime, timedelta
-from typing import
+from typing import Tuple, List, Dict, Union
+
+import pandas
+import pickle
 import yfinance as yf
 import pandas as pd
 from prophet import Prophet
 from sklearn.preprocessing import StandardScaler
-import matplotlib.pyplot as plt  # manual Matplotlib import
-import plotly.graph_objs as go
-from plotly.offline import plot
 from dataclasses import dataclass
 import os
 
@@ -16,12 +17,12 @@ from db_hj3415 import myredis
 from analyser_hj3415.analyser import eval, MIs, tsa
 
 
-mylogger = setup_logger(__name__,'
+mylogger = setup_logger(__name__,'INFO')
 expire_time = tools.to_int(os.getenv('DEFAULT_EXPIRE_TIME_H', 48)) * 3600
 
 
 @dataclass
-class
+class ProphetLatestData:
     ticker: str
 
     date: datetime.date
@@ -29,18 +30,33 @@ class ProphetData:
     yhat: float
     yhat_upper: float
     yhat_lower: float
-    forecast_data: List[dict]
 
     trading_action: str = ''
     score: int = None
 
+@dataclass
+class ProphetChartData:
+    ticker: str
+
+    labels: List[pandas.Timestamp]
+    prices: List[Dict[pandas.Timestamp, float]]
+    yhats: List[Dict[pandas.Timestamp, float]]
+    yhat_uppers: List[Dict[pandas.Timestamp, float]]
+    yhat_lowers: List[Dict[pandas.Timestamp, float]]
+
+    is_prophet_up: bool
+
 
 class MyProphet:
+
+    REDIS_LATEST_DATA_SUFFIX = "myprophet_data"
+
     def __init__(self, ticker: str):
-        mylogger.
+        mylogger.debug(f'set up ticker : {ticker}')
         self.scaler = StandardScaler()
         self.model = Prophet()
         self._ticker = ticker
+        self.initialized = False
 
         self.raw_data = pd.DataFrame()
         self.df_real = pd.DataFrame()
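Note: the new ProphetChartData dataclass carries chart-ready series; as generate_chart_data further below shows, each point is built as a {"x": timestamp, "y": value} dict even though the annotations read Dict[pandas.Timestamp, float]. A minimal instantiation sketch (the ticker, dates, and prices are made-up values, not taken from this package):

    import pandas as pd
    ts = [pd.Timestamp("2024-01-02"), pd.Timestamp("2024-01-03")]      # hypothetical dates
    chart = ProphetChartData(
        ticker="005930.KS",                                            # hypothetical ticker
        labels=ts,
        prices=[{"x": ts[0], "y": 71000.0}],
        yhats=[{"x": t, "y": v} for t, v in zip(ts, [70800.0, 71200.0])],
        yhat_uppers=[{"x": t, "y": v} for t, v in zip(ts, [72500.0, 72900.0])],
        yhat_lowers=[{"x": t, "y": v} for t, v in zip(ts, [69100.0, 69500.0])],
        is_prophet_up=True,
    )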
@@ -64,10 +80,11 @@ class MyProphet:
         Parameters:
             ticker (str): The new ticker value to set.
         """
-        mylogger.
+        mylogger.debug(f'change ticker : {self.ticker} -> {ticker}')
         self.scaler = StandardScaler()
         self.model = Prophet()
         self._ticker = ticker
+        self.initialized = False
 
         self.raw_data = pd.DataFrame()
         self.df_real = pd.DataFrame()
@@ -121,6 +138,7 @@ class MyProphet:
             df['volume_scaled'] = self.scaler.fit_transform(df[['volume']])
             mylogger.debug('_preprocessing_for_prophet')
             mylogger.debug(df)
+            self.initialized = True
             return df
 
         def make_forecast() -> pd.DataFrame:
@@ -152,7 +170,7 @@ class MyProphet:
             mylogger.debug(forecast)
             return forecast
 
-
+        mylogger.debug("Initializing data for MyProphet")
 
         self.scaler = StandardScaler()
         self.model = Prophet()
@@ -161,160 +179,128 @@ class MyProphet:
         self.df_real = preprocessing_for_prophet()
         self.df_forecast = make_forecast()
 
-    def
-
-
+    def _make_prophet_latest_data(self) -> ProphetLatestData:
+        def scoring(price: float, yhat_lower: float, yhat_upper: float, method: str = 'sigmoid') -> Tuple[str, int]:
+            """
+            Determines the trading action ('buy', 'sell', 'hold') and a score from the given price and thresholds.
 
-
-
+            Parameters:
+                price (float): Current price of the asset.
+                yhat_lower (float): Lower threshold of the price forecast.
+                yhat_upper (float): Upper threshold of the price forecast.
+                method (str, optional): Scoring method ('sigmoid' or 'log'). Defaults to 'sigmoid'.
 
-
-
-            """
-        print("**** Start generate_data... ****")
-        redis_name = f'{self.ticker}_myprophet_data'
+            Returns:
+                Tuple[str, int]: A tuple of the trading action ('buy', 'sell', 'hold') and its score.
 
-
-
+            Raises:
+                ValueError: Raised when an unsupported scoring method is given.
+            """
 
-
-
-
-
-
-            Parameters:
-                price (float): Current price of the asset.
-                yhat_lower (float): Lower threshold of the price forecast.
-                yhat_upper (float): Upper threshold of the price forecast.
-                method (str, optional): Scoring method ('sigmoid' or 'log'). Defaults to 'sigmoid'.
-
-            Returns:
-                Tuple[str, int]: A tuple of the trading action ('buy', 'sell', 'hold') and its score.
-
-            Raises:
-                ValueError: Raised when an unsupported scoring method is given.
-            """
-
-            def calculate_score(deviation: float, method_in: str) -> int:
-                if method_in == 'sigmoid':
-                    return tools.to_int(eval.Tools.sigmoid_score(deviation))
-                elif method_in == 'log':
-                    return tools.to_int(eval.Tools.log_score(deviation))
-                else:
-                    raise ValueError(f"Unsupported scoring method: {method}")
-
-            buying_deviation = eval.Tools.cal_deviation(price, yhat_lower)
-            buying_score = calculate_score(buying_deviation, method)
-            if price >= yhat_lower:
-                buying_score = -buying_score
-
-            selling_deviation = eval.Tools.cal_deviation(price, yhat_upper)
-            selling_score = calculate_score(selling_deviation, method)
-            if price <= yhat_upper:
-                selling_score = -selling_score
-
-            if buying_score > 0:
-                return 'buy', buying_score
-            elif selling_score > 0:
-                return 'sell', selling_score
+            def calculate_score(deviation: float, method_in: str) -> int:
+                if method_in == 'sigmoid':
+                    return tools.to_int(eval.Tools.sigmoid_score(deviation))
+                elif method_in == 'log':
+                    return tools.to_int(eval.Tools.log_score(deviation))
                 else:
-
-
+                    raise ValueError(f"Unsupported scoring method: {method}")
+
+            buying_deviation = eval.Tools.cal_deviation(price, yhat_lower)
+            buying_score = calculate_score(buying_deviation, method)
+            if price >= yhat_lower:
+                buying_score = -buying_score
+
+            selling_deviation = eval.Tools.cal_deviation(price, yhat_upper)
+            selling_score = calculate_score(selling_deviation, method)
+            if price <= yhat_upper:
+                selling_score = -selling_score
+
+            if buying_score > 0:
+                return 'buy', buying_score
+            elif selling_score > 0:
+                return 'sell', selling_score
+            else:
+                return 'hold', 0
+
+        if not self.initialized:
             self.initializing()
-
-
-        self.df_forecast.loc[
+        latest_row = self.df_real.iloc[-1]
+        latest_yhat = \
+            self.df_forecast.loc[
+                self.df_forecast['ds'] == latest_row['ds'], ['ds', 'yhat_lower', 'yhat_upper', 'yhat']].iloc[
                 0].to_dict()
 
-
-
-
-
-
-
-
-
-        )
-
-        data.trading_action, data.score = scoring(data.price, data.yhat_lower, data.yhat_upper)
-        return data
+        data = ProphetLatestData(
+            ticker=self.ticker,
+            date=latest_row['ds'].date(),
+            price=latest_row['y'],
+            yhat=latest_yhat['yhat'],
+            yhat_lower=latest_yhat['yhat_lower'],
+            yhat_upper=latest_yhat['yhat_upper'],
+        )
 
+        data.trading_action, data.score = scoring(data.price, data.yhat_lower, data.yhat_upper)
+        return data
 
-
+    def generate_latest_data(self, refresh: bool) -> ProphetLatestData:
+        """
+        Creates a ProphetData object or returns cached data.
 
-
+        Parameters:
+            refresh (bool): Whether to regenerate the data.
 
-
+        Returns:
+            ProphetData: The generated ProphetData object.
         """
-
+        mylogger.debug("**** Start generate_data... ****")
+        redis_name = f'{self.ticker}_{self.REDIS_LATEST_DATA_SUFFIX}'
 
-
+        mylogger.info(
+            f"redisname: '{redis_name}' / refresh : {refresh} / expire_time : {expire_time / 3600}h")
+
+        prophet_data = myredis.Base.fetch_and_cache_data(redis_name, refresh, self._make_prophet_latest_data, timer=expire_time)
+        return prophet_data
+
+    def generate_chart_data(self, refresh: bool) -> ProphetChartData:
         """
-
-
-
-
-
-
-
-
-
-    def export(self, to="html") -> Optional[str]:
+        1. Current price (actual data)
+            • df_real['ds'] → x-axis (date)
+            • df_real['y'] → y-axis (actual price)
+
+        2. Forecast range (min/max)
+            • df_forecast['ds'] → x-axis (date)
+            • df_forecast['yhat_lower'] → y-axis (minimum forecast)
+            • df_forecast['yhat_upper'] → y-axis (maximum forecast)
         """
-
+        mylogger.debug("**** Start generate_prophet_chart_data... ****")
+        redis_name = f'{self.ticker}_myprophet_chart_data'
 
-
-        refresh
-            to (str): Export format ('html', 'png', 'file').
+        mylogger.info(
+            f"redisname: '{redis_name}' / refresh : {refresh} / expire_time : {expire_time / 3600}h")
 
-
-
+        def fetch_generate_prophet_chart_data() -> ProphetChartData:
+            if self.initialized:
+                self.initializing()
 
-
-
-
-
-            # Visualization with Plotly
-            fig = go.Figure()
-
-            # Actual data
-            fig.add_trace(go.Scatter(x=self.df_real['ds'], y=self.df_real['y'], mode='markers', name='actual price'))
-            # Forecast data
-            fig.add_trace(go.Scatter(x=self.df_forecast['ds'], y=self.df_forecast['yhat'], mode='lines', name='forecast'))
-
-            # Upper/lower band
-            fig.add_trace(
-                go.Scatter(x=self.df_forecast['ds'], y=self.df_forecast['yhat_upper'], fill=None, mode='lines', name='upper bound'))
-            fig.add_trace(
-                go.Scatter(x=self.df_forecast['ds'], y=self.df_forecast['yhat_lower'], fill='tonexty', mode='lines', name='lower bound'))
-
-            fig.update_layout(
-                # title=f'{self.code} {self.name} price forecast chart (prophet)',
-                xaxis_title='date',
-                yaxis_title='price (KRW)',
-                xaxis = dict(
-                    tickformat='%Y/%m',  # show the x-axis as 'YYYY/MM'
-                ),
-                yaxis = dict(
-                    tickformat=".0f",  # show raw values without decimals
-                ),
-                showlegend=False,
-            )
+            # Merge on date (outer join)
+            merged_df = pd.merge(self.df_real, self.df_forecast, on="ds", how="outer")
+            # Sort by date
+            merged_df = merged_df.sort_values(by="ds").reset_index(drop=True)
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            data = ProphetChartData(
+                ticker=self.ticker,
+                labels=merged_df["ds"].tolist(),
+                prices=[{"x": ds, "y": y} for ds, y in zip(merged_df["ds"], merged_df["y"]) if pd.notna(y)],  # type: ignore
+                yhats=[{"x": ds, "y": yhat} for ds, yhat in zip(merged_df["ds"], merged_df["yhat"])],  # type: ignore
+                yhat_uppers=[{"x": ds, "y": yhat_upper} for ds, yhat_upper in zip(merged_df["ds"], merged_df["yhat_upper"])],  # type: ignore
+                yhat_lowers=[{"x": ds, "y": yhat_lower} for ds, yhat_lower in zip(merged_df["ds"], merged_df["yhat_lower"])],  # type: ignore
+                is_prophet_up=self.is_prophet_up(refresh=refresh),
+            )
+            return data
+
+        prophet_chart_data = myredis.Base.fetch_and_cache_data(redis_name, refresh, fetch_generate_prophet_chart_data,
+                                                               timer=expire_time)
+        return prophet_chart_data
 
     def is_prophet_up(self, refresh: bool) -> bool:
         """
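Note: with the plotly export path removed, callers now go through the two cached generators above. A usage sketch, assuming the Redis connection wired up by db_hj3415.myredis is configured and MyProphet is imported from the module path shown in this diff (the ticker is illustrative):

    from analyser_hj3415.analyser.tsa.prophet import MyProphet

    prophet = MyProphet("005930.KS")                      # hypothetical ticker
    latest = prophet.generate_latest_data(refresh=False)  # served from Redis when a cached entry exists
    print(latest.trading_action, latest.score)

    chart = prophet.generate_chart_data(refresh=False)    # ProphetChartData for the front end
    print(len(chart.labels), chart.is_prophet_up)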
@@ -326,12 +312,13 @@ class MyProphet:
         Returns:
             bool: Whether the trend is upward.
         """
-
+        mylogger.debug("**** Caching is_prophet_up ... ****")
         redis_name = f'{self.ticker}_is_prophet_up'
-
+        mylogger.info(f"redisname: '{redis_name}' / expire_time : {expire_time / 3600}h")
 
         def fetch_is_prophet_up():
-            self.
+            if self.initialized:
+                self.initializing()
             yhat_dict = self.df_forecast.set_index('ds')['yhat'].to_dict()
             return tsa.common.is_up_by_OLS(yhat_dict)
 
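Note: is_up_by_OLS lives in analyser/tsa/common.py and is not part of this diff; conceptually it fits an ordinary-least-squares line to the forecast series and reports whether the slope is positive. A generic sketch of that idea (not the package's actual implementation):

    import numpy as np
    import pandas as pd

    def is_up_by_ols_sketch(yhat_dict: dict) -> bool:
        # yhat_dict maps dates to yhat values, like the dict built from df_forecast above
        dates = pd.to_datetime(list(yhat_dict.keys()))
        x = (dates - dates.min()).days.to_numpy(dtype=float)  # days since the first date
        y = np.array(list(yhat_dict.values()), dtype=float)
        slope, _ = np.polyfit(x, y, 1)                        # degree-1 least-squares fit
        return slope > 0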
@@ -356,6 +343,51 @@ class MyProphet:
             # If the conversion fails, a ValueError is raised: the format does not match
             return False
 
+    @staticmethod
+    def bulk_generate_latest_data(tickers: List[str], refresh: bool) -> Dict[str, ProphetLatestData]:
+        # --- (1) Pipelined GET ---
+        pipe = myredis.Base.redis_client.pipeline()
+        redis_keys = [f"{ticker}_{MyProphet.REDIS_LATEST_DATA_SUFFIX}" for ticker in tickers]
+        for redis_key in redis_keys:
+            pipe.get(redis_key)
+        results_from_redis = pipe.execute()  # [val1, val2, ...]
+
+        final_results = {}
+        missing_tickers = []
+
+        # With refresh=True the existing data is ignored and recomputed, so treat every ticker as missing
+        if refresh:
+            missing_tickers = tickers[:]
+        else:
+            # With refresh=False, recompute only the tickers whose Redis value is None
+            for ticker, val in zip(tickers, results_from_redis):
+                if val is None:
+                    missing_tickers.append(ticker)
+                else:
+                    # If pickled data exists in Redis, unpickle it and keep it
+                    prophet_data = pickle.loads(val)
+                    final_results[ticker] = prophet_data
+
+        # --- (2) Compute only the missing tickers ---
+        newly_computed_data = {}
+        for ticker in missing_tickers:
+            data = MyProphet(ticker)._make_prophet_latest_data()
+            newly_computed_data[ticker] = data
+
+        # --- (3) Pipelined SET ---
+        if newly_computed_data:
+            pipe = myredis.Base.redis_client.pipeline()
+            for ticker, data in newly_computed_data.items():
+                redis_key = f"{ticker}_{MyProphet.REDIS_LATEST_DATA_SUFFIX}"
+                # Serialize the ProphetLatestData object with pickle
+                pickled_data = pickle.dumps(data)
+                # SET + expire_time
+                pipe.setex(redis_key, expire_time, pickled_data)
+            pipe.execute()
+
+        # Final result dict (entries that were cached + newly computed ones)
+        final_results.update(newly_computed_data)
+        return final_results
 
 
 class CorpProphet(MyProphet):
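Note on the design: the three phases above keep the number of Redis round trips constant regardless of how many tickers are requested — one pipelined GET, local computation only for the misses (or for everything when refresh=True), and one pipelined SETEX for the new entries. A usage sketch (the tickers are illustrative):

    # assumes the Redis client wired up in db_hj3415.myredis is reachable
    results = MyProphet.bulk_generate_latest_data(["005930.KS", "000660.KS"], refresh=False)
    for ticker, latest in results.items():
        print(ticker, latest.trading_action, latest.score)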
@@ -383,6 +415,40 @@ class CorpProphet(MyProphet):
         self.name = myredis.Corps(code, 'c101').get_name()
         self.ticker = self.code + '.KS'
 
+    @staticmethod
+    def ticker_to_code(ticker:str):
+        return ticker[:-3]
+
+    @staticmethod
+    def code_to_ticker(code:str):
+        return code+'.KS'
+
+    @staticmethod
+    def ranking(top: Union[int, str] = 'all', refresh=False) -> OrderedDict:
+        mylogger.info("**** Start prophet ranking ... ****")
+
+        data = {}
+        for ticker, latest_data in MyProphet.bulk_generate_latest_data(
+                [CorpProphet.code_to_ticker(code) for code in myredis.Corps.list_all_codes()], refresh=refresh).items():
+            code = CorpProphet.ticker_to_code(ticker)
+            score = latest_data.score
+            mylogger.debug(f'{code} score : {score}')
+            data[code] = score
+
+        ranking = OrderedDict(sorted(data.items(), key=lambda x: x[1], reverse=True))
+
+        if top == 'all':
+            return ranking
+        else:
+            if isinstance(top, int):
+                return OrderedDict(list(ranking.items())[:top])
+            else:
+                raise ValueError("The 'top' argument must be 'all' or an int.")
+
+    @staticmethod
+    def bulk_generate_latest_data(codes: List[str], refresh: bool) -> Dict[str, ProphetLatestData]:
+        return MyProphet.bulk_generate_latest_data([CorpProphet.code_to_ticker(code) for code in codes], refresh=refresh)
+
 
 class MIProphet(MyProphet):
     """
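Note: CorpProphet.ranking replaces the removed compile-based ranking flow — it bulk-fetches the latest data for every listed code, sorts by score, and optionally truncates. Usage as exercised by the CLI below, plus an explicit top value:

    from analyser_hj3415.analyser import tsa

    ranking = tsa.CorpProphet.ranking(refresh=False)        # OrderedDict of {code: score}, best first
    top10 = tsa.CorpProphet.ranking(top=10, refresh=False)  # only the ten highest-scoring codes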
analyser_hj3415/cli.py
CHANGED
@@ -2,7 +2,7 @@ import argparse
 import pprint
 
 from utils_hj3415 import tools
-from analyser_hj3415.analyser import eval, tsa,
+from analyser_hj3415.analyser import eval, tsa, MIs
 from db_hj3415 import myredis, mymongo
 
 
@@ -10,16 +10,6 @@ def analyser_manager():
     parser = argparse.ArgumentParser(description="Analyser Commands")
     type_subparsers = parser.add_subparsers(dest='type', help='analysis type')
 
-    # compile command subparser
-    compile_parser = type_subparsers.add_parser('compile', help='Compile type')
-    compile_subparser = compile_parser.add_subparsers(dest='command', help='compile-related commands')
-    # compile - caching parser
-    caching_parser = compile_subparser.add_parser('caching', help='compute the lstm ranking and store it in Redis')
-    caching_parser.add_argument('-r', '--refresh', action='store_true', help='force recomputation without using the Redis cache')
-    caching_parser.add_argument('-mi', '--market_index', action='store_true', help='whether to cache the market index as well')
-    caching_parser.add_argument('-t', '--top', type=int, help='how far down the prophet ranking to process')
-
-
     # prophet command subparser
     prophet_parser = type_subparsers.add_parser('prophet', help='MyProphet type')
     prophet_subparser = prophet_parser.add_subparsers(dest='command', help='prophet-related commands')
@@ -34,11 +24,16 @@ def analyser_manager():
     # lstm command subparser
     lstm_parser = type_subparsers.add_parser('lstm', help='MyLSTM type')
     lstm_subparser = lstm_parser.add_subparsers(dest='command', help='lstm-related commands')
-    # lstm -
-
-
-
-
+    # lstm - caching parser
+    caching_parser = lstm_subparser.add_parser('caching', help='compute the lstm chart data and store it in Redis')
+    target_subparser = caching_parser.add_subparsers(dest='target', help='corp or mi')
+    # lstm - caching - corp parser
+    corp_parser = target_subparser.add_parser('corp', help='compute the corp lstm top-n data and store it in Redis')
+    corp_parser.add_argument('-n', '--num', type=int, help='how many times to repeat the ensemble training')
+    corp_parser.add_argument('-t', '--top', type=int, help='how far down the prophet ranking to process')
+    # lstm - caching - mi parser
+    mi_parser = target_subparser.add_parser('mi', help='compute the mi lstm data and store it in Redis')
+    mi_parser.add_argument('-n', '--num', type=int, help='how many times to repeat the ensemble training')
 
     # red command subparser
     red_parser = type_subparsers.add_parser('red', help='red type')
@@ -91,7 +86,7 @@ def analyser_manager():
                 for i, code in enumerate(myredis.Corps.list_all_codes()):
                     red.code = code
                     print(f"*** {i} / {red} ***")
-                    pprint.pprint(red.get(args.refresh
+                    pprint.pprint(red.get(args.refresh))
             else:
                 assert tools.is_6digit(args.code), "The code argument must be a 6-digit number."
                 # Load the stored expected return and keep it temporarily
@@ -104,9 +99,9 @@ def analyser_manager():
             mymongo.Logs.save('cli', 'INFO', 'run >> analyser red ranking')
             try:
                 if args.expect_earn is None:
-                    result =
+                    result = eval.Red.ranking(refresh=args.refresh)
                 else:
-                    result =
+                    result = eval.Red.ranking(expect_earn=args.expect_earn, refresh=args.refresh)
                 print(result)
             except Exception as e:
                 print(e)
@@ -176,7 +171,7 @@ def analyser_manager():
         if args.command == 'ranking':
             mymongo.Logs.save('cli', 'INFO', 'run >> analyser prophet ranking')
             try:
-                result =
+                result = tsa.CorpProphet.ranking(refresh=args.refresh)
                 print(result)
             except Exception as e:
                 print(e)
@@ -189,35 +184,23 @@ def analyser_manager():
                 myprophet = tsa.CorpProphet(args.target)
             else:
                 raise Exception("Invalid target")
-            print(myprophet.
+            print(myprophet.generate_latest_data(refresh=args.refresh).score)
             # mymongo.Logs.save('cli','INFO', f'run >> analyser prophet get {args.target}')
-
-    elif args.type == 'compile':
+    elif args.type == 'lstm':
         if args.command == 'caching':
-            mymongo.Logs.save('cli', 'INFO', f'run >> analyser compile caching')
             try:
-                if args.
-
+                if args.target == 'corp':
+                    tsa.CorpLSTM.caching_chart_data_topn(top=args.top, num=args.num)
+                    mymongo.Logs.save('cli', 'INFO',
+                                      f'run >> analyser lstm caching corp -t {args.top} -n {args.num}')
+                elif args.target == 'mi':
+                    tsa.MILSTM.caching_chart_data_mi_all(num=args.num)
+                    mymongo.Logs.save('cli', 'INFO',
+                                      f'run >> analyser lstm caching mi -n {args.num}')
                 else:
-
-                    if args.market_index:
-                        compile.MICompile.caching_mi_compile_all(refresh=args.refresh)
+                    print("Please enter a valid 'type' value (corp / mi)")
             except Exception as e:
                 print(e)
-                mymongo.Logs.save('cli','ERROR', f'error while running analyser lstm caching - {e}')
-
-    elif args.type == 'lstm':
-        if args.command == 'predict':
-            mi_type = str(args.target).upper()
-            if mi_type in MIs._fields:
-                mylstm = tsa.MILSTM(mi_type)
-            elif tools.is_6digit(args.target):
-                mylstm = tsa.CorpLSTM(args.target)
-            else:
-                raise Exception("Invalid target")
-            future_data, grade = mylstm.get_final_predictions(refresh=args.refresh, num=args.num)
-            print(future_data)
-            print(grade)
-            # mymongo.Logs.save('cli','INFO', f'run >> analyser lstm get {args.target}')
+                mymongo.Logs.save('cli', 'ERROR', f'error while running analyser lstm caching - {e}')
     else:
         parser.print_help()
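Note: the old "compile caching" subcommand is gone; LSTM cache warming now hangs off "lstm caching" with "corp" and "mi" targets. Judging from the log strings written above, the console script is invoked as "analyser" (entry_points.txt itself is unchanged and not shown here), so the new invocations would look like the following, where the -t and -n values are illustrative:

    analyser lstm caching corp -t 20 -n 5
    analyser lstm caching mi -n 5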
analyser_hj3415-4.0.0.dist-info/RECORD
ADDED
@@ -0,0 +1,17 @@
+analyser_hj3415/__init__.py,sha256=jqHEUoBeihYOMaS0bPOe3nRVXBufZ0clxc6M6jxPY0o,320
+analyser_hj3415/cli.py,sha256=-6YRrlcPi15McWS7mI8QADnRv3UGRGMMbUPFGyIkolA,11455
+analyser_hj3415/analyser/__init__.py,sha256=N0XyBfWJNpDS_6JYziKETWePO_jtFB1m7E8Qbwt1w0Q,1096
+analyser_hj3415/analyser/eval/__init__.py,sha256=IP1d0Q3nOCAD3zK1qxrC685MkJQfUh-qaXc7xptTxk8,80
+analyser_hj3415/analyser/eval/blue.py,sha256=p9_ddqLMJGq5HSn6NApuLhrX29qD--AASig9F71eb8I,10952
+analyser_hj3415/analyser/eval/common.py,sha256=sNXapoofShA43ww_SLjXmIjkrAr1AhAcezdaN_X_3Us,11443
+analyser_hj3415/analyser/eval/growth.py,sha256=sfJ7h06efrTfL4ylhUCV525IzUzilbun1ya9r5SVCtU,6526
+analyser_hj3415/analyser/eval/mil.py,sha256=mFMiFCuCBvlQrhQcM5hMg8U4zF32TS1GnUmk8fPd950,15178
+analyser_hj3415/analyser/eval/red.py,sha256=b-Odud8pxQIO2NjI7m3HbK4FOND5WhaoYV94mCHqDPo,13907
+analyser_hj3415/analyser/tsa/__init__.py,sha256=pg20ZQRABedTdaIoOr5t043RNKtJ7ji_WmnZrD1IhPg,147
+analyser_hj3415/analyser/tsa/common.py,sha256=OnsZ_cFYmNzmk0tV5qSqVW-5jJyrwMWHguWdS2Z6fvY,979
+analyser_hj3415/analyser/tsa/lstm.py,sha256=SoUx7JlgW4d3CGahtG4dUJoKBe0meqiI6CSTTL7iSL0,27977
+analyser_hj3415/analyser/tsa/prophet.py,sha256=YNSHnNRl41JrcrmOuuuwHyVoZMpeV4IMliLAF9XXxCk,17651
+analyser_hj3415-4.0.0.dist-info/entry_points.txt,sha256=ZfjPnJuH8SzvhE9vftIPMBIofsc65IAWYOhqOC_L5ck,65
+analyser_hj3415-4.0.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+analyser_hj3415-4.0.0.dist-info/METADATA,sha256=HbzCJ7SF7frEi1AkS4zLsNhhez1CVz5_icnpkIPhPm4,6777
+analyser_hj3415-4.0.0.dist-info/RECORD,,