neurostats-API 0.0.17__py3-none-any.whl → 0.0.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,4 +1,4 @@
- __version__='0.0.16'
+ __version__='0.0.19'
 
  from .fetchers import (
  BalanceSheetFetcher,
@@ -9,5 +9,6 @@ from .fetchers import (
  MarginTradingFetcher,
  MonthRevenueFetcher,
  TechFetcher,
- ProfitLoseFetcher
+ TEJStockPriceFetcher,
+ ProfitLoseFetcher,
  )
@@ -2,7 +2,7 @@ from .base import StatsDateTime, StatsFetcher
  from .balance_sheet import BalanceSheetFetcher
  from .cash_flow import CashFlowFetcher
  from .finance_overview import FinanceOverviewFetcher
- from .tej_finance_report import FinanceReportFetcher
+ from .tej_finance_report import FinanceReportFetcher, TEJStockPriceFetcher
  from .tech import TechFetcher
  from .institution import InstitutionFetcher
  from .margin_trading import MarginTradingFetcher
@@ -1,4 +1,5 @@
  import abc
+ from typing import Union
  from pymongo import MongoClient
  import pandas as pd
  import json
@@ -53,8 +54,8 @@ class StatsFetcher:
  season = (month - 1) // 3 + 1
 
  return StatsDateTime(date, year, month, day, season)
-
- def has_required_columns(self, df:pd.DataFrame, required_cols=None):
+
+ def has_required_columns(self, df: pd.DataFrame, required_cols=None):
  """
  Check if the required columns are present in the DataFrame.
 
@@ -68,23 +69,22 @@ class StatsFetcher:
  """
  if required_cols is None:
  required_cols = ['date', 'open', 'high', 'low', 'close', 'volume']
-
+
  return all(col in df.columns for col in required_cols)
 
 
  class BaseTEJFetcher(abc.ABC):
 
- def __init__(self):
- self.client = None
- self.db = None
- self.collection = None
-
  @abc.abstractmethod
  def get(self):
  pass
 
  def get_latest_data_time(self, ticker):
- latest_data = self.collection.find_one({"ticker": ticker}, {"last_update": 1, "_id": 0})
+ latest_data = self.collection.find_one(
+ {"ticker": ticker}, {
+ "last_update": 1,
+ "_id": 0
+ })
 
  try:
  latest_date = latest_data['last_update']["latest_data_date"]
@@ -93,11 +93,16 @@ class BaseTEJFetcher(abc.ABC):
 
  return latest_date
 
- def cal_YoY(self, data_dict: dict, start_year: int, end_year: int, season: int):
+ def cal_YoY(
+ self, data_dict: dict, start_year: int, end_year: int, season: int):
  year_shifts = [1, 3, 5, 10]
  return_dict = {}
  for year in range(start_year, end_year + 1):
- year_data = data_dict[f"{year}Q{season}"]
+ try:
+ year_data = data_dict[f"{year}Q{season}"].copy()
+ except KeyError as e:
+ continue
+
  year_keys = list(year_data.keys())
  for key in year_keys:
  if (key in 'season'):
@@ -109,9 +114,11 @@ class BaseTEJFetcher(abc.ABC):
  for shift in year_shifts:
  this_value = year_data[key]
  try:
- past_year = str(year - shift)
+ past_year = year - shift
  last_value = data_dict[f"{past_year}Q{season}"][key]
- temp_dict[f"YoY_{shift}"] = YoY_Calculator.cal_growth(this_value, last_value, delta=shift)
+ temp_dict[
+ f"YoY_{shift}"] = YoY_Calculator.cal_growth(
+ this_value, last_value, delta=shift)
  except Exception as e:
  temp_dict[f"YoY_{shift}"] = None
 
@@ -149,9 +156,11 @@ class BaseTEJFetcher(abc.ABC):
  temp_dict = {"value": this_value}
 
  try:
- last_value = data_dict[f"{last_year}Q{last_season}"][key]['value']
+ last_value = data_dict[f"{last_year}Q{last_season}"][
+ key]['value']
 
- temp_dict['growth'] = YoY_Calculator.cal_growth(this_value, last_value, delta=1)
+ temp_dict['growth'] = YoY_Calculator.cal_growth(
+ this_value, last_value, delta=1)
  except Exception as e:
  temp_dict['growth'] = None
 
@@ -169,3 +178,33 @@ class BaseTEJFetcher(abc.ABC):
  for key in data_dict.keys():
  data_dict[key] = pd.DataFrame.from_dict(data_dict[key])
  return data_dict
+
+ def set_time_shift(self, date: Union[str, datetime], period: str):
+ if (isinstance(date, str)):
+ date = datetime.strptime(date, "%Y-%m-%d")
+ if (period == '1d'):
+ return date - timedelta(days=1)
+
+ elif (period == '7d'):
+ return date - timedelta(days=7)
+
+ elif (period == '1m'):
+ return date - timedelta(days=30)
+
+ elif (period == '3m'):
+ return date - timedelta(days=90)
+
+ elif (period == '1y'):
+ return date - timedelta(days=365)
+
+ elif (period == '3y'):
+ return date - timedelta(days=365 * 3)
+
+ elif (period == '5y'):
+ return date - timedelta(days=365 * 5)
+
+ elif (period == '10y'):
+ return date - timedelta(days=365 * 10)
+
+ elif (period == 'all'):
+ return datetime.strptime("1991-01-01", "%Y-%m-%d")
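
The new `set_time_shift` helper above turns a period string into a lookback start date. A minimal standalone sketch of that mapping (plain `datetime`/`timedelta`; the dates are hypothetical, and `'all'` is special-cased in the method to 1991-01-01):

```Python
from datetime import datetime, timedelta

# Same period-to-offset mapping as set_time_shift; months/years approximated in days.
offsets = {
    '1d': timedelta(days=1), '7d': timedelta(days=7),
    '1m': timedelta(days=30), '3m': timedelta(days=90),
    '1y': timedelta(days=365), '3y': timedelta(days=365 * 3),
    '5y': timedelta(days=365 * 5), '10y': timedelta(days=365 * 10),
}

latest = datetime(2024, 10, 1)     # hypothetical latest data date
print(latest - offsets['3m'])      # 2024-07-03 00:00:00 -> start of a 3-month window
```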
@@ -2,48 +2,49 @@ from .base import StatsFetcher
  import pandas as pd
  import yfinance as yf
 
+
  class TechFetcher(StatsFetcher):
-
- def __init__(self, ticker:str, db_client):
 
+ def __init__(self, ticker: str, db_client):
  """
  The Capitalization-Weighted Index includes the following tickers:
  ['GSPC', 'IXIC', 'DJI', 'TWII']
  """
-
+
  super().__init__(ticker, db_client)
+ self.collection = self.db["TWN/APIPRCD"]
  self.full_ohlcv = self._get_ohlcv()
- self.basic_indexes = ['SMA5', 'SMA20', 'SMA60', 'EMA5', 'EMA20',
- 'EMA40', 'EMA12', 'EMA26', 'RSI7', 'RSI14',
- 'RSI21', 'MACD', 'Signal Line', 'Middle Band',
- 'Upper Band', 'Lower Band', '%b', 'ATR',
- 'BBW','EMA Cycle','EMA Cycle Instructions',
- 'Day Trading Signal']
-
+ self.basic_indexes = [
+ 'SMA5', 'SMA20', 'SMA60', 'EMA5', 'EMA20', 'EMA40', 'EMA12',
+ 'EMA26', 'RSI7', 'RSI14', 'RSI21', 'MACD', 'Signal Line',
+ 'Middle Band', 'Upper Band', 'Lower Band', '%b', 'ATR', 'BBW',
+ 'EMA Cycle', 'EMA Cycle Instructions', 'Day Trading Signal'
+ ]
+
  self.daily_index = TechProcessor.cal_basic_index(self.full_ohlcv)
 
  self.weekly_index = TechProcessor.resample(
- self.daily_index,
- period= 'W',
- technical_indicators = self.basic_indexes
+ self.daily_index,
+ period='W',
+ technical_indicators=self.basic_indexes
  )
 
  self.monthly_index = TechProcessor.resample(
- self.daily_index,
- period= 'ME',
- technical_indicators = self.basic_indexes
+ self.daily_index,
+ period='ME',
+ technical_indicators=self.basic_indexes
  )
 
  self.quarterly_index = TechProcessor.resample(
- self.daily_index,
- period= 'QE',
- technical_indicators = self.basic_indexes
+ self.daily_index,
+ period='QE',
+ technical_indicators=self.basic_indexes
  )
 
  self.yearly_index = TechProcessor.resample(
- self.daily_index,
- period= 'YE',
- technical_indicators = self.basic_indexes
+ self.daily_index,
+ period='YE',
+ technical_indicators=self.basic_indexes
  )
 
  def _get_ohlcv(self):
@@ -51,25 +52,7 @@ class TechFetcher(StatsFetcher):
  required_cols = ['date', 'open', 'high', 'low', 'close', 'volume']
 
  try:
- query = {'ticker': self.ticker}
- ticker_full = self.collection.find_one(query)
-
- if not ticker_full:
- raise ValueError(f"No data found for ticker: {self.ticker}")
-
- daily_data = ticker_full.get("daily_data", [])
- if not isinstance(daily_data, list):
- raise TypeError("Expected 'daily_data' to be a list.")
-
- df = pd.DataFrame(daily_data)
-
- if not self.has_required_columns(df, required_cols):
- raise KeyError(f"Missing required columns")
-
- except (KeyError, ValueError, TypeError) as e:
-
- print(f"Conduct yf searching")
-
+ # 先對yf search
  if self.ticker in ['GSPC', 'IXIC', 'DJI', 'TWII']:
  full_tick = f'^{self.ticker}'
  else:
@@ -80,13 +63,41 @@ class TechFetcher(StatsFetcher):
  if not self.has_required_columns(df, required_cols):
 
  print(f".tw failed, try .two")
-
+
  full_tick = f'{self.ticker}.two'
 
  df = self.conduct_yf_search(full_tick)
-
- return df[required_cols]
 
+ if (df.empty):
+ raise ValueError(f"No data found for ticker: {self.ticker}")
+
+ except (KeyError, ValueError, TypeError) as e:
+ # 再對TEJ search
+ tej_required_cols = [
+ "mdate", "open_d", 'high_d', 'low_d', 'close_d', 'vol'
+ ]
+ tej_name_proj = {
+ tej_name: org_name
+ for tej_name, org_name in zip(tej_required_cols, required_cols)
+ }
+
+ query = {'ticker': self.ticker}
+ ticker_full = self.collection.find_one(query)
+
+ if not ticker_full:
+ raise
+
+ daily_data = ticker_full.get("data", [])
+ if not isinstance(daily_data, list):
+ raise TypeError("Expected 'daily_data' to be a list.")
+
+ df = pd.DataFrame(daily_data)
+
+ if not self.has_required_columns(df, tej_required_cols):
+ raise KeyError(f"Missing required columns")
+ df = df.rename(columns=tej_name_proj)
+
+ return df[required_cols]
 
  def get_daily(self):
 
@@ -95,7 +106,7 @@ class TechFetcher(StatsFetcher):
  def get_weekly(self):
 
  return self.weekly_index
-
+
  def get_monthly(self):
 
  return self.monthly_index
@@ -103,19 +114,19 @@ class TechFetcher(StatsFetcher):
  def get_quarterly(self):
 
  return self.quarterly_index
-
+
  def get_yearly(self):
 
  return self.yearly_index
-
- def conduct_yf_search(self, ticker:str):
+
+ def conduct_yf_search(self, ticker: str):
 
  yf_ticker = yf.Ticker(ticker)
  origin_df = yf_ticker.history(period="10y")
-
+
  if origin_df.empty:
  return origin_df
-
+
  origin_df = origin_df.reset_index()
  origin_df["Date"] = pd.to_datetime(origin_df["Date"])
  df = origin_df.rename(
@@ -131,8 +142,9 @@ class TechFetcher(StatsFetcher):
 
  return df
 
+
  class TechProcessor:
-
+
  @staticmethod
  def cal_sma(closes: pd.Series, n_days: int) -> pd.Series:
  return closes.rolling(window=n_days).mean()
@@ -158,7 +170,9 @@ class TechProcessor:
  return macds.ewm(span=n_days, adjust=False).mean()
 
  @staticmethod
- def cal_bollinger_bands(closes: pd.Series, n_days: int = 20) -> pd.DataFrame:
+ def cal_bollinger_bands(
+ closes: pd.Series, n_days: int = 20
+ ) -> pd.DataFrame:
  middle = closes.rolling(window=n_days).mean()
  upper = middle + 2 * closes.rolling(window=n_days).std()
  lower = middle - 2 * closes.rolling(window=n_days).std()
@@ -174,12 +188,15 @@ class TechProcessor:
  )
 
  @staticmethod
- def cal_atr(highes: pd.Series, lows: pd.Series, closes: pd.Series, n_days: int) -> pd.Series:
+ def cal_atr(
+ highes: pd.Series, lows: pd.Series, closes: pd.Series, n_days: int
+ ) -> pd.Series:
  high_low = highes - lows
  high_close = (highes - closes.shift(1)).abs()
  low_close = (lows - closes.shift(1)).abs()
 
- true_range = pd.concat([high_low, high_close, low_close], axis=1).max(axis=1)
+ true_range = pd.concat([high_low, high_close, low_close],
+ axis=1).max(axis=1)
  atr = true_range.rolling(window=n_days, min_periods=1).mean()
 
  return atr
@@ -233,15 +250,15 @@ class TechProcessor:
  return '今日此股票為好的當沖標的'
  else:
  return f'今日此股票並非好的當沖標的, 原因: {", ".join(reasons)}'
-
+
  @staticmethod
- def cal_basic_index(ohlcvs:pd.DataFrame):
-
+ def cal_basic_index(ohlcvs: pd.DataFrame):
+
  # SMA
- ohlcvs['SMA5'] = TechProcessor.cal_sma(ohlcvs['close'], 5)
+ ohlcvs['SMA5'] = TechProcessor.cal_sma(ohlcvs['close'], 5)
  ohlcvs['SMA20'] = TechProcessor.cal_sma(ohlcvs['close'], 20)
  ohlcvs['SMA60'] = TechProcessor.cal_sma(ohlcvs['close'], 40)
-
+
  # EMA
  ohlcvs['EMA5'] = TechProcessor.cal_ema(ohlcvs['close'], 5)
  ohlcvs['EMA20'] = TechProcessor.cal_ema(ohlcvs['close'], 20)
@@ -256,34 +273,42 @@ class TechProcessor:
  ohlcvs['RSI21'] = TechProcessor.cal_rsi(ohlcvs['close'], 21)
 
  # MACD
- ohlcvs['MACD'] = TechProcessor.cal_macd(ohlcvs['EMA12'], ohlcvs['EMA26'])
+ ohlcvs['MACD'] = TechProcessor.cal_macd(
+ ohlcvs['EMA12'], ohlcvs['EMA26']
+ )
  ohlcvs['Signal Line'] = TechProcessor.cal_single_line(ohlcvs['MACD'], 9)
-
+
  # BANDS
  bands = TechProcessor.cal_bollinger_bands(ohlcvs['close'], 20)
  ohlcvs['Middle Band'] = bands['middle']
  ohlcvs['Upper Band'] = bands['upper']
  ohlcvs['Lower Band'] = bands['lower']
  ohlcvs['%b'] = bands['%b']
- ohlcvs['BBW'] = (ohlcvs["Upper Band"] - ohlcvs["Lower Band"]) / ohlcvs["Middle Band"]
+ ohlcvs['BBW'] = (ohlcvs["Upper Band"] -
+ ohlcvs["Lower Band"]) / ohlcvs["Middle Band"]
 
  # ATR
- ohlcvs['ATR'] = TechProcessor.cal_atr(ohlcvs['high'],ohlcvs['low'],ohlcvs['close'],14)
+ ohlcvs['ATR'] = TechProcessor.cal_atr(
+ ohlcvs['high'], ohlcvs['low'], ohlcvs['close'], 14
+ )
 
  # EMA CYCLE
  ohlcvs['EMA Cycle'] = ohlcvs.apply(
- lambda row: TechProcessor.check_tech_trend(row['EMA5'], row['EMA20'], row['EMA40']),
+ lambda row: TechProcessor.
+ check_tech_trend(row['EMA5'], row['EMA20'], row['EMA40']),
  axis=1
  )
  guidance_map = {
  '穩定上升期': "三條移動平均線都左下右上, 買方優勢, 三線間隔越來越遠時, 進一步強攻",
- '牛市結束期': "ema20 & 40 左下右上, ema5 緩慢下滑, 行情仍強, 賣出條件為 ema5 持續下跌, ema20 停止上漲",
+ '牛市結束期':
+ "ema20 & 40 左下右上, ema5 緩慢下滑, 行情仍強, 賣出條件為 ema5 持續下跌, ema20 停止上漲",
  '熊市入口期': "全數出清穩定上升期布局的多頭部位, 考慮提早佈局建立空頭部位",
  '穩定下跌期': "三條移動平均線都是左上右下, 賣方優勢, 三線間隔越來越遠時, 進一步強攻",
- '熊市結束期': "ema20 & 40 左上右下, ema5 緩慢上升, 行情仍走弱, 布局買進的條件是 ema 持續上漲, ema20 停止下降, 幾乎持平",
+ '熊市結束期':
+ "ema20 & 40 左上右下, ema5 緩慢上升, 行情仍走弱, 布局買進的條件是 ema 持續上漲, ema20 停止下降, 幾乎持平",
  '牛市入口期': "全數出清穩定下跌期布局的空頭部位, 考慮提早佈局多頭部位",
  '未定義': "無對應指導"
- }
+ }
 
  ohlcvs['EMA Cycle Instructions'] = ohlcvs['EMA Cycle'].map(guidance_map)
 
@@ -292,7 +317,7 @@ class TechProcessor:
  ohlcvs['Day Trading Signal'] = ohlcvs.apply(
  lambda row: TechProcessor.check_day_trading(
  close_today=row['close'],
- close_yesterday=row['close_yesterday'], # 使用前一天的收盤價
+ close_yesterday=row['close_yesterday'], # 使用前一天的收盤價
  today_atr=row['ATR'],
  today_rsi7=row['RSI7']
  ),
@@ -302,7 +327,12 @@ class TechProcessor:
  return ohlcvs
 
  @staticmethod
- def resample(df: pd.DataFrame, period='W', technical_indicators=None, date_col='date'):
+ def resample(
+ df: pd.DataFrame,
+ period='W',
+ technical_indicators=None,
+ date_col='date'
+ ):
  """
  將 DataFrame 中的技術指標數據重新取樣為指定的時間週期。
  參數:
@@ -344,11 +374,16 @@ class TechProcessor:
  agg_dict[indicator] = 'mean'
 
  # 過濾出存在於 DataFrame 中的列
- existing_cols = {col: agg_dict[col] for col in agg_dict if col in numeric_df.columns}
-
+ existing_cols = {
+ col: agg_dict[col]
+ for col in agg_dict if col in numeric_df.columns
+ }
+
  # 確保索引為 DatetimeIndex,進行重新取樣
  if not isinstance(df.index, pd.DatetimeIndex):
- raise TypeError("The DataFrame index must be a DatetimeIndex for resampling.")
+ raise TypeError(
+ "The DataFrame index must be a DatetimeIndex for resampling."
+ )
 
  resampled_df = numeric_df.resample(period).agg(existing_cols)
 
@@ -357,11 +392,3 @@ class TechProcessor:
  resampled_df.reset_index(inplace=True)
 
  return resampled_df
-
-
-
-
-
-
-
-
@@ -3,8 +3,10 @@ from datetime import datetime
  from enum import Enum
  import pandas as pd
  from pymongo import MongoClient
+ from .tech import TechProcessor
  from ..utils import StatsProcessor, YoY_Calculator
  import warnings
+ import yaml
 
 
  class FinanceReportFetcher(BaseTEJFetcher):
@@ -15,23 +17,18 @@ class FinanceReportFetcher(BaseTEJFetcher):
  YOY_NOCAL = 3
  QOQ_NOCAL = 4
 
- def __init__(self, mongo_uri, db_name="company", collection_name="TWN/AINVFQ1"):
+ def __init__(
+ self,
+ mongo_uri,
+ db_name=".company",
+ collection_name="TWN/AINVFQ1"
+ ):
  self.client = MongoClient(mongo_uri)
  self.db = self.client[db_name]
  self.collection = self.db[collection_name]
 
- # yapf: disabled
- self.check_index = {
- 'coid', 'mdate', 'key3', 'no', 'sem', 'merg', 'curr', 'annd', 'fin_ind', 'bp11', 'bp21', 'bp22', 'bp31',
- 'bp41', 'bp51', 'bp53', 'bp61', 'bp62', 'bp63', 'bp64', 'bp65', 'bf11', 'bf12', 'bf21', 'bf22', 'bf41',
- 'bf42', 'bf43', 'bf44', 'bf45', 'bf99', 'bsca', 'bsnca', 'bsta', 'bscl', 'bsncl', 'bstl', 'bsse', 'bslse',
- 'debt', 'quick', 'ppe', 'ar', 'ip12', 'ip22', 'ip31', 'ip51', 'iv41', 'if11', 'isibt', 'isni', 'isnip',
- 'eps', 'ispsd', 'gm', 'opi', 'nri', 'ri', 'nopi', 'ebit', 'cip31', 'cscfo', 'cscfi', 'cscff', 'person',
- 'shares', 'wavg', 'taxrate', 'r104', 'r115', 'r105', 'r106', 'r107', 'r108', 'r201', 'r112', 'r401', 'r402',
- 'r403', 'r404', 'r405', 'r408', 'r409', 'r410', 'r502', 'r501', 'r205', 'r505', 'r517', 'r512', 'r509',
- 'r608', 'r616', 'r610', 'r607', 'r613', 'r612', 'r609', 'r614', 'r611', 'r307', 'r304', 'r305', 'r306',
- 'r316', 'r834'
- } # yapf: enabled
+ index_dict = StatsProcessor.load_yaml("tej_db_index.yaml")
+ self.check_index = set(index_dict[collection_name])
 
  def get(
  self,
@@ -57,19 +54,23 @@ class FinanceReportFetcher(BaseTEJFetcher):
  indexes = set(indexes)
  difference = indexes - self.check_index
  if (difference):
- warnings.warn(f"{list(difference)} 沒有出現在資料表中,請確認column名稱是否正確", UserWarning)
+ warnings.warn(
+ f"{list(difference)} 沒有出現在資料表中,請確認column名稱是否正確",
+ UserWarning)
+
+ if (not start_date):
+ start_date = datetime.strptime("2005-01-01", "%Y-%m-%d")
+ else:
+ start_date = datetime.strptime(start_date, "%Y-%m-%d")
 
  if (fetch_mode in {self.FetchMode.QOQ, self.FetchMode.QOQ_NOCAL}):
- if (not start_date):
- warnings.warn("No start_date specified, use default date = \"2005-01-01\"", UserWarning)
- start_date = datetime.strptime("2005-01-01", "%Y-%m-%d")
+
  if (not end_date):
- warnings.warn("No end_date specified, use default date = today", UserWarning)
  end_date = datetime.today()
+ else:
+ end_date = datetime.strptime(end_date, "%Y-%m-%d")
 
  assert (start_date <= end_date)
- start_date = datetime.strptime(start_date, "%Y-%m-%d")
- end_date = datetime.strptime(end_date, "%Y-%m-%d")
 
  start_year = start_date.year
  start_season = (start_date.month - 1) // 4 + 1
@@ -94,7 +95,6 @@ class FinanceReportFetcher(BaseTEJFetcher):
  return data_df
 
  elif (fetch_mode in {self.FetchMode.YOY, self.FetchMode.YOY_NOCAL}):
- start_date = datetime.strptime(start_date, "%Y-%m-%d")
  start_year = start_date.year
  end_date = self.get_latest_data_time(ticker)
  if (not end_date):
@@ -120,7 +120,15 @@ class FinanceReportFetcher(BaseTEJFetcher):
  return data_df
 
  def get_QoQ_data(
- self, ticker, start_year, start_season, end_year, end_season, report_type="Q", indexes=[], use_cal=False):
+ self,
+ ticker,
+ start_year,
+ start_season,
+ end_year,
+ end_season,
+ report_type="Q",
+ indexes=[],
+ use_cal=False):
  """
  取得時間範圍內每季資料
  """
@@ -146,31 +154,29 @@ class FinanceReportFetcher(BaseTEJFetcher):
  }, {
  "$unwind": "$data"
  }, {
- "$match":
- {
- "$or":
- [
- {
- "data.year": {
- "$gt": start_year,
- "$lt": end_year
- }
- }, {
- "data.year": start_year,
- "data.season": {
- "$gte": start_season
- }
- }, {
- "data.year": end_year,
- "data.season": {
- "$lte": end_season
- }
- }, {
- "data.year": lower_bound_year,
- "data.season": lower_bound_season
- }
- ]
- }
+ "$match": {
+ "$or": [
+ {
+ "data.year": {
+ "$gt": start_year,
+ "$lt": end_year
+ }
+ }, {
+ "data.year": start_year,
+ "data.season": {
+ "$gte": start_season
+ }
+ }, {
+ "data.year": end_year,
+ "data.season": {
+ "$lte": end_season
+ }
+ }, {
+ "data.year": lower_bound_year,
+ "data.season": lower_bound_season
+ }
+ ]
+ }
  }, {
  "$project": {
  "data.year": 1,
@@ -194,40 +200,40 @@ class FinanceReportFetcher(BaseTEJFetcher):
  }, {
  "$unwind": "$data"
  }, {
- "$match":
- {
- "$or":
- [
- {
- "data.year": {
- "$gt": start_year,
- "$lt": end_year
- }
- }, {
- "data.year": start_year,
- "data.season": {
- "$gte": start_season
- }
- }, {
- "data.year": end_year,
- "data.season": {
- "$lte": end_season
- }
- }, {
- "data.year": lower_bound_year,
- "data.season": lower_bound_season
- }
- ]
- }
+ "$match": {
+ "$or": [
+ {
+ "data.year": {
+ "$gt": start_year,
+ "$lt": end_year
+ }
+ }, {
+ "data.year": start_year,
+ "data.season": {
+ "$gte": start_season
+ }
+ }, {
+ "data.year": end_year,
+ "data.season": {
+ "$lte": end_season
+ }
+ }, {
+ "data.year": lower_bound_year,
+ "data.season": lower_bound_season
+ }
+ ]
+ }
  }, {
  "$project": project_stage
  }
  ]
 
  fetched_data = self.collection.aggregate(pipeline).to_list()
-
  data_dict = StatsProcessor.list_of_dict_to_dict(
- fetched_data, keys=["year", "season"], delimeter="Q", data_key=report_type)
+ fetched_data,
+ keys=["year", "season"],
+ delimeter="Q",
+ data_key=report_type)
 
  if (use_cal):
  data_with_QoQ = self.cal_QoQ(data_dict)
@@ -242,7 +248,15 @@ class FinanceReportFetcher(BaseTEJFetcher):
  data_df = data_df.iloc[:, ::-1]
  return data_df
 
- def get_YoY_data(self, ticker, start_year, end_year, season, report_type="Q", indexes=[], use_cal=False):
+ def get_YoY_data(
+ self,
+ ticker,
+ start_year,
+ end_year,
+ season,
+ report_type="Q",
+ indexes=[],
+ use_cal=False):
  """
  取得某季歷年資料
  """
@@ -267,20 +281,23 @@ class FinanceReportFetcher(BaseTEJFetcher):
  }, {
  "$unwind": "$data"
  }, {
- "$match":
- {
- "$or": [{
- "$and": [{
- "data.year": {
- "$in": select_year
- }
- }, {
- "data.season": {
- "$eq": season
+ "$match": {
+ "$or": [
+ {
+ "$and": [
+ {
+ "data.year": {
+ "$in": select_year
+ }
+ }, {
+ "data.season": {
+ "$eq": season
+ }
  }
- }]
- },]
- }
+ ]
+ },
+ ]
+ }
  }, {
  "$project": {
  "data.year": 1,
@@ -305,15 +322,17 @@ class FinanceReportFetcher(BaseTEJFetcher):
  "$unwind": "$data"
  }, {
  "$match": {
- "$and": [{
- "data.year": {
- "$in": select_year
- }
- }, {
- "data.season": {
- "$eq": season
+ "$and": [
+ {
+ "data.year": {
+ "$in": select_year
+ }
+ }, {
+ "data.season": {
+ "$eq": season
+ }
  }
- }]
+ ]
  }
  }, {
  "$project": project_stage
@@ -324,10 +343,14 @@ class FinanceReportFetcher(BaseTEJFetcher):
 
  # 處理計算YoY
  data_dict = StatsProcessor.list_of_dict_to_dict(
- fetched_data, keys=['year', 'season'], data_key=report_type, delimeter='Q')
+ fetched_data,
+ keys=['year', 'season'],
+ data_key=report_type,
+ delimeter='Q')
 
  if (use_cal):
- data_with_YoY = self.cal_YoY(data_dict, start_year, end_year, season)
+ data_with_YoY = self.cal_YoY(
+ data_dict, start_year, end_year, season)
  data_df = pd.DataFrame.from_dict(data_with_YoY)
  data_df = data_df.iloc[:, ::-1].T
  data_dict = data_df.to_dict()
@@ -337,3 +360,73 @@ class FinanceReportFetcher(BaseTEJFetcher):
  data_df = pd.DataFrame.from_dict(data_dict)
  data_df = data_df.iloc[:, ::-1]
  return data_df
+
+
+ class TEJStockPriceFetcher(BaseTEJFetcher):
+
+ def __init__(
+ self,
+ mongo_uri,
+ db_name: str = "company",
+ collection_name: str = None):
+ self.mongo_uri = mongo_uri
+ self.db_name = db_name
+ self.collection_name = collection_name
+
+ self.client = MongoClient(self.mongo_uri)
+ self.db = self.client[self.db_name]
+ self.collection = self.db[self.collection_name]
+
+ self.check_period = ['1d', '7d', '1m', '3m', '1y', '3y', '5y', '10y', 'all']
+
+ def get(
+ self,
+ ticker: str = "2330",
+ start_date: str = "2024-10-01",
+ period: str = None
+ ):
+ """
+ 取得開高低收資料
+ start_date: str: 起始的日期
+ period: 指定日期範圍(E.g. 1天, 7天...etc)
+ 如果宣告period, 以period為優先
+ """
+
+ assert (
+ period is None or period in self.check_period
+ ), f"period should be None or {','.join(self.check_period)}"
+
+ if (period is not None):
+ latest_date = self.get_latest_data_time(ticker)
+ start_date = self.set_time_shift(date=latest_date, period=period)
+ else:
+ start_date = datetime.strptime(start_date, "%Y-%m-%d")
+
+ pipeline = [
+ {
+ "$match": {
+ "ticker": ticker
+ }
+ }, {
+ "$unwind": "$data"
+ }, {
+ "$match": {
+ "data.mdate": {
+ "$gt": start_date
+ }
+ }
+ }, {
+ "$project": {
+ "ticker": 1,
+ "data": 1,
+ "_id": 0
+ }
+ }
+ ]
+ datas = self.collection.aggregate(pipeline).to_list()
+
+ elements = [element['data'] for element in datas]
+
+ data_df = pd.DataFrame(elements).set_index('mdate')
+
+ return data_df
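
A minimal usage sketch for the new `TEJStockPriceFetcher`, mirroring the README example added further below; the MongoDB URI is a placeholder and the `TWN/APIPRCD` collection is assumed to be populated:

```Python
from neurostats_API import TEJStockPriceFetcher

fetcher = TEJStockPriceFetcher(
    mongo_uri="mongodb://localhost:27017",  # placeholder URI
    db_name="company",
    collection_name="TWN/APIPRCD"
)

# When period is given it takes precedence over start_date.
df = fetcher.get(ticker="2330", start_date="2024-10-01", period="3m")
print(df.head())  # OHLCV rows indexed by mdate
```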
@@ -0,0 +1,135 @@
+ TWN/AINVFQ1:
+ - coid
+ - mdate
+ - key3
+ - 'no'
+ - sem
+ - merg
+ - curr
+ - annd
+ - fin_ind
+ - bp11
+ - bp21
+ - bp22
+ - bp31
+ - bp41
+ - bp51
+ - bp53
+ - bp61
+ - bp62
+ - bp63
+ - bp64
+ - bp65
+ - bf11
+ - bf12
+ - bf21
+ - bf22
+ - bf41
+ - bf42
+ - bf43
+ - bf44
+ - bf45
+ - bf99
+ - bsca
+ - bsnca
+ - bsta
+ - bscl
+ - bsncl
+ - bstl
+ - bsse
+ - bslse
+ - debt
+ - quick
+ - ppe
+ - ar
+ - ip12
+ - ip22
+ - ip31
+ - ip51
+ - iv41
+ - if11
+ - isibt
+ - isni
+ - isnip
+ - eps
+ - ispsd
+ - gm
+ - opi
+ - nri
+ - ri
+ - nopi
+ - ebit
+ - cip31
+ - cscfo
+ - cscfi
+ - cscff
+ - person
+ - shares
+ - wavg
+ - taxrate
+ - r104
+ - r115
+ - r105
+ - r106
+ - r107
+ - r108
+ - r201
+ - r112
+ - r401
+ - r402
+ - r403
+ - r404
+ - r405
+ - r408
+ - r409
+ - r410
+ - r502
+ - r501
+ - r205
+ - r505
+ - r517
+ - r512
+ - r509
+ - r608
+ - r616
+ - r610
+ - r607
+ - r613
+ - r612
+ - r609
+ - r614
+ - r611
+ - r307
+ - r304
+ - r305
+ - r306
+ - r316
+ - r834
+ TWN/AFESTM1:
+ - coid
+ - mdate
+ - key3
+ - 'no'
+ - sem
+ - merg
+ - curr
+ - annd
+ - fin_ind
+ - ip12
+ - gm
+ - opi
+ - isibt
+ - isni
+ - isnip
+ - r306
+ - r316
+ - eps
+ - r105
+ - r106
+ - r107
+ - r108
+ - r401
+ - r402
+ - r403
+ - r404
+ - r405
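
A short sketch of how `FinanceReportFetcher.__init__` consumes this new index file (shown earlier in this diff); plain `yaml` is used here, assuming `StatsProcessor.load_yaml` behaves like a straightforward YAML load:

```Python
import yaml

# Build the per-collection column whitelist, as the fetcher now does.
with open("neurostats_API/tools/tej_db_index.yaml") as f:
    index_dict = yaml.safe_load(f)

check_index = set(index_dict["TWN/AINVFQ1"])
print("bp41" in check_index)        # True: a valid TEJ column
print("not_a_col" in check_index)   # False: get() would warn about it
```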
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: neurostats_API
- Version: 0.0.17
+ Version: 0.0.19
  Summary: The service of NeuroStats website
  Home-page: https://github.com/NeurowattStats/NeuroStats_API.git
  Author: JasonWang@Neurowatt
@@ -774,43 +774,73 @@ YoY_10 1.420500e-01 1.586797e-01 1.551364e-01
  }
  ```
 
+ ### 公司自結資料
+ ```Python
+ from neurostats_API import FinanceReportFetcher
 
- ## 版本紀錄
- ## 0.0.16
- - 處理ValueFetcher的error #issue76
+ fetcher = FinanceReportFetcher(
+ mongo_uri = mongo_uri,
+ db_name = db_name,
+ collection_name = collection_name
+ )
 
- - tej_fetcher新增 QOQ, YOY功能
+ data = fetcher.get(
+ ticker = "2330" # 任意的股票代碼
+ fetch_mode = fetcher.FetchMode.QOQ_NOCAL # 取得模式
+ start_date = "2005-01-01",
+ end_date = "2024-12-31",
+ report_type = "Q",
+ indexes = []
+ ) # -> pd.DataFrame or Dict[pd.DataFrame]
+ ```
+ - `ticker`: 股票代碼
 
- ## 0.0.15
- - TechFetcher中新增指數條件
+ - `fetch_mode` : 取得模式,為`fetcher.YOY_NOCAL` 或 `fetcher.QOQ_NOCAL`
+ - `YOY_NOCAL`: 以end_date為準,取得與end_date同季的歷年資料,時間範圍以start_date為起始
+ > 例如`start_date = "2020-07-01"`, `end_date = "2024-01-01"`,會回傳2020~2024的第一季資料
 
- - 新增tej_fetcher索取TEJ相關的資料
+ - `QOQ_NOCAL`: 時間範圍內的每季資料
 
- - package新增depensnecy,可以安裝需要的相關package
+ - `QOQ`: 時間範圍內每季的每個index的數值以及QoQ
 
- ## 0.0.14
- - 修改部分財報資料錯誤的乘以1000的問題
+ - `YoY`: 以end_date為準,取得與end_date同季的歷年資料以及成長率,時間範圍以start_date為起始
 
- - 新增例外處理: 若資料庫對於季資料一部分index缺失的情況下仍會盡可能去將資料蒐集並呈現
+ - `start_date`: 開始日期,不設定時預設為`2005-01-01`
 
- ### 0.0.13
- - value_fetcher 新增獲得一序列評價的功能
+ - `end_date`: 結束日期,不設定時預設為資料庫最新資料的日期
 
- ### 0.0.12
- - 新增資券變化(margin trading)
+ - `report_type`: 選擇哪種報告,預設為`Q`
+ - `A`: 當年累計
+ - `Q`: 當季數值
+ - `TTM`: 移動四季 (包括當季在內,往前累計四個季度)
 
- - 修改法人買賣(institution_trading)的query方式
+ - `indexes`: 選擇的column,需要以TEJ提供的欄位名稱為準,不提供時或提供`[]`會回傳全部column
+ - 範例輸入: `['bp41', 'bp51']`
 
- ### 0.0.11
- - 修復財務分析的千元計算問題
+ [TEJ資料集連結](https://tquant.tejwin.com/%E8%B3%87%E6%96%99%E9%9B%86/)
+ 請看 `公司自結數`
 
- - 籌碼面新增法人買賣(institution_trading)
+ ### 開高低收
+ ```Python
+ mongo_uri = <MongoDB 的 URI>
+ db_name = 'company' # 連接的DB名稱
+ collection_name = "TWN/APIPRCD" # 連接的collection對象
+ from neurostats_API import TEJStockPriceFetcher
 
- - 將財報三表與月營收的資料型態與數值做轉換(%轉字串, 千元乘以1000)
- ### 0.0.10
- - 更新指標的資料型態: 單位為千元乘以1000之後回傳整數
+ fetcher = TEJStockPriceFetcher(
+ mongo_uri = mongo_uri,
+ db_name = db_name,
+ collection_name = collection_name
+ )
 
- - 處理銀行公司在finanace_overview會報錯誤的問題(未完全解決,因銀行公司財報有許多名稱不同,目前都會顯示為None)
+ data = fetcher.get(
+ ticker = "2330" # 任意的股票代碼
+ start_date = "2005-01-01",
+ period = "3m"
+ ) # -> pd.DataFrame
+ ```
+ - `ticker`: 股票代碼
+ - `start_date`: 搜尋範圍的開始日期
+ - `period`: 搜尋的時間範圍長度
 
- ### 0.0.9
- - 更新指標的資料型態: 單位為日, %, 倍轉為字串
+ `period`與`start_date`同時存在時以period優先
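
To complement the QOQ_NOCAL example in the README section above, a short sketch of the YOY_NOCAL mode as that section describes it (same-season figures across years); the URI is a placeholder:

```Python
from neurostats_API import FinanceReportFetcher

fetcher = FinanceReportFetcher(
    mongo_uri="mongodb://localhost:27017",  # placeholder URI
    db_name="company",
    collection_name="TWN/AINVFQ1"
)

# Per the README: start_date 2020-07-01 with end_date 2024-01-01 returns
# the first-season figures for each year from 2020 through 2024.
data = fetcher.get(
    ticker="2330",
    fetch_mode=fetcher.FetchMode.YOY_NOCAL,
    start_date="2020-07-01",
    end_date="2024-01-01",
    report_type="Q",
    indexes=['bp41', 'bp51']
)
```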
@@ -1,30 +1,31 @@
- neurostats_API/__init__.py,sha256=5ToELVqNOIdVJrMj5G8JvbyRIjvo1FxcP6e-a-iMe1Y,261
+ neurostats_API/__init__.py,sha256=U-0Tn9McUXzABJINU15pYkGchFGI5R3njBMgn-F4CzM,288
  neurostats_API/cli.py,sha256=UJSWLIw03P24p-gkBb6JSEI5dW5U12UvLf1L8HjQD-o,873
  neurostats_API/main.py,sha256=QcsfmWivg2Dnqw3MTJWiI0QvEiRs0VuH-BjwQHFCv00,677
- neurostats_API/fetchers/__init__.py,sha256=B4aBwVzf_X-YieEf3fZteU0qmBPVIB9VjrmkyWhLK18,489
+ neurostats_API/fetchers/__init__.py,sha256=KCw-yRSDFa3fw83u73LJ9OVop7gRl_YQYlQq-cITxuo,511
  neurostats_API/fetchers/balance_sheet.py,sha256=sQv4Gk5uoKURLEdh57YknOQWiyVwaXJ2Mw75jxNqUS0,5804
- neurostats_API/fetchers/base.py,sha256=Rl88Mhvi0uFpPupUvy0iyS7IA4B3fnn6ovMNzS7EU34,5594
+ neurostats_API/fetchers/base.py,sha256=hlHtCDKpVFFs2n0uJ0yjz26dTzgc2ZtOKleiguT_HxY,6673
  neurostats_API/fetchers/cash_flow.py,sha256=TY7VAWVXkj5-mzH5Iu0sIE-oV8MvGmmDy0URNotNV1E,7614
  neurostats_API/fetchers/finance_overview.py,sha256=PxUdWY0x030olYMLcCHDBn068JLmCE2RTOce1dxs5vM,27753
  neurostats_API/fetchers/institution.py,sha256=UrcBc6t7u7CnEwUsf6YmLbbJ8VncdWpq8bCz17q2dgs,11168
  neurostats_API/fetchers/margin_trading.py,sha256=lQImtNdvaBoSlKhJvQ3DkH3HjSSgKRJz4ZZpyR5-Z4I,10433
  neurostats_API/fetchers/month_revenue.py,sha256=nixX2llzjCFr2m2YVjxrSfkBusnZPrPb2dRDq1XLGhw,4251
  neurostats_API/fetchers/profit_lose.py,sha256=EN9Y0iamcAaHMZdjHXO6b_2buLnORssf8ZS7A0hi74s,5896
- neurostats_API/fetchers/tech.py,sha256=8U6kn7cvWJsmKIMn_f2l6U9H_NBy_OwOXlS26XhFIv0,12926
- neurostats_API/fetchers/tej_finance_report.py,sha256=laXph2ca1LCFocZjjdvtzmm5fcUecHk2Gs5h6-XMSWY,12967
+ neurostats_API/fetchers/tech.py,sha256=116UQP00FyZUGwOttHsauJLxAMNE99rPL6nl4U3ETK4,13386
+ neurostats_API/fetchers/tej_finance_report.py,sha256=1q_c2B_Dd7qIagdlXt9eNT7O53JmsquCrgdi2OOeQ6w,14008
  neurostats_API/fetchers/value_invest.py,sha256=b_x2Dpgs8VBU5HdG8ocKtfIEkqhU-Q0S5n6RxuFuM2g,7467
  neurostats_API/tools/balance_sheet.yaml,sha256=6XygNG_Ybb1Xkk1e39LMLKr7ATvaCP3xxuwFbgNl6dA,673
  neurostats_API/tools/cash_flow_percentage.yaml,sha256=fk2Z4eb1JjGFvP134eJatHacB7BgTkBenhDJr83w8RE,1345
  neurostats_API/tools/finance_overview_dict.yaml,sha256=B9nV75StXkrF3yv2-eezzitlJ38eEK86RD_VY6588gQ,2884
  neurostats_API/tools/profit_lose.yaml,sha256=iyp9asYJ04vAxk_HBUDse_IBy5oVvYHpwsyACg5YEeg,3029
  neurostats_API/tools/seasonal_data_field_dict.txt,sha256=X8yc_el6p8BH_3FikTqBVFGsvWdXT6MHXLfKfi44334,8491
+ neurostats_API/tools/tej_db_index.yaml,sha256=lu-cmbB6dhx0eUlBSkyzXWqPKlwRtEvqlMTAh2y0oHs,969
  neurostats_API/utils/__init__.py,sha256=0tJCRmlJq2aDwcNNW-oEaA9H0OxTJMFvjpVYtG4AvZU,186
  neurostats_API/utils/calculate_value.py,sha256=lUKSsWU76XRmDUcmi4eDjoQxjb3vWpAAKInF9w49VNI,782
  neurostats_API/utils/data_process.py,sha256=A--dzOsu42jRxqqCD41gTtjE5rhEBYmhB6y-AnCvo5U,8986
  neurostats_API/utils/datetime.py,sha256=XJya4G8b_-ZOaBbMXgQjWh2MC4wc-o6goQ7EQJQMWrQ,773
  neurostats_API/utils/db_client.py,sha256=OYe6yazcR4Aa6jYmy47JrryUeh2NnKGqY2K_lSZe6i8,455
  neurostats_API/utils/fetcher.py,sha256=VbrUhjA-GG5AyjPX2SHtFIbZM4dm3jo0RgZzuCbb_Io,40927
- neurostats_API-0.0.17.dist-info/METADATA,sha256=_MqEN2Yi-tDE8i4UzX9WGUi25Z7SzyNgDR2kj0p2vhw,29848
- neurostats_API-0.0.17.dist-info/WHEEL,sha256=R06PA3UVYHThwHvxuRWMqaGcr-PuniXahwjmQRFMEkY,91
- neurostats_API-0.0.17.dist-info/top_level.txt,sha256=nSlQPMG0VtXivJyedp4Bkf86EOy2TpW10VGxolXrqnU,15
- neurostats_API-0.0.17.dist-info/RECORD,,
+ neurostats_API-0.0.19.dist-info/METADATA,sha256=2YG4AS_jlIEZ8gPBxd7rZ9CEOyuAqTiBStFDH4k9ntM,30981
+ neurostats_API-0.0.19.dist-info/WHEEL,sha256=R06PA3UVYHThwHvxuRWMqaGcr-PuniXahwjmQRFMEkY,91
+ neurostats_API-0.0.19.dist-info/top_level.txt,sha256=nSlQPMG0VtXivJyedp4Bkf86EOy2TpW10VGxolXrqnU,15
+ neurostats_API-0.0.19.dist-info/RECORD,,