neurostats-API 0.0.10.tar.gz → 0.0.11.tar.gz
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/PKG-INFO +1 -1
- neurostats_API-0.0.11/neurostats_API/__init__.py +1 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/fetchers/__init__.py +1 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/fetchers/balance_sheet.py +6 -3
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/fetchers/cash_flow.py +8 -6
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/fetchers/finance_overview.py +1 -3
- neurostats_API-0.0.11/neurostats_API/fetchers/institution.py +214 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/fetchers/month_revenue.py +23 -7
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/fetchers/profit_lose.py +7 -4
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API.egg-info/PKG-INFO +1 -1
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/setup.py +1 -1
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/test/test_fetchers.py +9 -0
- neurostats_API-0.0.10/neurostats_API/__init__.py +0 -1
- neurostats_API-0.0.10/neurostats_API/fetchers/institution.py +0 -114
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/MANIFEST.in +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/README.md +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/cli.py +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/fetchers/base.py +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/fetchers/tech.py +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/fetchers/value_invest.py +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/main.py +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/tools/balance_sheet.yaml +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/tools/cash_flow_percentage.yaml +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/tools/finance_overview_dict.yaml +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/tools/profit_lose.yaml +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/tools/seasonal_data_field_dict.txt +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/utils/__init__.py +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/utils/data_process.py +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/utils/datetime.py +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/utils/db_client.py +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API/utils/fetcher.py +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API.egg-info/SOURCES.txt +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API.egg-info/dependency_links.txt +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/neurostats_API.egg-info/top_level.txt +0 -0
- {neurostats_API-0.0.10 → neurostats_API-0.0.11}/setup.cfg +0 -0
neurostats_API-0.0.11/neurostats_API/__init__.py (added)
@@ -0,0 +1 @@
+__version__='0.0.11'
neurostats_API/fetchers/__init__.py
@@ -2,6 +2,7 @@ from .base import StatsDateTime, StatsFetcher
 from .balance_sheet import BalanceSheetFetcher
 from .cash_flow import CashFlowFetcher
 from .finance_overview import FinanceOverviewFetcher
+from .institution import InstitutionFetcher
 from .month_revenue import MonthRevenueFetcher
 from .profit_lose import ProfitLoseFetcher
 from .value_invest import ValueFetcher
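The only change here is re-exporting the rewritten InstitutionFetcher from the package namespace. A minimal usage sketch, under the assumption that db_client is the same MongoDB handle the package's other fetchers (and test/test_fetchers.py) already use; its construction is not part of this diff:

from neurostats_API.fetchers import InstitutionFetcher

def fetch_institution_table(ticker, db_client):
    # db_client: the MongoDB handle expected by the StatsFetcher base class,
    # built the same way as in test/test_fetchers.py (not shown in this diff).
    fetcher = InstitutionFetcher(ticker=ticker, db_client=db_client)
    # query_data() returns a dict with 'price', 'latest_trading' and
    # 'annual_trading' entries (see the new institution.py below).
    return fetcher.query_data()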
neurostats_API/fetchers/balance_sheet.py
@@ -116,10 +116,13 @@ class BalanceSheetFetcher(StatsFetcher):
                 try: # table_dict[項目][(2020Q1, '%')]
                     if (item_name == 'percentage'):
                         if (isinstance(item, (float, int))):
-                            item =
-
+                            item = StatsProcessor.cal_non_percentage(item, to_str=True, postfix="%")
+                    elif ("YoY" in item_name):
                         if (isinstance(item, (float, int))):
-                            item =
+                            item = StatsProcessor.cal_percentage(item)
+                    else:
+                        if (isinstance(item, (float, int))):
+                            item = StatsProcessor.cal_non_percentage(item, postfix="千元")
                     table_dict[index_name][(time_index, item_name)] = item

                 except KeyError:
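The new branches route every cell through the StatsProcessor helpers: explicit percentage columns become "%"-suffixed strings, YoY columns go through cal_percentage, and everything else is rendered in thousand NTD. A self-contained rendition of that three-way dispatch on one fabricated cell set, with the helpers replaced by plain f-string formatting (the real StatsProcessor may round or scale differently):

row = {"percentage": 45.6, "YoY_1": 0.123, "total_assets": 1234567}
formatted = {}
for item_name, item in row.items():
    if item_name == 'percentage':
        formatted[item_name] = f"{item}%"                    # "45.6%"
    elif "YoY" in item_name:
        formatted[item_name] = f"{round(item * 100, 2)}%"    # "12.3%" (assumes YoY is stored as a ratio)
    else:
        formatted[item_name] = f"{item}千元"                  # "1234567千元"
print(formatted)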
neurostats_API/fetchers/cash_flow.py
@@ -132,14 +132,15 @@ class CashFlowFetcher(StatsFetcher):
                     table_dict[time_index][index_name]['value'] = value[
                         'value']
                     if (value['value']):
-
-                            'percentage'] = np.round(
+                        ratio = np.round(
                             (value['value'] / cash_flow[
                                 main_cash_flow_name]['value']) * 100, 2)
+                        table_dict[time_index][index_name][
+                            'percentage'] = f"{ratio}%"
                     else:
                         table_dict[time_index][index_name][
                             'percentage'] = None
-                except:
+                except: # 新增index再做一次
                     if (time_index not in table_dict.keys()):
                         table_dict[time_index] = dict()
                         table_dict[time_index][index_name] = dict()
@@ -147,14 +148,15 @@ class CashFlowFetcher(StatsFetcher):
                     table_dict[time_index][index_name]['value'] = value[
                         'value']
                     if (value['value']):
-
-                            'percentage'] = np.round(
+                        ratio = np.round(
                             (value['value'] / cash_flow[
                                 main_cash_flow_name]['value']) * 100, 2)
+                        table_dict[time_index][index_name][
+                            'percentage'] = f"{ratio}%"
                     else:
                         table_dict[time_index][index_name][
                             'percentage'] = None
-
+                    table_dict[time_index][index_name]['value'] = StatsProcessor.cal_non_percentage(value['value'], postfix="千元")
                     try:
                         partial_cash_flow[time_index][index_name] = table_dict[
                             time_index][index_name]
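Both cash-flow hunks make the same change: the share of the main cash-flow line is still computed with np.round, but it is now stored as a "%" string instead of a bare number, and the raw value is re-rendered in thousand NTD. A tiny worked example of the ratio formatting (numbers are made up):

import numpy as np

item_value = 250_000          # cash flow of one line item
main_value = 1_000_000        # value of the main cash-flow line

ratio = np.round((item_value / main_value) * 100, 2)
percentage = f"{ratio}%"      # "25.0%", matching the new f"{ratio}%" storage
print(percentage)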
neurostats_API/fetchers/finance_overview.py
@@ -156,6 +156,7 @@ class FinanceOverviewProcessor(StatsProcessor):
                 finance_dict[index] = StatsProcessor.cal_non_percentage(finance_dict[index], postfix="千元")
             except Exception as e:
                 finance_dict[index] = None
+


     @classmethod
     def process_all(cls, finance_dict):
@@ -176,9 +177,6 @@ class FinanceOverviewProcessor(StatsProcessor):
             cls.cal_net_debt_to_equity_ratio, cls.cal_interest_coverage_ratio,
             cls.cal_debt_to_operating_cash_flow,
             cls.cal_debt_to_free_cash_flow, cls.cal_cash_flow_ratio,
-
-            # process to 千元
-            cls.process_thousand_dollar
         ]

         for method in methods:
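process_thousand_dollar is removed from the post-processing list; the thousand-NTD rendering is handled by the cal_non_percentage call visible in the previous hunk. The surrounding pattern, a list of classmethods applied in order to the same dict, looks roughly like the sketch below; the names are stand-ins, not the real FinanceOverviewProcessor methods:

class OverviewSketch:
    @classmethod
    def cal_margin(cls, finance_dict):
        finance_dict["gross_margin"] = finance_dict["gross"] / finance_dict["revenue"]

    @classmethod
    def render_revenue(cls, finance_dict):
        finance_dict["revenue"] = f"{finance_dict['revenue']}千元"

    @classmethod
    def process_all(cls, finance_dict):
        methods = [cls.cal_margin, cls.render_revenue]
        for method in methods:   # same loop shape as the hunk's "for method in methods:"
            method(finance_dict)

d = {"revenue": 1000, "gross": 420}
OverviewSketch.process_all(d)
print(d)   # {'revenue': '1000千元', 'gross': 420, 'gross_margin': 0.42}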
neurostats_API-0.0.11/neurostats_API/fetchers/institution.py (added)
@@ -0,0 +1,214 @@
+from .base import StatsFetcher
+from datetime import datetime, timedelta
+import json
+import numpy as np
+import pandas as pd
+from ..utils import StatsDateTime, StatsProcessor
+import importlib.resources as pkg_resources
+import yaml
+
+
+class InstitutionFetcher(StatsFetcher):
+    """
+    iFa -> 交易資訊 -> 法人買賣
+
+    包括:
+    1. 當日交易
+    2. 一年內交易
+    """
+
+    def __init__(self, ticker, db_client):
+        super().__init__(ticker, db_client)
+
+    def prepare_query(self, start_date, end_date):
+        pipeline = super().prepare_query()
+
+        # target_query = {
+        #     "date": date,
+        #     "institution_trading": "$$target_season_data.institution_trading"
+        # }
+
+        pipeline.append({
+            "$project": {
+                "_id": 0,
+                "ticker": 1,
+                "company_name": 1,
+                "daily_data": {
+                    "$map": {
+                        "input": {
+                            "$filter": {
+                                "input": "$daily_data",
+                                "as": "daily",
+                                "cond": {
+                                    "$and": [{
+                                        "$gte": ["$$daily.date", start_date]
+                                    }, {
+                                        "$lte": ["$$daily.date", end_date]
+                                    }]
+                                }
+                            }
+                        },
+                        "as": "target_daily_data",
+                        "in": "$$target_daily_data"
+                    }
+                }
+            }
+        })
+
+        return pipeline
+
+    def collect_data(self, start_date, end_date):
+        pipeline = self.prepare_query(start_date, end_date)
+
+        fetched_data = self.collection.aggregate(pipeline).to_list()
+
+        return fetched_data[-1]
+
+    def query_data(self):
+        try:
+            latest_time = StatsDateTime.get_latest_time(
+                self.ticker, self.collection)['last_update_time']
+            latest_date = latest_time['institution_trading'][
+                'latest_date']
+            date = latest_date.replace(hour=0,
+                                       minute=0,
+                                       second=0,
+                                       microsecond=0)
+        except Exception as e:
+            print(
+                f"No updated time for institution_trading in {self.ticker}, use current time instead"
+            )
+            date = datetime.now(self.timezone)
+            date = date.replace(hour=0, minute=0, second=0, microsecond=0)
+
+        if (date.hour < 17):  # 拿不到今天的資料
+            date = date - timedelta(days=1)
+
+        start_date = date - timedelta(days=365)
+
+        daily_data = self.collect_data(start_date, end_date=date)
+
+        daily_data = sorted(daily_data['daily_data'],
+                            key=lambda x: x['date'],
+                            reverse=True)
+
+        table_dict = self.process_data(daily_data)
+
+        return table_dict
+
+    def process_data(self, daily_data):
+        table_dict = dict()
+
+        latest_data = daily_data[0]
+        yesterday_data = daily_data[1]
+
+        # 交易價格與昨天交易
+        price_dict = {
+            "open": latest_data['open'],
+            'close': latest_data['close'],
+            'range': f"{latest_data['low']}-{latest_data['high']}",
+            'volumn': latest_data['volume'] / 1000,
+            'last_open': yesterday_data['open'],
+            'last_close': yesterday_data['close'],
+            'last_range': f"{yesterday_data['low']}-{yesterday_data['high']}",
+            'last_volumn': yesterday_data['volume'] / 1000
+        }
+        # 一年範圍
+        annual_lows = [data['low'] for data in daily_data]
+        annual_highs = [data['high'] for data in daily_data]
+        lowest = np.min(annual_lows).item()
+        highest = np.max(annual_highs).item()
+
+        price_dict['52weeks_range'] = f"{lowest}-{highest}"
+        table_dict['price'] = price_dict
+
+        # 發行股數 & 市值
+
+        # 今日法人買賣
+        table_dict['latest_trading'] = {
+            "date":
+            daily_data[0]['date'],
+            "table":
+            self.process_latest_trading(daily_data[0]['institution_trading'], daily_data[0]['volume'])
+        }
+        # 一年內法人
+        annual_trading = [
+            {
+                **data['institution_trading'],
+                "收盤價": int(data['close'])
+            }
+            for data in daily_data
+        ]  # 將close也併入這個表格
+        annual_dates = [data['date'] for data in daily_data]
+        table_dict['annual_trading'] = self.process_annual_trading(
+            annual_dates, annual_trading)
+
+        return table_dict
+
+    def process_latest_trading(self, latest_trading, volume):
+        latest_table = {
+            "foreign": self.default_institution_chart(),
+            "mutual": self.default_institution_chart(),
+            "prop": self.default_institution_chart(),
+            "institutional_investor": self.default_institution_chart(),
+        }
+
+        for key in latest_trading.keys():
+            if (key.find("外陸資") >= 0 or key.find("外資") >= 0):
+                self.target_institution(latest_trading, latest_table['foreign'], key, volume)
+            elif (key.find("自營商") >= 0):
+                self.target_institution(latest_trading, latest_table['prop'], key, volume)
+            elif (key.find("投信") >= 0):
+                self.target_institution(latest_trading, latest_table['mutual'], key, volume)
+            elif (key.find("三大法人") >= 0):
+                self.target_institution(latest_trading, latest_table['institutional_investor'], key, volume)
+
+        frames = []
+        for category, trades in latest_table.items():
+            temp_df = pd.DataFrame(trades).T
+            temp_df['category'] = category
+            frames.append(temp_df)
+
+        latest_df = pd.concat(frames)
+        latest_df = latest_df.reset_index().rename(columns={'index': 'type'})
+        latest_df = latest_df[['type', 'category', 'stock', 'price', 'average_price', 'percentage']]
+
+        return latest_df
+
+    def process_annual_trading(self, dates, annual_tradings):
+        dates = [date.strftime("%m/%d") for date in dates]
+        return pd.DataFrame(annual_tradings, index=dates)
+
+    def target_institution(self, old_table, new_table, key, volume):
+        if (key.find("買進") >= 0):
+            self.cal_institution(old_table, new_table['buy'], key, volume)
+        elif (key.find("賣出") >= 0):
+            self.cal_institution(old_table, new_table['sell'], key, volume)
+        elif (key.find("買賣超") >= 0):
+            self.cal_institution(old_table, new_table['over_buy_sell'], key, volume)
+
+    def cal_institution(self, old_table, new_table, key, volume):
+        new_table['stock'] = np.round(old_table[key] / 1000, 2).item()
+        new_table['percentage'] = np.round((old_table[key] / volume) * 100, 2).item()
+
+    def default_institution_chart(self):
+        return {
+            "buy": {
+                "stock": 0,
+                "price": 0,
+                "average_price": 0,
+                "percentage": 0
+            },
+            "sell": {
+                "stock": 0,
+                "price": 0,
+                "average_price": 0,
+                "percentage": 0
+            },
+            "over_buy_sell": {
+                "stock": 0,
+                "price": 0,
+                "average_price": 0,
+                "percentage": 0
+            },
+        }
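The bulk of process_latest_trading() is DataFrame reshaping: one default chart per institution category, transposed so that buy/sell/over_buy_sell become rows, then stacked and re-ordered. A self-contained sketch of that shaping with made-up numbers (only two categories shown):

import pandas as pd

def default_chart():
    return {side: {"stock": 0, "price": 0, "average_price": 0, "percentage": 0}
            for side in ("buy", "sell", "over_buy_sell")}

latest_table = {"foreign": default_chart(), "mutual": default_chart()}
latest_table["foreign"]["buy"]["stock"] = 1234.56        # thousand shares, made up
latest_table["foreign"]["buy"]["percentage"] = 3.21      # % of the day's volume, made up

frames = []
for category, trades in latest_table.items():
    temp_df = pd.DataFrame(trades).T                     # rows: buy / sell / over_buy_sell
    temp_df["category"] = category
    frames.append(temp_df)

latest_df = pd.concat(frames).reset_index().rename(columns={"index": "type"})
print(latest_df[["type", "category", "stock", "price", "average_price", "percentage"]])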
neurostats_API/fetchers/month_revenue.py
@@ -64,6 +64,17 @@ class MonthRevenueFetcher(StatsFetcher):
     def process_data(self, fetched_data):

         monthly_data = fetched_data['monthly_data']
+        for data in monthly_data:
+            for key, value in data.items():
+                if ("YoY" in key):
+                    data[key] = StatsProcessor.cal_percentage(value)
+                elif ("ratio" in key or 'percentage' in key):
+                    data[key] = StatsProcessor.cal_non_percentage(value,
+                                                                  to_str=True,
+                                                                  postfix="%")
+                elif (key not in ('year', 'month')):
+                    data[key] = StatsProcessor.cal_non_percentage(value,
+                                                                  postfix="千元")
         target_month = monthly_data[0]['month']
         monthly_df = pd.DataFrame(monthly_data)
         target_month_df = monthly_df[monthly_df['month'] == target_month]
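Each raw monthly record is now normalized in place before the DataFrame assembly that continues in the next hunk: YoY keys go through cal_percentage, ratio/percentage keys get a "%" suffix, and every other field except year and month is rendered in thousand NTD. A self-contained rendition on a fabricated record, with the StatsProcessor calls replaced by plain formatting (the real helpers may round or scale differently):

record = {"year": 2024, "month": 10, "revenue": 314159,
          "revenue_increment_ratio": 3.5, "YoY_1": 0.12}

for key, value in record.items():
    if "YoY" in key:
        record[key] = f"{round(value * 100, 2)}%"   # assumes YoY is stored as a ratio
    elif "ratio" in key or "percentage" in key:
        record[key] = f"{value}%"
    elif key not in ("year", "month"):
        record[key] = f"{value}千元"

print(record)
# {'year': 2024, 'month': 10, 'revenue': '314159千元',
#  'revenue_increment_ratio': '3.5%', 'YoY_1': '12.0%'}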
@@ -77,21 +88,26 @@ class MonthRevenueFetcher(StatsFetcher):

         grand_total_df.rename(index={target_month: f"grand_total"},
                               inplace=True)
-        month_revenue_df = month_revenue_df.sort_index(ascending
+        month_revenue_df = month_revenue_df.sort_index(ascending=False)
         month_revenue_df = pd.concat([grand_total_df, month_revenue_df],
                                      axis=0)

-        fetched_data['month_revenue'] = month_revenue_df[sorted(
+        fetched_data['month_revenue'] = month_revenue_df[sorted(
+            month_revenue_df.columns, reverse=True)]
         # 歷年月營收
         fetched_data[
             'this_month_revenue_over_years'] = target_month_df.set_index(
-                "year")[[
-
+                "year")[[
+                    "revenue", "revenue_increment_ratio", "YoY_1", "YoY_3",
+                    "YoY_5", "YoY_10"
+                ]].T
         # 歷年營收成長量
         fetched_data['grand_total_over_years'] = target_month_df.set_index(
-                "year")[[
-
-
+                "year")[[
+                    "grand_total", "grand_total_increment_ratio",
+                    "grand_total_YoY_1", "grand_total_YoY_3", "grand_total_YoY_5",
+                    "grand_total_YoY_10"
+                ]].T

         fetched_data.pop("monthly_data")

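The reordering is now explicit: months are sorted newest-first with a grand_total row on top, and the year columns are selected newest-first. A tiny demo of the row/column ordering with made-up numbers:

import pandas as pd

month_revenue_df = pd.DataFrame({2023: [10, 11, 12], 2024: [13, 14, None]},
                                index=[9, 10, 11])                  # month number as index
grand_total_df = pd.DataFrame({2023: [33], 2024: [27]}, index=["grand_total"])

month_revenue_df = month_revenue_df.sort_index(ascending=False)     # 11, 10, 9
month_revenue_df = pd.concat([grand_total_df, month_revenue_df], axis=0)

print(month_revenue_df[sorted(month_revenue_df.columns, reverse=True)])  # 2024 column first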
neurostats_API/fetchers/profit_lose.py
@@ -115,12 +115,15 @@ class ProfitLoseFetcher(StatsFetcher):
             for index_name, value_dict in profit_lose.items():
                 # (2020Q1, 項目, 金額或%)
                 for item_name, item in value_dict.items():
-                    if (
+                    if ('percentage' in item_name):
                         if (isinstance(item, (float, int))):
-                            item =
-
+                            item = StatsProcessor.cal_non_percentage(item, to_str=True, postfix="%")
+                    elif ('YoY' in item_name):
                         if (isinstance(item, (float, int))):
-                            item =
+                            item = StatsProcessor.cal_percentage(item)
+                    else:
+                        if (isinstance(item, (float, int))):
+                            item = StatsProcessor.cal_non_percentage(item, postfix="千元")
                     try:
                         table_dict[index_name][(time_index, item_name)] = item

test/test_fetchers.py
@@ -105,6 +105,7 @@ def test_balance_sheet():

 def test_finance_overview():
     from neurostats_API.fetchers import FinanceOverviewFetcher
+    company_list = {"2330": "台積電"}
     for ticker in company_list.keys():
         fetcher = FinanceOverviewFetcher(ticker=ticker, db_client=db_client)
         fetched_data = fetcher.query_data()
@@ -183,4 +184,12 @@ def test_finance_overview():
         assert key in fetched_data['seasonal_data'], f"{key} not found in fetched_data"
         # assert fetched_data['seasonal_data'][0][key] is not None, f"{key} is None"

+    pp.pprint(fetched_data)
+
+def test_institution_trading():
+    from neurostats_API.fetchers import InstitutionFetcher
+    fetcher = InstitutionFetcher(ticker='2330', db_client = db_client)
+
+    fetched_data = fetcher.query_data()
+
     pp.pprint(fetched_data)
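Like the other tests in this file, test_institution_trading needs the MongoDB-backed db_client handle the existing tests already use, so it only runs against a reachable database. It can be executed on its own with pytest's node-id selection, e.g. pytest test/test_fetchers.py::test_institution_trading -s, where -s keeps the pp.pprint output visible instead of letting pytest capture it.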
neurostats_API-0.0.10/neurostats_API/__init__.py (removed)
@@ -1 +0,0 @@
-__version__='0.0.10'
neurostats_API-0.0.10/neurostats_API/fetchers/institution.py (removed)
@@ -1,114 +0,0 @@
-from .base import StatsFetcher
-from datetime import datetime, timedelta
-import json
-import numpy as np
-import pandas as pd
-from ..utils import StatsDateTime, StatsProcessor
-import importlib.resources as pkg_resources
-import yaml
-
-
-class InstitutionFetcher(StatsFetcher):
-    """
-    iFa -> 交易資訊 -> 法人買賣
-
-    包括:
-    1. 當日交易
-    2. 一年內交易
-    """
-
-    def __init__(self, ticker, db_client):
-        raise(NotImplementedError("InstitutionFetcher : Not done yet"))
-        super().__init__(ticker, db_client)
-
-    def prepare_query(self, start_date, end_date):
-        pipeline = super().prepare_query()
-
-        target_query = {
-            "date": date,
-            "institution_trading": "$$target_season_data.institution_trading"
-        }
-
-
-        pipeline.append({
-            "$project": {
-                "_id": 0,
-                "ticker": 1,
-                "company_name": 1,
-                "profit_loses": {
-                    "$map": {
-                        "input": {
-                            "$filter": {
-                                "input": "$daily_data",
-                                "as": "daily",
-                                "cond": {
-                                    "$and": [
-                                        {"$gte": ["$$daily.date", start_date]},
-                                        {"$lte": ["$$daily.date", end_date]}
-                                    ]
-                                }
-                            }
-                        },
-                        "as": "target_daily_data",
-                        "in": "$$target_daily_data"
-                    }
-                }
-            }
-        })
-
-        return pipeline
-
-    def collect_data(self, date):
-        pipeline = self.prepare_query(date)
-
-        fetched_data = self.collection.aggregate(pipeline).to_list()
-
-        return fetch_data[-1]
-
-    def query_data(self):
-        try:
-            latest_time = StatsDateTime.get_latest_time(
-                self.ticker, self.collection)['last_update_time']
-            latest_date = latest_time['daily_data']['institution_trading']['last_update']
-            date = latest_date.replace(hour=0, minute=0, second=0, microsecond=0)
-        except Exception as e:
-            print(f"No updated time for institution_trading in {self.ticker}, use current time instead")
-            date = datetime.now(self.timezone)
-            date = date.replace(hour=0, minute=0, second=0, microsecond=0)
-
-        if (date.hour < 17): # 拿不到今天的資料
-            date = date - timedelta(days=1)
-
-        start_date = start_date - timedelta(days=365)
-
-        daily_data = self.collect_data(date)
-
-        daily_data = sorted(daily_data['daily_data'], key = lambda x : x['date'], reverse = True)
-
-        self.process_data(self.ticker, daily_data)
-
-    def process_data(self, daily_data):
-        table_dict = dict()
-
-        latest_data = daily_data[0]
-        yesterday_data = daily_data[1]
-
-        # 交易價格與昨天交易
-        table_dict = {
-            "open": latest_data['open'],
-            'close': latest_data['close'],
-            'range': f"{latest_data['high']}-{latest_data['low']}",
-            'volumn': latest_data['volumn'] / 1000
-
-        }
-
-        # 今日法人買賣
-
-        # 一年內法人
-
-
-
-
-
-
-