neurostats-API 0.0.10__py3-none-any.whl → 0.0.11__py3-none-any.whl
- neurostats_API/__init__.py +1 -1
- neurostats_API/fetchers/__init__.py +1 -0
- neurostats_API/fetchers/balance_sheet.py +6 -3
- neurostats_API/fetchers/cash_flow.py +8 -6
- neurostats_API/fetchers/finance_overview.py +1 -3
- neurostats_API/fetchers/institution.py +136 -36
- neurostats_API/fetchers/month_revenue.py +23 -7
- neurostats_API/fetchers/profit_lose.py +7 -4
- {neurostats_API-0.0.10.dist-info → neurostats_API-0.0.11.dist-info}/METADATA +1 -1
- {neurostats_API-0.0.10.dist-info → neurostats_API-0.0.11.dist-info}/RECORD +12 -12
- {neurostats_API-0.0.10.dist-info → neurostats_API-0.0.11.dist-info}/WHEEL +0 -0
- {neurostats_API-0.0.10.dist-info → neurostats_API-0.0.11.dist-info}/top_level.txt +0 -0
neurostats_API/__init__.py
CHANGED
@@ -1 +1 @@
-__version__='0.0.10'
+__version__='0.0.11'
neurostats_API/fetchers/__init__.py
CHANGED
@@ -2,6 +2,7 @@ from .base import StatsDateTime, StatsFetcher
 from .balance_sheet import BalanceSheetFetcher
 from .cash_flow import CashFlowFetcher
 from .finance_overview import FinanceOverviewFetcher
+from .institution import InstitutionFetcher
 from .month_revenue import MonthRevenueFetcher
 from .profit_lose import ProfitLoseFetcher
 from .value_invest import ValueFetcher
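With InstitutionFetcher now re-exported from the fetchers package, it can be imported the same way as the existing fetchers. Constructing it needs the same ticker/db_client pair that StatsFetcher already takes; the concrete MongoDB client setup is outside this diff, so that part is only sketched in comments:

```python
# Import path follows the export added above (neurostats_API 0.0.11).
from neurostats_API.fetchers import InstitutionFetcher

# Construction and querying, sketched only: "2330" and db_client are
# placeholders for whatever ticker and MongoDB handle the other fetchers use.
# fetcher = InstitutionFetcher("2330", db_client)
# table_dict = fetcher.query_data()  # returns price / latest_trading / annual_trading tables
```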
neurostats_API/fetchers/balance_sheet.py
CHANGED
@@ -116,10 +116,13 @@ class BalanceSheetFetcher(StatsFetcher):
                 try: # table_dict[項目][(2020Q1, '%')]
                     if (item_name == 'percentage'):
                         if (isinstance(item, (float, int))):
-                            item =
-
+                            item = StatsProcessor.cal_non_percentage(item, to_str=True, postfix="%")
+                    elif ("YoY" in item_name):
                         if (isinstance(item, (float, int))):
-                            item =
+                            item = StatsProcessor.cal_percentage(item)
+                    else:
+                        if (isinstance(item, (float, int))):
+                            item = StatsProcessor.cal_non_percentage(item, postfix="千元")
                     table_dict[index_name][(time_index, item_name)] = item

                 except KeyError:
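The new branches route every balance-sheet cell through one of two StatsProcessor helpers: 'percentage' and YoY items become percentage strings, and everything else is tagged as a thousand-TWD (千元) amount. The helpers themselves are not part of this diff, so the sketch below is a minimal standalone approximation; its rounding and scaling choices are assumptions, not the library's actual implementation.

```python
# Standalone approximation of the formatting the new branches appear to apply.
# The real helpers live in StatsProcessor; the rounding/scaling below is assumed.

def cal_percentage(value):
    """Assumed behaviour: treat `value` as a ratio (e.g. YoY growth) -> 'x.xx%'."""
    if isinstance(value, (float, int)):
        return f"{round(value * 100, 2)}%"
    return value

def cal_non_percentage(value, to_str=False, postfix=""):
    """Assumed behaviour: round without rescaling, optionally append a unit."""
    if not isinstance(value, (float, int)):
        return value
    rounded = round(value, 2)
    return f"{rounded}{postfix}" if (to_str or postfix) else rounded

print(cal_non_percentage(35.12, to_str=True, postfix="%"))  # 'percentage' items -> '35.12%'
print(cal_percentage(0.0821))                               # 'YoY' items        -> '8.21%'
print(cal_non_percentage(1234567, postfix="千元"))           # other items        -> '1234567千元'
```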
neurostats_API/fetchers/cash_flow.py
CHANGED
@@ -132,14 +132,15 @@ class CashFlowFetcher(StatsFetcher):
                 table_dict[time_index][index_name]['value'] = value[
                     'value']
                 if (value['value']):
-                    table_dict[time_index][index_name][
-                        'percentage'] = np.round(
+                    ratio = np.round(
                         (value['value'] / cash_flow[
                             main_cash_flow_name]['value']) * 100, 2)
+                    table_dict[time_index][index_name][
+                        'percentage'] = f"{ratio}%"
                 else:
                     table_dict[time_index][index_name][
                         'percentage'] = None
-            except:
+            except: # 新增index再做一次
                 if (time_index not in table_dict.keys()):
                     table_dict[time_index] = dict()
                 table_dict[time_index][index_name] = dict()
@@ -147,14 +148,15 @@ class CashFlowFetcher(StatsFetcher):
                 table_dict[time_index][index_name]['value'] = value[
                     'value']
                 if (value['value']):
-                    table_dict[time_index][index_name][
-                        'percentage'] = np.round(
+                    ratio = np.round(
                         (value['value'] / cash_flow[
                             main_cash_flow_name]['value']) * 100, 2)
+                    table_dict[time_index][index_name][
+                        'percentage'] = f"{ratio}%"
                 else:
                     table_dict[time_index][index_name][
                         'percentage'] = None
-
+                table_dict[time_index][index_name]['value'] = StatsProcessor.cal_non_percentage(value['value'], postfix="千元")
                 try:
                     partial_cash_flow[time_index][index_name] = table_dict[
                         time_index][index_name]
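Both hunks replace the raw rounded number with a pre-rendered percentage string: the item's cash flow is divided by the main cash-flow line, scaled to percent, rounded with np.round, and wrapped in an f-string. A toy recreation of just that arithmetic, with keys and figures made up for illustration:

```python
import numpy as np

# Toy recreation of the percentage string the new code stores; only the
# arithmetic mirrors the diff -- the dict keys and numbers here are invented.
main_cash_flow_name = "main"
cash_flow = {
    "main": {"value": 500_000},   # e.g. the main cash-flow line, in thousand TWD
    "item": {"value": 125_000},   # one folded sub-item
}

value = cash_flow["item"]
ratio = np.round((value["value"] / cash_flow[main_cash_flow_name]["value"]) * 100, 2)
print(f"{ratio}%")   # -> 25.0%
```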
neurostats_API/fetchers/finance_overview.py
CHANGED
@@ -156,6 +156,7 @@ class FinanceOverviewProcessor(StatsProcessor):
                 finance_dict[index] = StatsProcessor.cal_non_percentage(finance_dict[index], postfix="千元")
             except Exception as e:
                 finance_dict[index] = None
+

     @classmethod
     def process_all(cls, finance_dict):
@@ -176,9 +177,6 @@ class FinanceOverviewProcessor(StatsProcessor):
             cls.cal_net_debt_to_equity_ratio, cls.cal_interest_coverage_ratio,
             cls.cal_debt_to_operating_cash_flow,
             cls.cal_debt_to_free_cash_flow, cls.cal_cash_flow_ratio,
-
-            # process to 千元
-            cls.process_thousand_dollar
         ]

         for method in methods:
neurostats_API/fetchers/institution.py
CHANGED
@@ -18,34 +18,33 @@ class InstitutionFetcher(StatsFetcher):
     """

     def __init__(self, ticker, db_client):
-        raise(NotImplementedError("InstitutionFetcher : Not done yet"))
         super().__init__(ticker, db_client)

     def prepare_query(self, start_date, end_date):
         pipeline = super().prepare_query()

-        target_query = {
-            "date": date,
-            "institution_trading": "$$target_season_data.institution_trading"
-        }
-
+        # target_query = {
+        #     "date": date,
+        #     "institution_trading": "$$target_season_data.institution_trading"
+        # }

         pipeline.append({
             "$project": {
                 "_id": 0,
                 "ticker": 1,
                 "company_name": 1,
-                "
+                "daily_data": {
                     "$map": {
                         "input": {
                             "$filter": {
                                 "input": "$daily_data",
                                 "as": "daily",
                                 "cond": {
-
-
-
-
+                                    "$and": [{
+                                        "$gte": ["$$daily.date", start_date]
+                                    }, {
+                                        "$lte": ["$$daily.date", end_date]
+                                    }]
                                 }
                             }
                         },
@@ -58,57 +57,158 @@ class InstitutionFetcher(StatsFetcher):

         return pipeline

-    def collect_data(self,
-        pipeline = self.prepare_query(
+    def collect_data(self, start_date, end_date):
+        pipeline = self.prepare_query(start_date, end_date)

         fetched_data = self.collection.aggregate(pipeline).to_list()

-        return
-
+        return fetched_data[-1]
+
     def query_data(self):
         try:
             latest_time = StatsDateTime.get_latest_time(
                 self.ticker, self.collection)['last_update_time']
-            latest_date = latest_time['
-
+            latest_date = latest_time['institution_trading'][
+                'latest_date']
+            date = latest_date.replace(hour=0,
+                                       minute=0,
+                                       second=0,
+                                       microsecond=0)
         except Exception as e:
-            print(
+            print(
+                f"No updated time for institution_trading in {self.ticker}, use current time instead"
+            )
             date = datetime.now(self.timezone)
-        date = date.replace(hour=0, minute=0, second=0, microsecond=0)
+            date = date.replace(hour=0, minute=0, second=0, microsecond=0)

         if (date.hour < 17): # 拿不到今天的資料
             date = date - timedelta(days=1)
-
-        start_date = start_date - timedelta(days=365)

-
+        start_date = date - timedelta(days=365)

-        daily_data =
+        daily_data = self.collect_data(start_date, end_date=date)
+
+        daily_data = sorted(daily_data['daily_data'],
+                            key=lambda x: x['date'],
+                            reverse=True)
+
+        table_dict = self.process_data(daily_data)
+
+        return table_dict

-        self.process_data(self.ticker, daily_data)
-
     def process_data(self, daily_data):
         table_dict = dict()

-        latest_data = daily_data[0]
+        latest_data = daily_data[0]
         yesterday_data = daily_data[1]

         # 交易價格與昨天交易
-
+        price_dict = {
             "open": latest_data['open'],
             'close': latest_data['close'],
-            'range': f"{latest_data['
-            'volumn': latest_data['
-
+            'range': f"{latest_data['low']}-{latest_data['high']}",
+            'volumn': latest_data['volume'] / 1000,
+            'last_open': yesterday_data['open'],
+            'last_close': yesterday_data['close'],
+            'last_range': f"{yesterday_data['low']}-{yesterday_data['high']}",
+            'last_volumn': yesterday_data['volume'] / 1000
         }
+        # 一年範圍
+        annual_lows = [data['low'] for data in daily_data]
+        annual_highs = [data['high'] for data in daily_data]
+        lowest = np.min(annual_lows).item()
+        highest = np.max(annual_highs).item()

-
-
-        # 一年內法人
-
-
-
+        price_dict['52weeks_range'] = f"{lowest}-{highest}"
+        table_dict['price'] = price_dict

+        # 發行股數 & 市值

+        # 今日法人買賣
+        table_dict['latest_trading'] = {
+            "date":
+            daily_data[0]['date'],
+            "table":
+            self.process_latest_trading(daily_data[0]['institution_trading'], daily_data[0]['volume'])
+        }
+        # 一年內法人
+        annual_trading = [
+            {
+                **data['institution_trading'],
+                "收盤價": int(data['close'])
+            }
+            for data in daily_data
+        ] # 將close也併入這個表格
+        annual_dates = [data['date'] for data in daily_data]
+        table_dict['annual_trading'] = self.process_annual_trading(
+            annual_dates, annual_trading)
+
+        return table_dict
+
+    def process_latest_trading(self, latest_trading, volume):
+        latest_table = {
+            "foreign": self.default_institution_chart(),
+            "mutual": self.default_institution_chart(),
+            "prop": self.default_institution_chart(),
+            "institutional_investor":self.default_institution_chart(),
+        }

+        for key in latest_trading.keys():
+            if (key.find("外陸資") >= 0 or key.find("外資") >= 0):
+                self.target_institution(latest_trading, latest_table['foreign'], key, volume)
+            elif (key.find("自營商") >= 0):
+                self.target_institution(latest_trading,latest_table['prop'], key, volume)
+            elif (key.find("投信") >= 0):
+                self.target_institution(latest_trading,latest_table['mutual'], key, volume)
+            elif (key.find("三大法人") >= 0):
+                self.target_institution(latest_trading,latest_table['institutional_investor'], key, volume)
+
+        frames = []
+        for category, trades in latest_table.items():
+            temp_df = pd.DataFrame(trades).T
+            temp_df['category'] = category
+            frames.append(temp_df)
+
+        latest_df = pd.concat(frames)
+        latest_df = latest_df.reset_index().rename(columns={'index': 'type'})
+        latest_df = latest_df[['type', 'category', 'stock', 'price', 'average_price', 'percentage']]
+
+        return latest_df
+
+    def process_annual_trading(self, dates, annual_tradings):
+        dates = [date.strftime("%m/%d") for date in dates]
+        return pd.DataFrame(annual_tradings, index=dates)
+
+    def target_institution(self, old_table, new_table, key, volume):
+        if (key.find("買進") >= 0):
+            self.cal_institution(old_table, new_table['buy'], key, volume)
+        elif (key.find("賣出") >= 0):
+            self.cal_institution(old_table, new_table['sell'], key, volume)
+        elif (key.find("買賣超") >= 0):
+            self.cal_institution(old_table, new_table['over_buy_sell'], key, volume)

+    def cal_institution(self, old_table, new_table, key, volume):
+        new_table['stock'] = np.round(old_table[key] / 1000, 2).item()
+        new_table['percentage'] = np.round((old_table[key] / volume) * 100, 2).item()
+
+    def default_institution_chart(self):
+        return {
+            "buy": {
+                "stock": 0,
+                "price": 0,
+                "average_price": 0,
+                "percentage": 0
+            },
+            "sell": {
+                "stock": 0,
+                "price": 0,
+                "average_price": 0,
+                "percentage": 0
+            },
+            "over_buy_sell": {
+                "stock": 0,
+                "price": 0,
+                "average_price": 0,
+                "percentage": 0
+            },
+        }
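The new process_latest_trading pipeline buckets the TWSE-style institutional-trading keys into foreign / mutual / prop / aggregate charts, converts share counts to thousands, expresses each figure as a share of the day's volume, and flattens the result into a DataFrame. The snippet below re-runs that classification outside the class and without MongoDB, so it can be read and executed on its own; the key names and figures are fabricated for illustration.

```python
import numpy as np
import pandas as pd

# Fabricated single-day institution_trading record (share counts) and volume.
latest_trading = {
    "外資買進股數": 1_200_000,
    "外資賣出股數": 800_000,
    "外資買賣超股數": 400_000,
    "投信買賣超股數": 50_000,
    "三大法人買賣超股數": 450_000,
}
volume = 5_000_000  # total shares traded that day (also fabricated)

def default_institution_chart():
    return {slot: {"stock": 0, "price": 0, "average_price": 0, "percentage": 0}
            for slot in ("buy", "sell", "over_buy_sell")}

def cal_institution(old_table, new_table, key, volume):
    new_table["stock"] = np.round(old_table[key] / 1000, 2).item()                 # thousands of shares
    new_table["percentage"] = np.round((old_table[key] / volume) * 100, 2).item()  # share of volume

def target_institution(old_table, new_table, key, volume):
    if "買進" in key:
        cal_institution(old_table, new_table["buy"], key, volume)
    elif "賣出" in key:
        cal_institution(old_table, new_table["sell"], key, volume)
    elif "買賣超" in key:
        cal_institution(old_table, new_table["over_buy_sell"], key, volume)

latest_table = {name: default_institution_chart()
                for name in ("foreign", "mutual", "prop", "institutional_investor")}

for key in latest_trading:
    if "外陸資" in key or "外資" in key:
        target_institution(latest_trading, latest_table["foreign"], key, volume)
    elif "自營商" in key:
        target_institution(latest_trading, latest_table["prop"], key, volume)
    elif "投信" in key:
        target_institution(latest_trading, latest_table["mutual"], key, volume)
    elif "三大法人" in key:
        target_institution(latest_trading, latest_table["institutional_investor"], key, volume)

# Same flattening as process_latest_trading: one row per (category, buy/sell/over_buy_sell).
frames = [pd.DataFrame(trades).T.assign(category=category)
          for category, trades in latest_table.items()]
latest_df = pd.concat(frames).reset_index().rename(columns={"index": "type"})
print(latest_df[["type", "category", "stock", "price", "average_price", "percentage"]])
```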
neurostats_API/fetchers/month_revenue.py
CHANGED
@@ -64,6 +64,17 @@ class MonthRevenueFetcher(StatsFetcher):
     def process_data(self, fetched_data):

         monthly_data = fetched_data['monthly_data']
+        for data in monthly_data:
+            for key, value in data.items():
+                if ("YoY" in key):
+                    data[key] = StatsProcessor.cal_percentage(value)
+                elif ("ratio" in key or 'percentage' in key):
+                    data[key] = StatsProcessor.cal_non_percentage(value,
+                                                                  to_str=True,
+                                                                  postfix="%")
+                elif (key not in ('year', 'month')):
+                    data[key] = StatsProcessor.cal_non_percentage(value,
+                                                                  postfix="千元")
         target_month = monthly_data[0]['month']
         monthly_df = pd.DataFrame(monthly_data)
         target_month_df = monthly_df[monthly_df['month'] == target_month]
@@ -77,21 +88,26 @@ class MonthRevenueFetcher(StatsFetcher):

         grand_total_df.rename(index={target_month: f"grand_total"},
                               inplace=True)
-        month_revenue_df = month_revenue_df.sort_index(ascending
+        month_revenue_df = month_revenue_df.sort_index(ascending=False)
         month_revenue_df = pd.concat([grand_total_df, month_revenue_df],
                                      axis=0)

-        fetched_data['month_revenue'] = month_revenue_df[sorted(
+        fetched_data['month_revenue'] = month_revenue_df[sorted(
+            month_revenue_df.columns, reverse=True)]
         # 歷年月營收
         fetched_data[
            'this_month_revenue_over_years'] = target_month_df.set_index(
-                "year")[[
-
+                "year")[[
+                    "revenue", "revenue_increment_ratio", "YoY_1", "YoY_3",
+                    "YoY_5", "YoY_10"
+                ]].T
         # 歷年營收成長量
         fetched_data['grand_total_over_years'] = target_month_df.set_index(
-            "year")[[
-
-
+            "year")[[
+                "grand_total", "grand_total_increment_ratio",
+                "grand_total_YoY_1", "grand_total_YoY_3", "grand_total_YoY_5",
+                "grand_total_YoY_10"
+            ]].T

         fetched_data.pop("monthly_data")

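The new pre-processing loop decides per key which StatsProcessor helper formats each monthly field: YoY columns, ratio/percentage columns, and everything else except year/month (treated as a thousand-TWD amount). A minimal sketch of that routing on a fabricated record follows; the helpers are named but not reimplemented, since their internals are not part of this diff.

```python
# Routing sketch only: the record below is fabricated for illustration.
record = {"year": 2024, "month": 9, "revenue": 1_234_567,
          "revenue_increment_ratio": 3.2, "YoY_1": 0.12}

for key, value in record.items():
    if "YoY" in key:
        rule = "StatsProcessor.cal_percentage(value)"
    elif "ratio" in key or "percentage" in key:
        rule = "StatsProcessor.cal_non_percentage(value, to_str=True, postfix='%')"
    elif key not in ("year", "month"):
        rule = "StatsProcessor.cal_non_percentage(value, postfix='千元')"
    else:
        rule = "left unchanged"
    print(f"{key:<25} -> {rule}")
```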
neurostats_API/fetchers/profit_lose.py
CHANGED
@@ -115,12 +115,15 @@ class ProfitLoseFetcher(StatsFetcher):
             for index_name, value_dict in profit_lose.items():
                 # (2020Q1, 項目, 金額或%)
                 for item_name, item in value_dict.items():
-                    if (
+                    if ('percentage' in item_name):
                         if (isinstance(item, (float, int))):
-                            item =
-
+                            item = StatsProcessor.cal_non_percentage(item, to_str=True, postfix="%")
+                    elif ('YoY' in item_name):
                         if (isinstance(item, (float, int))):
-                            item =
+                            item = StatsProcessor.cal_percentage(item)
+                    else:
+                        if (isinstance(item, (float, int))):
+                            item = StatsProcessor.cal_non_percentage(item, postfix="千元")
                     try:
                         table_dict[index_name][(time_index, item_name)] = item

{neurostats_API-0.0.10.dist-info → neurostats_API-0.0.11.dist-info}/RECORD
CHANGED
@@ -1,14 +1,14 @@
-neurostats_API/__init__.py,sha256=
+neurostats_API/__init__.py,sha256=oR5iCRZvbIRoODxS1VocreTo19N5L8Omvx_AgflzOO0,20
 neurostats_API/cli.py,sha256=UJSWLIw03P24p-gkBb6JSEI5dW5U12UvLf1L8HjQD-o,873
 neurostats_API/main.py,sha256=QcsfmWivg2Dnqw3MTJWiI0QvEiRs0VuH-BjwQHFCv00,677
-neurostats_API/fetchers/__init__.py,sha256=
-neurostats_API/fetchers/balance_sheet.py,sha256=
+neurostats_API/fetchers/__init__.py,sha256=27kdeBuM7dNBRcIyQ1u863CYw0P_DQz-I1G6iSFDq-c,357
+neurostats_API/fetchers/balance_sheet.py,sha256=sQv4Gk5uoKURLEdh57YknOQWiyVwaXJ2Mw75jxNqUS0,5804
 neurostats_API/fetchers/base.py,sha256=NW2SFzrimyAIrdJx1LVmTazelyZOAtcj54kJKHc4Vaw,1662
-neurostats_API/fetchers/cash_flow.py,sha256=
-neurostats_API/fetchers/finance_overview.py,sha256=
-neurostats_API/fetchers/institution.py,sha256=
-neurostats_API/fetchers/month_revenue.py,sha256=
-neurostats_API/fetchers/profit_lose.py,sha256=
+neurostats_API/fetchers/cash_flow.py,sha256=TY7VAWVXkj5-mzH5Iu0sIE-oV8MvGmmDy0URNotNV1E,7614
+neurostats_API/fetchers/finance_overview.py,sha256=PxUdWY0x030olYMLcCHDBn068JLmCE2RTOce1dxs5vM,27753
+neurostats_API/fetchers/institution.py,sha256=aODtsFyQcnD9PnMeaehMAN9wZdZ2a0EqSSZO57dY9RE,7691
+neurostats_API/fetchers/month_revenue.py,sha256=nixX2llzjCFr2m2YVjxrSfkBusnZPrPb2dRDq1XLGhw,4251
+neurostats_API/fetchers/profit_lose.py,sha256=xlLNsGSy4Azf4HyZyYaX3dFad-ACO-vuQToBooZi1_w,5698
 neurostats_API/fetchers/tech.py,sha256=wH1kkqiETQhF0HAhk-UIiucnZ3EiL85Q-yMWCcVOiFM,11395
 neurostats_API/fetchers/value_invest.py,sha256=O5IKC8Nl7p5-E-1zoyAyWtiDznaxNemeabanmaHDdJs,3327
 neurostats_API/tools/balance_sheet.yaml,sha256=yTxrWh7m4K3LnaNunETidfNzl6S4Bf58VIg9U38XShQ,648
@@ -21,7 +21,7 @@ neurostats_API/utils/data_process.py,sha256=mDznLqAAZ7gFX3LlJkJvtrMPt38Lh5-NONqg
 neurostats_API/utils/datetime.py,sha256=XJya4G8b_-ZOaBbMXgQjWh2MC4wc-o6goQ7EQJQMWrQ,773
 neurostats_API/utils/db_client.py,sha256=OYe6yazcR4Aa6jYmy47JrryUeh2NnKGqY2K_lSZe6i8,455
 neurostats_API/utils/fetcher.py,sha256=VbrUhjA-GG5AyjPX2SHtFIbZM4dm3jo0RgZzuCbb_Io,40927
-neurostats_API-0.0.
-neurostats_API-0.0.
-neurostats_API-0.0.
-neurostats_API-0.0.
+neurostats_API-0.0.11.dist-info/METADATA,sha256=Tddw5SxRekTkTtemDXgYPoiJf9sxICyRkdlFAbvniSM,18529
+neurostats_API-0.0.11.dist-info/WHEEL,sha256=bFJAMchF8aTQGUgMZzHJyDDMPTO3ToJ7x23SLJa1SVo,92
+neurostats_API-0.0.11.dist-info/top_level.txt,sha256=nSlQPMG0VtXivJyedp4Bkf86EOy2TpW10VGxolXrqnU,15
+neurostats_API-0.0.11.dist-info/RECORD,,
{neurostats_API-0.0.10.dist-info → neurostats_API-0.0.11.dist-info}/WHEEL
File without changes
{neurostats_API-0.0.10.dist-info → neurostats_API-0.0.11.dist-info}/top_level.txt
File without changes