neurostats-API 0.0.23b0__tar.gz → 0.0.23b2__tar.gz
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/PKG-INFO +2 -2
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/README.md +1 -1
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/__init__.py +1 -1
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/institution.py +139 -93
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/margin_trading.py +121 -91
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/tej_finance_report.py +5 -2
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/value_invest.py +7 -4
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/tools/tej_db/tej_db_percent_index.yaml +0 -3
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/tools/tej_db/tej_db_skip_index.yaml +12 -1
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/tools/tej_db/tej_db_thousand_index.yaml +0 -4
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API.egg-info/PKG-INFO +2 -2
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/setup.py +1 -1
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/test/test_fetchers.py +6 -3
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/MANIFEST.in +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/cli.py +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/__init__.py +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/balance_sheet.py +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/base.py +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/cash_flow.py +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/finance_overview.py +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/month_revenue.py +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/profit_lose.py +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/tech.py +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/main.py +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/tools/company_list/tw.json +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/tools/tej_db/tej_db_index.yaml +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/tools/twse/balance_sheet.yaml +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/tools/twse/cash_flow_percentage.yaml +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/tools/twse/finance_overview_dict.yaml +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/tools/twse/profit_lose.yaml +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/tools/twse/seasonal_data_field_dict.txt +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/utils/__init__.py +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/utils/calculate_value.py +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/utils/data_process.py +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/utils/datetime.py +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/utils/db_client.py +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API.egg-info/SOURCES.txt +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API.egg-info/dependency_links.txt +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API.egg-info/requires.txt +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API.egg-info/top_level.txt +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/setup.cfg +0 -0
- {neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/test/test_tej.py +0 -0
{neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: neurostats_API
-Version: 0.0.23b0
+Version: 0.0.23b2
 Summary: The service of NeuroStats website
 Home-page: https://github.com/NeurowattStats/NeuroStats_API.git
 Author: JasonWang@Neurowatt
@@ -89,7 +89,7 @@ pip install neurostats-API
 ```Python
 >>> import neurostats_API
 >>> print(neurostats_API.__version__)
-0.0.23b0
+0.0.23b2
 ```

 ### 得到最新一期的評價資料與歷年評價
{neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/institution.py
RENAMED
@@ -1,5 +1,5 @@
 from .base import StatsFetcher
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, date
 import json
 import numpy as np
 import pandas as pd
@@ -28,53 +28,65 @@ class InstitutionFetcher(StatsFetcher):
         #     "institution_trading": "$$target_season_data.institution_trading"
         # }

+        pipeline.append(
+            {
+                "$project": {
+                    "_id": 0,
+                    "ticker": 1,
+                    "company_name": 1,
+                    "daily_data": {
+                        "$map": {
+                            "input": {
+                                "$filter": {
+                                    "input": "$daily_data",
+                                    "as": "daily",
+                                    "cond": {
+                                        "$and": [
+                                            {
+                                                "$gte":
+                                                ["$$daily.date", start_date]
+                                            }, {
+                                                "$lte":
+                                                ["$$daily.date", end_date]
+                                            }
+                                        ]
+                                    }
+                                }
+                            },
+                            "as": "target_daily_data",
+                            "in": "$$target_daily_data"
+                        }
+                    },
+                    "institution_trading": {
+                        "$map": {
+                            "input": {
+                                "$filter": {
+                                    "input": "$institution_trading",
+                                    "as": "institution",
+                                    "cond": {
+                                        "$and": [
+                                            {
+                                                "$gte": [
+                                                    "$$institution.date",
+                                                    start_date
+                                                ]
+                                            }, {
+                                                "$lte": [
+                                                    "$$institution.date",
+                                                    end_date
+                                                ]
+                                            }
+                                        ]
+                                    }
+                                }
+                            },
+                            "as": "target_institution_data",
+                            "in": "$$target_institution_data"
+                        }
+                    }
+                }
+            }
+        )

         return pipeline

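The `$project` stage added above trims each document's embedded arrays to a one-year window before the data ever leaves MongoDB. For readers unfamiliar with the pattern, here is a minimal, self-contained pymongo sketch of the same idea; the connection string, database, and collection names are placeholders, and the package's real pipeline additionally wraps the `$filter` in a `$map` and projects `institution_trading` the same way.

```python
from datetime import datetime, timedelta
from pymongo import MongoClient

# Placeholder connection details; the package wires this up through utils/db_client.py
client = MongoClient("mongodb://localhost:27017")
collection = client["stats"]["twse_daily"]

end_date = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
start_date = end_date - timedelta(days=365)

pipeline = [
    {"$match": {"ticker": "2330"}},
    {
        "$project": {
            "_id": 0,
            "ticker": 1,
            # keep only the array elements whose date falls inside [start_date, end_date]
            "daily_data": {
                "$filter": {
                    "input": "$daily_data",
                    "as": "daily",
                    "cond": {
                        "$and": [
                            {"$gte": ["$$daily.date", start_date]},
                            {"$lte": ["$$daily.date", end_date]},
                        ]
                    },
                }
            },
        }
    },
]

docs = list(collection.aggregate(pipeline))
```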
@@ -88,33 +100,37 @@ class InstitutionFetcher(StatsFetcher):
     def query_data(self):
         try:
             latest_time = StatsDateTime.get_latest_time(
+                self.ticker, self.collection
+            )['last_update_time']
             latest_date = latest_time['institution_trading']['latest_date']
+            end_date = latest_date.replace(
+                hour=0, minute=0, second=0, microsecond=0
+            )
         except Exception as e:
             print(
                 f"No updated time for institution_trading in {self.ticker}, use current time instead"
             )
             end_date = datetime.now(self.timezone)
+            end_date = end_date.replace(
+                hour=0, minute=0, second=0, microsecond=0
+            )

+        if (end_date.hour < 17): # 拿不到今天的資料
             end_date = end_date - timedelta(days=1)

         start_date = end_date - timedelta(days=365)

         fetched_data = self.collect_data(start_date, end_date)

+        fetched_data['daily_data'] = sorted(
+            fetched_data['daily_data'], key=lambda x: x['date'], reverse=True
+        )
+
         fetched_data['institution_trading'] = sorted(
             fetched_data['institution_trading'],
             key=lambda x: x['date'],
+            reverse=True
+        ) if (fetched_data['institution_trading']) else []

         table_dict = self.process_data(fetched_data)

@@ -131,16 +147,16 @@ class InstitutionFetcher(StatsFetcher):

         # 交易價格與昨天交易
         price_dict = {
-            "open": latest_daily_data['open'],
-            'close': latest_daily_data['close'],
+            "open": round(latest_daily_data['open'], 2),
+            'close': round(latest_daily_data['close'], 2),
             'range':
-                f"{latest_daily_data['low']} - {latest_daily_data['high']}",
-            'volume': latest_daily_data['volume'] / 1000,
-            'last_open': yesterday_daily_data['open'],
-            'last_close': yesterday_daily_data['close'],
+                f"{latest_daily_data['low']:.2f} - {latest_daily_data['high']:.2f}",
+            'volume': round(latest_daily_data['volume'] / 1000, 2),
+            'last_open': round(yesterday_daily_data['open'], 2),
+            'last_close': round(yesterday_daily_data['close'], 2),
             'last_range':
-                f"{yesterday_daily_data['low']} - {yesterday_daily_data['high']}",
-            'last_volume': yesterday_daily_data['volume'] / 1000
+                f"{yesterday_daily_data['low']:.2f} - {yesterday_daily_data['high']:.2f}",
+            'last_volume': round(yesterday_daily_data['volume'] / 1000, 2)
         }
         # 一年範圍
         annual_lows = [data['low'] for data in daily_datas]
@@ -148,20 +164,36 @@ class InstitutionFetcher(StatsFetcher):
         lowest = np.min(annual_lows).item()
         highest = np.max(annual_highs).item()

-        price_dict['52weeks_range'] = f"{lowest} - {highest}"
+        price_dict['52weeks_range'] = f"{lowest:.2f} - {highest:.2f}"
         table_dict['price'] = price_dict

         # 發行股數 & 市值
         # 沒有實作

+        table_dict['latest_trading'] = {
+            'date': date.today(),
+            'table': pd.DataFrame(
+                columns = ['category', 'variable', 'close', 'volume']
+            )
+        }
+        table_dict['annual_trading'] = pd.DataFrame(
+            columns = ['date', 'close', 'volume']
+        )
+
+        if (not institution_tradings):
+            return table_dict
+
         # 今日法人買賣
-        latest_trading = institution_tradings[0]
+        latest_trading = institution_tradings[0] if (institution_tradings) else {
+            'date': date.today()
+        }
         table_dict['latest_trading'] = {
             "date":
                 latest_trading['date'],
             "table":
+                self.process_latest_trading(
+                    latest_trading, latest_daily_data['volume']
+                )
         }
         # 一年內法人
         annual_dates = [
@@ -182,17 +214,20 @@ class InstitutionFetcher(StatsFetcher):
             for data in institution_tradings
         }

+        annual_trading_dates = sorted(list(annual_trading.keys()))
+        annual_trading_skip = {
             date: {
+                "close": annual_closes.get(date, 0.0),
+                "volume": annual_volumes.get(date, 0.0),
                 **annual_trading[date]
             }
+            for date in annual_trading_dates
         }

         table_dict['annual_trading'] = self.process_annual_trading(
-            annual_dates,
+            annual_dates, annual_trading_skip
+        )
+

         return table_dict

@@ -206,30 +241,36 @@ class InstitutionFetcher(StatsFetcher):

         for key in latest_trading.keys():
             if (key.find("外陸資") >= 0 or key.find("外資") >= 0):
+                self.target_institution(
+                    latest_trading, latest_table['foreign'], key, volume
+                )
             elif (key.find("自營商") >= 0):
+                self.target_institution(
+                    latest_trading, latest_table['prop'], key, volume
+                )
             elif (key.find("投信") >= 0):
+                self.target_institution(
+                    latest_trading, latest_table['mutual'], key, volume
+                )
             elif (key.find("三大法人") >= 0):
+                self.target_institution(
+                    latest_trading, latest_table['institutional_investor'], key,
+                    volume
+                )
         # 計算合計
         for unit in ['stock', 'percentage']:
             # 買進總和
             latest_table['institutional_investor']['buy'][unit] = (
                 latest_table['foreign']['buy'][unit] +
                 latest_table['prop']['buy'][unit] +
+                latest_table['mutual']['buy'][unit]
+            )
             # 賣出總和
             latest_table['institutional_investor']['sell'][unit] = (
                 latest_table['foreign']['sell'][unit] +
                 latest_table['prop']['sell'][unit] +
+                latest_table['mutual']['sell'][unit]
+            )

         frames = []
         for category, trades in latest_table.items():
@@ -243,18 +284,22 @@ class InstitutionFetcher(StatsFetcher):
             'type', 'category', 'stock', 'price', 'average_price', 'percentage'
         ]]

+        latest_df = pd.melt(
+            latest_df,
+            id_vars=['type', 'category'],
+            var_name='variable',
+            value_name='value'
+        )

+        latest_df = latest_df.pivot_table(
+            index=['category', 'variable'],
+            columns='type',
+            values='value',
+            aggfunc='first'
+        )

         # 重設列名,去除多層索引
-        latest_df.columns.name = None
+        latest_df.columns.name = None  # 去除列名稱
         latest_df = latest_df.reset_index()

         return latest_df
@@ -268,8 +313,9 @@ class InstitutionFetcher(StatsFetcher):
             elif (key.find("賣出") >= 0):
                 self.cal_institution(old_table, new_table['sell'], key, volume)
             elif (key.find("買賣超") >= 0):
+                self.cal_institution(
+                    old_table, new_table['over_buy_sell'], key, volume
+                )

     def cal_institution(self, old_table, new_table, key, volume):
         new_table['stock'] = np.round(old_table[key] / 1000, 2).item()
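Taken together, the institution.py changes round the displayed prices to two decimals and make `query_data` tolerant of tickers with no institutional-trading rows: `latest_trading` and `annual_trading` are now pre-seeded with empty tables and the method returns early when there is nothing to aggregate. A hedged usage sketch follows; the constructor arguments mirror the pattern in test/test_fetchers.py, the DBClient import path is assumed from the package layout, and the connection string is a placeholder.

```python
from neurostats_API.fetchers import InstitutionFetcher
from neurostats_API.utils import DBClient  # import path assumed from the package layout

db_client = DBClient("mongodb://<user>:<password>@<host>:27017/<db>").get_client()
fetcher = InstitutionFetcher(ticker="2330", db_client=db_client)

data = fetcher.query_data()
print(data["price"]["52weeks_range"])          # formatted as "<low> - <high>" with two decimals
print(data["latest_trading"]["table"].head())  # empty DataFrame when no institutional data exists
```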
{neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/margin_trading.py
RENAMED
@@ -1,5 +1,5 @@
 from .base import StatsFetcher
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, date
 import json
 import numpy as np
 import pandas as pd
@@ -23,70 +23,84 @@ class MarginTradingFetcher(StatsFetcher):
     def prepare_query(self, start_date, end_date):
         pipeline = super().prepare_query()

+        pipeline.append(
+            {
+                "$project": {
+                    "_id": 0,
+                    "ticker": 1,
+                    "company_name": 1,
+                    "daily_data": {
+                        "$map": {
+                            "input": {
+                                "$filter": {
+                                    "input": "$daily_data",
+                                    "as": "daliy",
+                                    "cond": {
+                                        "$and": [
+                                            {
+                                                "$gte":
+                                                ["$$daliy.date", start_date]
+                                            }, {
+                                                "$lte":
+                                                ["$$daliy.date", end_date]
+                                            }
+                                        ]
+                                    }
+                                }
+                            },
+                            "as": "target_daliy_data",
+                            "in": "$$target_daliy_data"
+                        }
+                    },
+                    "margin_trading": {
+                        "$map": {
+                            "input": {
+                                "$filter": {
+                                    "input": "$margin_trading",
+                                    "as": "margin",
+                                    "cond": {
+                                        "$and": [
+                                            {
+                                                "$gte":
+                                                ["$$margin.date", start_date]
+                                            }, {
+                                                "$lte":
+                                                ["$$margin.date", end_date]
+                                            }
+                                        ]
+                                    }
+                                }
+                            },
+                            "as": "target_margin_data",
+                            "in": "$$target_margin_data"
+                        }
+                    },
+                    "security_lending": {
+                        "$map": {
+                            "input": {
+                                "$filter": {
+                                    "input": "$security_lending",
+                                    "as": "lending",
+                                    "cond": {
+                                        "$and": [
+                                            {
+                                                "$gte":
+                                                ["$$lending.date", start_date]
+                                            }, {
+                                                "$lte":
+                                                ["$$lending.date", end_date]
+                                            }
+                                        ]
+                                    }
+                                }
+                            },
+                            "as": "target_lending_data",
+                            "in": "$$target_lending_data"
+                        }
+                    }
+                }
+            }
+        )

         return pipeline

@@ -100,39 +114,42 @@ class MarginTradingFetcher(StatsFetcher):
     def query_data(self):
         try:
             latest_time = StatsDateTime.get_latest_time(
+                self.ticker, self.collection
+            )['last_update_time']
             latest_date = latest_time['margin_trading']['latest_date']
+            end_date = latest_date.replace(
+                hour=0, minute=0, second=0, microsecond=0
+            )
         except Exception as e:
             print(
                 f"No updated time for institution_trading in {self.ticker}, use current time instead"
             )
             end_date = datetime.now(self.timezone)
+            end_date = end_date.replace(
+                hour=0, minute=0, second=0, microsecond=0
+            )

-        if (end_date.hour < 22):
+        if (end_date.hour < 22): # 拿不到今天的資料
             end_date = end_date - timedelta(days=1)

         start_date = end_date - timedelta(days=365)

         fetched_data = self.collect_data(start_date, end_date)

+        fetched_data['daily_data'] = sorted(
+            fetched_data['daily_data'], key=lambda x: x['date'], reverse=True
+        )
+        fetched_data['margin_trading'] = sorted(
+            fetched_data['margin_trading'],
+            key=lambda x: x['date'],
+            reverse=True
+        ) if (fetched_data['margin_trading']) else []
+
         fetched_data['security_lending'] = sorted(
             fetched_data['security_lending'],
             key=lambda x: x['date'],
+            reverse=True
+        )if (fetched_data['security_lending']) else []

         table_dict = self.process_data(fetched_data)

@@ -149,19 +166,18 @@ class MarginTradingFetcher(StatsFetcher):
         price_dict = {
             "open": latest_data['open'],
             'close': latest_data['close'],
-            'range': f"{latest_data['low']} - {latest_data['high']}",
-            'volume': latest_data['volume'] / 1000,
+            'range': f"{latest_data['low']:.2f} - {latest_data['high']:.2f}",
+            'volume': round(float(latest_data['volume']) / 1000, 2),
             'last_open': yesterday_data['open'],
             'last_close': yesterday_data['close'],
-            'last_volume': yesterday_data['volume'] / 1000
+            'last_range': f"{yesterday_data['low']:.2f} - {yesterday_data['high']:.2f}",
+            'last_volume': round(float(yesterday_data['volume']) / 1000, 2)
         }
         annual_lows = [data['low'] for data in daily_datas]
         annual_highs = [data['high'] for data in daily_datas]
         lowest = np.min(annual_lows).item()
         highest = np.max(annual_highs).item()
-        price_dict['52weeks_range'] = f"{lowest} - {highest}"
+        price_dict['52weeks_range'] = f"{lowest:.2f} - {highest:.2f}"

         return_dict['price'] = price_dict

@@ -169,6 +185,18 @@ class MarginTradingFetcher(StatsFetcher):
         margin_trading = fetched_data['margin_trading']
         security_lending = fetched_data['security_lending']

+        return_dict['margin_trading'] = pd.DataFrame()
+        return_dict['stock_lending'] = pd.DataFrame()
+        return_dict['latest_trading'] = {
+            'date': date.today(),
+            "margin_trading": pd.DataFrame(),
+            "stock_lending": pd.DataFrame()
+        }
+        return_dict['annual_margin'] = pd.DataFrame()
+        return_dict['security_offset'] = 0.0
+        if (not margin_trading):
+            return return_dict
+
         latest_margin_date = margin_trading[0]['date']
         latest_lending_date = security_lending[0]['date']
         ## 融資融券
@@ -176,7 +204,8 @@ class MarginTradingFetcher(StatsFetcher):
         for trading in margin_trading:
             trading['financing']['現償'] = trading['financing'].pop('現金償還')
             trading['short_selling']['現償'] = trading['short_selling'].pop(
-                '現券償還'
+                '現券償還'
+            )
             ### 轉換
         latest_margin_trading = margin_trading[0]
         latest_margin_trading_df = {
@@ -185,7 +214,8 @@ class MarginTradingFetcher(StatsFetcher):
             if (isinstance(sub_dict, dict))
         }
         latest_margin_trading_df = pd.DataFrame.from_dict(
-            latest_margin_trading_df
+            latest_margin_trading_df
+        )

         ## 借券表格
         latest_stock_lending = security_lending[0]['stock_lending']
@@ -195,9 +225,9 @@ class MarginTradingFetcher(StatsFetcher):
             for type_name, value in latest_stock_lending.items()
         }
         latest_stock_lending.pop("前日餘額")
-        latest_stock_lending_df = pd.DataFrame.from_dict(
+        latest_stock_lending_df = pd.DataFrame.from_dict(
+            latest_stock_lending, orient="index", columns=['stock_lending']
+        )

         latest_dict = {
             "date": latest_margin_date,
@@ -262,14 +292,14 @@ class MarginTradingFetcher(StatsFetcher):

         annual_dict = {
             date: {
+                "close": close_prices.get(date, 0.0),
+                "volume": volumes.get(date, 0.0),
                 **financings[date],
                 **short_sellings[date],
                 **stock_lendings[date],
-                "資券互抵":security_offsets[date]
+                "資券互抵": security_offsets[date]
             }
+            for date in financings.keys()
         }

         annual_table = pd.DataFrame.from_dict(annual_dict)
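The margin_trading.py revision mirrors the institution.py one: prices are formatted to two decimals, sorting tolerates empty result lists, and the return dict is pre-seeded with empty frames so callers can rely on every key existing even when the ticker has no margin data. A minimal sketch of that defensive pattern, with names mirroring the diff rather than the package's exact helper:

```python
from datetime import date
import pandas as pd

def build_margin_tables(margin_trading: list) -> dict:
    # Seed every key with an empty placeholder first, as the diff above does.
    return_dict = {
        "margin_trading": pd.DataFrame(),
        "stock_lending": pd.DataFrame(),
        "latest_trading": {
            "date": date.today(),
            "margin_trading": pd.DataFrame(),
            "stock_lending": pd.DataFrame(),
        },
        "annual_margin": pd.DataFrame(),
        "security_offset": 0.0,
    }
    if not margin_trading:      # nothing fetched for this ticker -> placeholders only
        return return_dict
    # ...otherwise fill the tables from the fetched records...
    return return_dict
```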
{neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/tej_finance_report.py
RENAMED
@@ -176,7 +176,10 @@ class FinanceReportFetcher(BaseTEJFetcher):
         fetched_data = self.collection.aggregate(pipeline).to_list()
         data_dict = self.transform_value(
             StatsProcessor.list_of_dict_to_dict(
-                fetched_data,
+                data_list=fetched_data,
+                keys=["year", "season"],
+                delimeter="Q",
+                data_key=report_type
             )
         )

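StatsProcessor.list_of_dict_to_dict itself is not part of this diff, so its exact behaviour is not visible here. Judging only from the call site, it plausibly groups the per-season documents into a dict keyed by a `year`/`season` composite such as `2024Q1`; the sketch below is a guess at that behaviour for orientation, not the package's implementation.

```python
def list_of_dict_to_dict(data_list, keys, delimeter, data_key):
    """Hypothetical re-implementation inferred from the call site only."""
    result = {}
    for entry in data_list:
        # keys=["year", "season"] and delimeter="Q" would yield keys like "2024Q1"
        composite_key = delimeter.join(str(entry[k]) for k in keys)
        result[composite_key] = entry.get(data_key)
    return result
```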
@@ -241,7 +244,7 @@ class FinanceReportFetcher(BaseTEJFetcher):
                     "$gt": start_year,
                     "$lt": end_year
                 },
+                "data.season": start_season
             }
         else:
             match_stage = {
{neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/fetchers/value_invest.py
RENAMED
@@ -54,7 +54,8 @@ class ValueFetcher(StatsFetcher):
                 "EV_OPI": "$$daily_item.EV_OPI",
                 "EV_EBIT": "$$daily_item.EV_EBIT",
                 "EV_EBITDA": "$$daily_item.EV_EBITDA",
-                "EV_S": "$$daily_item.EV_S"
+                "EV_S": "$$daily_item.EV_S",
+                "Yield": "$$daily_item.Yield"
             }
         }
     },
@@ -110,7 +111,8 @@ class ValueFetcher(StatsFetcher):
                 EV_OPI,
                 EV_EBIT,
                 EV_EBITDA,
-                EV_S
+                EV_S,
+                Yield
             }
             """

@@ -142,7 +144,8 @@ class ValueFetcher(StatsFetcher):
                 "EV_OPI": "$$daily.EV_OPI",
                 "EV_EBIT": "$$daily.EV_EBIT",
                 "EV_EBITDA": "$$daily.EV_EBITDA",
-                "EV_S": "$$daily.EV_S"
+                "EV_S": "$$daily.EV_S",
+                "Yield": "$$daily.Yield"
             }
         }
     }
@@ -153,7 +156,7 @@ class ValueFetcher(StatsFetcher):
         fetched_data = self.collection.aggregate(pipeline).to_list()
         fetched_data = fetched_data[0]

+        value_keys = ["P_E", "P_FCF", "P_B", "P_S", "EV_OPI", "EV_EBIT", "EV_EBITDA", "EV_S", "Yield"]
         return_dict = {value_key: dict() for value_key in value_keys}

         for value_key in value_keys:
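The net effect of the four value_invest.py edits is that `Yield` is projected through the same stages as the other valuation ratios and joins `value_keys`, so it surfaces as one more per-key series in whatever dict the fetcher returns. A small, illustrative consumer is sketched below; the shape of the toy document is an assumption, not the package's actual schema.

```python
value_keys = ["P_E", "P_FCF", "P_B", "P_S", "EV_OPI", "EV_EBIT", "EV_EBITDA", "EV_S", "Yield"]

# fetched[0] stands in for the single aggregated document the fetcher reads
# (toy data; the real document shape is defined by the pipeline above).
fetched = [{"Yield": {"2024-01-02": 1.8}, "P_E": {"2024-01-02": 22.5}}]
return_dict = {key: fetched[0].get(key, {}) for key in value_keys}

print(return_dict["Yield"])   # the new series, alongside the existing ratios
```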
{neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API/tools/tej_db/tej_db_skip_index.yaml
RENAMED
@@ -9,6 +9,13 @@ TWN/AINVFQ1:
   - annd
   - fin_ind
   - eps
+  - r307
+  - r305
+  - r306
+  - r316
+  - r609
+  - r614
+  - r611
 TWN/AFESTM1:
   - coid
   - mdate
@@ -19,4 +26,8 @@ TWN/AFESTM1:
   - curr
   - annd
   - fin_ind
-  - eps
+  - eps
+  - r307
+  - r305
+  - r306
+  - r316
{neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/neurostats_API.egg-info/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: neurostats_API
-Version: 0.0.23b0
+Version: 0.0.23b2
 Summary: The service of NeuroStats website
 Home-page: https://github.com/NeurowattStats/NeuroStats_API.git
 Author: JasonWang@Neurowatt
@@ -89,7 +89,7 @@ pip install neurostats-API
 ```Python
 >>> import neurostats_API
 >>> print(neurostats_API.__version__)
-0.0.23b0
+0.0.23b2
 ```

 ### 得到最新一期的評價資料與歷年評價
{neurostats_api-0.0.23b0 → neurostats_api-0.0.23b2}/test/test_fetchers.py
RENAMED
@@ -12,8 +12,11 @@ pp = PrettyPrinter(
     indent=2
 )

+
+company_list = {
+    "1260": "富味香",
+    "2330": "台積電"
+}
 db_client = DBClient("mongodb://neurowatt:neurodb123@db.neurowatt.ai:27017/neurowatt").get_client()

 def test_value_serie():
@@ -222,7 +225,7 @@ def test_institution_trading():

 def test_margin_trading():
     from neurostats_API.fetchers import MarginTradingFetcher
+    fetcher = MarginTradingFetcher(ticker='1260', db_client=db_client)

     fetched_data = fetcher.query_data()

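The updated test points the margin-trading fetcher at '1260' (富味香), a thinner ticker that exercises the new empty-data defaults. A trimmed-down version of the same call outside pytest would look roughly like this; the DBClient import path is assumed from the package layout and the connection string is redacted.

```python
from neurostats_API.fetchers import MarginTradingFetcher
from neurostats_API.utils import DBClient  # import path assumed from the package layout

db_client = DBClient("mongodb://<user>:<password>@<host>:27017/<db>").get_client()

fetcher = MarginTradingFetcher(ticker="1260", db_client=db_client)
fetched_data = fetcher.query_data()

print(fetched_data["price"])           # open/close/range/volume summary
print(fetched_data["latest_trading"])  # empty DataFrames when the ticker has no margin data
```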