borsapy 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- borsapy/__init__.py +134 -0
- borsapy/_models/__init__.py +1 -0
- borsapy/_providers/__init__.py +5 -0
- borsapy/_providers/base.py +94 -0
- borsapy/_providers/bist_index.py +150 -0
- borsapy/_providers/btcturk.py +230 -0
- borsapy/_providers/canlidoviz.py +773 -0
- borsapy/_providers/dovizcom.py +869 -0
- borsapy/_providers/dovizcom_calendar.py +276 -0
- borsapy/_providers/dovizcom_tahvil.py +172 -0
- borsapy/_providers/hedeffiyat.py +376 -0
- borsapy/_providers/isin.py +247 -0
- borsapy/_providers/isyatirim.py +943 -0
- borsapy/_providers/isyatirim_screener.py +468 -0
- borsapy/_providers/kap.py +534 -0
- borsapy/_providers/paratic.py +278 -0
- borsapy/_providers/tcmb.py +317 -0
- borsapy/_providers/tefas.py +802 -0
- borsapy/_providers/viop.py +204 -0
- borsapy/bond.py +162 -0
- borsapy/cache.py +86 -0
- borsapy/calendar.py +272 -0
- borsapy/crypto.py +153 -0
- borsapy/exceptions.py +64 -0
- borsapy/fund.py +471 -0
- borsapy/fx.py +388 -0
- borsapy/index.py +285 -0
- borsapy/inflation.py +166 -0
- borsapy/market.py +53 -0
- borsapy/multi.py +227 -0
- borsapy/screener.py +365 -0
- borsapy/ticker.py +1196 -0
- borsapy/viop.py +162 -0
- borsapy-0.4.0.dist-info/METADATA +969 -0
- borsapy-0.4.0.dist-info/RECORD +37 -0
- borsapy-0.4.0.dist-info/WHEEL +4 -0
- borsapy-0.4.0.dist-info/licenses/LICENSE +190 -0
|
@@ -0,0 +1,943 @@
|
|
|
1
|
+
"""İş Yatırım provider for real-time prices and financial statements."""
|
|
2
|
+
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
import pandas as pd
|
|
7
|
+
|
|
8
|
+
from borsapy._providers.base import BaseProvider
|
|
9
|
+
from borsapy.cache import TTL
|
|
10
|
+
from borsapy.exceptions import APIError, DataNotAvailableError, TickerNotFoundError
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class IsYatirimProvider(BaseProvider):
|
|
14
|
+
"""
|
|
15
|
+
Provider for real-time stock data and financial statements from İş Yatırım.
|
|
16
|
+
|
|
17
|
+
APIs:
|
|
18
|
+
- OneEndeks: Real-time OHLCV data
|
|
19
|
+
- MaliTablo: Financial statements (balance sheet, income, cash flow)
|
|
20
|
+
- GetSermayeArttirimlari: Dividends and capital increases
|
|
21
|
+
"""
|
|
22
|
+
|
|
23
|
+
    # Root URL for the chart / financial-data AJAX endpoints (OneEndeks,
    # IndexHistoricalAll, MaliTablo).
    BASE_URL = "https://www.isyatirim.com.tr/_Layouts/15/IsYatirim.Website/Common"
    # ASP.NET WebMethod endpoint serving company info (dividends, capital
    # increases via GetSermayeArttirimlari).
    STOCK_INFO_URL = "https://www.isyatirim.com.tr/_layouts/15/IsYatirim.Website/StockInfo/CompanyInfoAjax.aspx"

    # Financial statement groups (the `financialGroup` query parameter)
    FINANCIAL_GROUP_INDUSTRIAL = "XI_29"  # Sanayi şirketleri (industrial companies)
    FINANCIAL_GROUP_BANK = "UFRS"  # Bankalar (banks; IFRS reporting)

    # Known market indices: BIST index code -> human-readable Turkish name.
    # Used by get_index_info() both for validation and display names.
    INDICES: dict[str, str] = {
        "XU100": "BIST 100",
        "XU050": "BIST 50",
        "XU030": "BIST 30",
        "XBANK": "BIST Banka",
        "XUSIN": "BIST Sınai",
        "XHOLD": "BIST Holding ve Yatırım",
        "XUTEK": "BIST Teknoloji",
        "XGIDA": "BIST Gıda",
        "XTRZM": "BIST Turizm",
        "XULAS": "BIST Ulaştırma",
        "XSGRT": "BIST Sigorta",
        "XMANA": "BIST Metal Ana",
        "XKMYA": "BIST Kimya",
        "XMADN": "BIST Maden",
        "XELKT": "BIST Elektrik",
        "XTEKS": "BIST Tekstil",
        "XILTM": "BIST İletişim",
        "XUMAL": "BIST Mali",
        "XUTUM": "BIST Tüm",
    }
|
|
52
|
+
|
|
53
|
+
def get_realtime_quote(self, symbol: str) -> dict[str, Any]:
|
|
54
|
+
"""
|
|
55
|
+
Get real-time quote for a symbol using OneEndeks API.
|
|
56
|
+
|
|
57
|
+
Args:
|
|
58
|
+
symbol: Stock symbol (e.g., "THYAO", "GARAN").
|
|
59
|
+
|
|
60
|
+
Returns:
|
|
61
|
+
Dictionary with quote data:
|
|
62
|
+
- symbol: Stock symbol
|
|
63
|
+
- last: Last price
|
|
64
|
+
- open: Opening price
|
|
65
|
+
- high: High price
|
|
66
|
+
- low: Low price
|
|
67
|
+
- close: Previous day close
|
|
68
|
+
- volume: Trading volume
|
|
69
|
+
- bid: Bid price
|
|
70
|
+
- ask: Ask price
|
|
71
|
+
- change: Price change
|
|
72
|
+
- change_percent: Price change percentage
|
|
73
|
+
- update_time: Last update time
|
|
74
|
+
"""
|
|
75
|
+
symbol = symbol.upper().replace(".IS", "").replace(".E", "")
|
|
76
|
+
|
|
77
|
+
cache_key = f"isyatirim:quote:{symbol}"
|
|
78
|
+
cached = self._cache_get(cache_key)
|
|
79
|
+
if cached is not None:
|
|
80
|
+
return cached
|
|
81
|
+
|
|
82
|
+
url = f"{self.BASE_URL}/ChartData.aspx/OneEndeks"
|
|
83
|
+
params = {"endeks": symbol}
|
|
84
|
+
|
|
85
|
+
try:
|
|
86
|
+
response = self._get(url, params=params)
|
|
87
|
+
data = response.json()
|
|
88
|
+
except Exception as e:
|
|
89
|
+
raise APIError(f"Failed to fetch quote for {symbol}: {e}") from e
|
|
90
|
+
|
|
91
|
+
if not data or "symbol" not in data:
|
|
92
|
+
raise TickerNotFoundError(symbol)
|
|
93
|
+
|
|
94
|
+
result = self._parse_quote(data)
|
|
95
|
+
self._cache_set(cache_key, result, TTL.REALTIME_PRICE)
|
|
96
|
+
|
|
97
|
+
return result
|
|
98
|
+
|
|
99
|
+
def get_index_history(
|
|
100
|
+
self,
|
|
101
|
+
index_code: str,
|
|
102
|
+
start: datetime | None = None,
|
|
103
|
+
end: datetime | None = None,
|
|
104
|
+
) -> pd.DataFrame:
|
|
105
|
+
"""
|
|
106
|
+
Get historical data for an index.
|
|
107
|
+
|
|
108
|
+
Args:
|
|
109
|
+
index_code: Index code (e.g., "XU100", "XU030", "XBANK").
|
|
110
|
+
start: Start date.
|
|
111
|
+
end: End date.
|
|
112
|
+
|
|
113
|
+
Returns:
|
|
114
|
+
DataFrame with columns: Open, High, Low, Close, Volume.
|
|
115
|
+
"""
|
|
116
|
+
index_code = index_code.upper()
|
|
117
|
+
|
|
118
|
+
# Default date range
|
|
119
|
+
if end is None:
|
|
120
|
+
end = datetime.now()
|
|
121
|
+
if start is None:
|
|
122
|
+
from datetime import timedelta
|
|
123
|
+
|
|
124
|
+
start = end - timedelta(days=365)
|
|
125
|
+
|
|
126
|
+
start_str = start.strftime("%d-%m-%Y")
|
|
127
|
+
end_str = end.strftime("%d-%m-%Y")
|
|
128
|
+
|
|
129
|
+
cache_key = f"isyatirim:index_history:{index_code}:{start_str}:{end_str}"
|
|
130
|
+
cached = self._cache_get(cache_key)
|
|
131
|
+
if cached is not None:
|
|
132
|
+
return cached
|
|
133
|
+
|
|
134
|
+
url = f"{self.BASE_URL}/ChartData.aspx/IndexHistoricalAll"
|
|
135
|
+
params = {
|
|
136
|
+
"endeks": index_code,
|
|
137
|
+
"startdate": start_str,
|
|
138
|
+
"enddate": end_str,
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
try:
|
|
142
|
+
response = self._get(url, params=params)
|
|
143
|
+
data = response.json()
|
|
144
|
+
except Exception as e:
|
|
145
|
+
raise APIError(f"Failed to fetch index history for {index_code}: {e}") from e
|
|
146
|
+
|
|
147
|
+
if not data:
|
|
148
|
+
raise DataNotAvailableError(f"No data for index: {index_code}")
|
|
149
|
+
|
|
150
|
+
df = self._parse_index_history(data)
|
|
151
|
+
self._cache_set(cache_key, df, TTL.OHLCV_HISTORY)
|
|
152
|
+
|
|
153
|
+
return df
|
|
154
|
+
|
|
155
|
+
def _parse_index_history(self, data: list[dict[str, Any]]) -> pd.DataFrame:
|
|
156
|
+
"""Parse index history response into DataFrame."""
|
|
157
|
+
records = []
|
|
158
|
+
for item in data:
|
|
159
|
+
try:
|
|
160
|
+
# Parse timestamp from JavaScript date format
|
|
161
|
+
date_str = item.get("date", "")
|
|
162
|
+
if date_str:
|
|
163
|
+
dt = datetime.strptime(date_str[:10], "%Y-%m-%d")
|
|
164
|
+
else:
|
|
165
|
+
continue
|
|
166
|
+
|
|
167
|
+
records.append(
|
|
168
|
+
{
|
|
169
|
+
"Date": dt,
|
|
170
|
+
"Open": float(item.get("open", 0)),
|
|
171
|
+
"High": float(item.get("high", 0)),
|
|
172
|
+
"Low": float(item.get("low", 0)),
|
|
173
|
+
"Close": float(item.get("close", 0)),
|
|
174
|
+
"Volume": int(item.get("volume", 0)),
|
|
175
|
+
}
|
|
176
|
+
)
|
|
177
|
+
except (ValueError, TypeError):
|
|
178
|
+
continue
|
|
179
|
+
|
|
180
|
+
df = pd.DataFrame(records)
|
|
181
|
+
if not df.empty:
|
|
182
|
+
df.set_index("Date", inplace=True)
|
|
183
|
+
df.sort_index(inplace=True)
|
|
184
|
+
|
|
185
|
+
return df
|
|
186
|
+
|
|
187
|
+
def get_index_info(self, index_code: str) -> dict[str, Any]:
|
|
188
|
+
"""
|
|
189
|
+
Get current information for an index.
|
|
190
|
+
|
|
191
|
+
Args:
|
|
192
|
+
index_code: Index code (e.g., "XU100").
|
|
193
|
+
|
|
194
|
+
Returns:
|
|
195
|
+
Dictionary with index information.
|
|
196
|
+
"""
|
|
197
|
+
index_code = index_code.upper()
|
|
198
|
+
|
|
199
|
+
if index_code not in self.INDICES:
|
|
200
|
+
raise TickerNotFoundError(f"Unknown index: {index_code}")
|
|
201
|
+
|
|
202
|
+
# Use the same quote endpoint for indices
|
|
203
|
+
quote = self.get_realtime_quote(index_code)
|
|
204
|
+
quote["name"] = self.INDICES.get(index_code, index_code)
|
|
205
|
+
quote["type"] = "index"
|
|
206
|
+
|
|
207
|
+
return quote
|
|
208
|
+
|
|
209
|
+
def _get_session_for_stock(self, symbol: str) -> None:
|
|
210
|
+
"""Initialize session by visiting stock page to get cookies."""
|
|
211
|
+
stock_page_url = f"https://www.isyatirim.com.tr/tr-tr/analiz/hisse/Sayfalar/sirket-karti.aspx?hisse={symbol}"
|
|
212
|
+
try:
|
|
213
|
+
# Just make a GET request to establish session cookies
|
|
214
|
+
self._client.get(stock_page_url, timeout=10)
|
|
215
|
+
except Exception:
|
|
216
|
+
pass # Ignore errors, we'll try the API anyway
|
|
217
|
+
|
|
218
|
+
def _fetch_sermaye_data(self, symbol: str) -> dict:
|
|
219
|
+
"""
|
|
220
|
+
Fetch dividend and capital increase data from İş Yatırım API.
|
|
221
|
+
|
|
222
|
+
Returns combined data with both temettuList and sermayeList.
|
|
223
|
+
"""
|
|
224
|
+
import json
|
|
225
|
+
|
|
226
|
+
symbol = symbol.upper().replace(".IS", "").replace(".E", "")
|
|
227
|
+
|
|
228
|
+
# First establish session to get ASP.NET_SessionId cookie
|
|
229
|
+
self._get_session_for_stock(symbol)
|
|
230
|
+
|
|
231
|
+
url = f"{self.STOCK_INFO_URL}/GetSermayeArttirimlari"
|
|
232
|
+
|
|
233
|
+
# ASP.NET WebMethod expects specific format
|
|
234
|
+
headers = {
|
|
235
|
+
"Content-Type": "application/json; charset=UTF-8",
|
|
236
|
+
"Accept": "application/json, text/javascript, */*; q=0.01",
|
|
237
|
+
"X-Requested-With": "XMLHttpRequest",
|
|
238
|
+
"Referer": f"https://www.isyatirim.com.tr/tr-tr/analiz/hisse/Sayfalar/sirket-karti.aspx?hisse={symbol}",
|
|
239
|
+
"Origin": "https://www.isyatirim.com.tr",
|
|
240
|
+
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36",
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
# Correct payload format with hisseKodu
|
|
244
|
+
payload = json.dumps({
|
|
245
|
+
"hisseKodu": symbol,
|
|
246
|
+
"hisseTanimKodu": "",
|
|
247
|
+
"yil": 0,
|
|
248
|
+
"zaman": "HEPSI",
|
|
249
|
+
"endeksKodu": "09",
|
|
250
|
+
"sektorKodu": "",
|
|
251
|
+
})
|
|
252
|
+
|
|
253
|
+
try:
|
|
254
|
+
response = self._client.post(url, content=payload, headers=headers, timeout=15)
|
|
255
|
+
response.raise_for_status()
|
|
256
|
+
return response.json()
|
|
257
|
+
except Exception as e:
|
|
258
|
+
raise APIError(f"Failed to fetch sermaye data for {symbol}: {e}") from e
|
|
259
|
+
|
|
260
|
+
def get_dividends(self, symbol: str) -> pd.DataFrame:
|
|
261
|
+
"""
|
|
262
|
+
Get dividend history for a stock.
|
|
263
|
+
|
|
264
|
+
Args:
|
|
265
|
+
symbol: Stock symbol (e.g., "THYAO").
|
|
266
|
+
|
|
267
|
+
Returns:
|
|
268
|
+
DataFrame with columns: Date, Amount, GrossRate, TotalDividend.
|
|
269
|
+
"""
|
|
270
|
+
symbol = symbol.upper().replace(".IS", "").replace(".E", "")
|
|
271
|
+
|
|
272
|
+
cache_key = f"isyatirim:dividends:{symbol}"
|
|
273
|
+
cached = self._cache_get(cache_key)
|
|
274
|
+
if cached is not None:
|
|
275
|
+
return cached
|
|
276
|
+
|
|
277
|
+
try:
|
|
278
|
+
data = self._fetch_sermaye_data(symbol)
|
|
279
|
+
except APIError:
|
|
280
|
+
# Return empty DataFrame if API fails
|
|
281
|
+
return pd.DataFrame(columns=["Amount", "GrossRate", "NetRate", "TotalDividend"])
|
|
282
|
+
|
|
283
|
+
# Parse dividends from response
|
|
284
|
+
df = self._parse_dividends(data)
|
|
285
|
+
self._cache_set(cache_key, df, TTL.FINANCIAL_STATEMENTS)
|
|
286
|
+
|
|
287
|
+
return df
|
|
288
|
+
|
|
289
|
+
def _parse_sermaye_response(self, data: dict[str, Any]) -> list[dict[str, Any]]:
|
|
290
|
+
"""Parse GetSermayeArttirimlari response into list of records."""
|
|
291
|
+
import json
|
|
292
|
+
|
|
293
|
+
# Response structure: {"d": "[{...}, {...}]"} - JSON string inside JSON
|
|
294
|
+
d_value = data.get("d", "[]")
|
|
295
|
+
|
|
296
|
+
if isinstance(d_value, str):
|
|
297
|
+
try:
|
|
298
|
+
return json.loads(d_value)
|
|
299
|
+
except json.JSONDecodeError:
|
|
300
|
+
return []
|
|
301
|
+
elif isinstance(d_value, list):
|
|
302
|
+
return d_value
|
|
303
|
+
return []
|
|
304
|
+
|
|
305
|
+
def _parse_dividends(self, data: dict[str, Any]) -> pd.DataFrame:
|
|
306
|
+
"""Parse dividend data from GetSermayeArttirimlari response."""
|
|
307
|
+
records = []
|
|
308
|
+
items = self._parse_sermaye_response(data)
|
|
309
|
+
|
|
310
|
+
for item in items:
|
|
311
|
+
try:
|
|
312
|
+
# Filter for cash dividend type only: 04 (Nakit Temettü)
|
|
313
|
+
tip = item.get("SHT_KODU", "")
|
|
314
|
+
if tip != "04":
|
|
315
|
+
continue
|
|
316
|
+
|
|
317
|
+
# Parse date from timestamp (milliseconds) - strip time
|
|
318
|
+
timestamp = item.get("SHHE_TARIH", 0)
|
|
319
|
+
if timestamp:
|
|
320
|
+
dt = datetime.fromtimestamp(timestamp / 1000).replace(
|
|
321
|
+
hour=0, minute=0, second=0, microsecond=0
|
|
322
|
+
)
|
|
323
|
+
else:
|
|
324
|
+
continue
|
|
325
|
+
|
|
326
|
+
# Cash dividend rate and amount
|
|
327
|
+
gross_rate = float(item.get("SHHE_NAKIT_TM_ORAN", 0) or 0)
|
|
328
|
+
net_rate = float(item.get("SHHE_NAKIT_TM_ORAN_NET", 0) or 0)
|
|
329
|
+
total_dividend = float(item.get("SHHE_NAKIT_TM_TUTAR", 0) or 0)
|
|
330
|
+
|
|
331
|
+
# Calculate per-share amount (rate / 100 since it's percentage of nominal)
|
|
332
|
+
amount = gross_rate / 100 if gross_rate else 0
|
|
333
|
+
|
|
334
|
+
records.append(
|
|
335
|
+
{
|
|
336
|
+
"Date": dt,
|
|
337
|
+
"Amount": round(amount, 4),
|
|
338
|
+
"GrossRate": round(gross_rate, 2),
|
|
339
|
+
"NetRate": round(net_rate, 2),
|
|
340
|
+
"TotalDividend": total_dividend,
|
|
341
|
+
}
|
|
342
|
+
)
|
|
343
|
+
except (ValueError, TypeError):
|
|
344
|
+
continue
|
|
345
|
+
|
|
346
|
+
df = pd.DataFrame(records)
|
|
347
|
+
if not df.empty:
|
|
348
|
+
df.set_index("Date", inplace=True)
|
|
349
|
+
df.sort_index(ascending=False, inplace=True)
|
|
350
|
+
|
|
351
|
+
return df
|
|
352
|
+
|
|
353
|
+
def get_capital_increases(self, symbol: str) -> pd.DataFrame:
|
|
354
|
+
"""
|
|
355
|
+
Get capital increase (split) history for a stock.
|
|
356
|
+
|
|
357
|
+
Args:
|
|
358
|
+
symbol: Stock symbol (e.g., "THYAO").
|
|
359
|
+
|
|
360
|
+
Returns:
|
|
361
|
+
DataFrame with columns: Date, Capital, RightsIssue, BonusFromCapital, BonusFromDividend.
|
|
362
|
+
"""
|
|
363
|
+
symbol = symbol.upper().replace(".IS", "").replace(".E", "")
|
|
364
|
+
|
|
365
|
+
cache_key = f"isyatirim:splits:{symbol}"
|
|
366
|
+
cached = self._cache_get(cache_key)
|
|
367
|
+
if cached is not None:
|
|
368
|
+
return cached
|
|
369
|
+
|
|
370
|
+
try:
|
|
371
|
+
data = self._fetch_sermaye_data(symbol)
|
|
372
|
+
except APIError:
|
|
373
|
+
# Return empty DataFrame if API fails
|
|
374
|
+
return pd.DataFrame(columns=["Capital", "RightsIssue", "BonusFromCapital", "BonusFromDividend"])
|
|
375
|
+
|
|
376
|
+
# Parse capital increases from response
|
|
377
|
+
df = self._parse_capital_increases(data)
|
|
378
|
+
self._cache_set(cache_key, df, TTL.FINANCIAL_STATEMENTS)
|
|
379
|
+
|
|
380
|
+
return df
|
|
381
|
+
|
|
382
|
+
def _parse_capital_increases(self, data: dict[str, Any]) -> pd.DataFrame:
|
|
383
|
+
"""Parse capital increase data from GetSermayeArttirimlari response."""
|
|
384
|
+
records = []
|
|
385
|
+
items = self._parse_sermaye_response(data)
|
|
386
|
+
|
|
387
|
+
for item in items:
|
|
388
|
+
try:
|
|
389
|
+
# Filter for:
|
|
390
|
+
# - Type 03: Bedelli ve Bedelsiz Sermaye Artırımı (rights + bonus issue)
|
|
391
|
+
# - Type 09: Bedelsiz Temettü (stock dividend / bonus from dividend)
|
|
392
|
+
tip = item.get("SHT_KODU", "")
|
|
393
|
+
if tip not in ("03", "09"):
|
|
394
|
+
continue
|
|
395
|
+
|
|
396
|
+
# Parse date from timestamp (milliseconds) - strip time
|
|
397
|
+
timestamp = item.get("SHHE_TARIH", 0)
|
|
398
|
+
if timestamp:
|
|
399
|
+
dt = datetime.fromtimestamp(timestamp / 1000).replace(
|
|
400
|
+
hour=0, minute=0, second=0, microsecond=0
|
|
401
|
+
)
|
|
402
|
+
else:
|
|
403
|
+
continue
|
|
404
|
+
|
|
405
|
+
# Get capital after increase
|
|
406
|
+
capital = float(item.get("HSP_BOLUNME_SONRASI_SERMAYE", 0) or 0)
|
|
407
|
+
|
|
408
|
+
# Rights issue rate (Bedelli)
|
|
409
|
+
rights_issue = float(item.get("SHHE_BDLI_ORAN", 0) or 0)
|
|
410
|
+
|
|
411
|
+
# Bonus from capital reserves (Bedelsiz İç Kaynak)
|
|
412
|
+
bonus_capital = float(item.get("SHHE_BDSZ_IK_ORAN", 0) or 0)
|
|
413
|
+
|
|
414
|
+
# Bonus from dividend (Bedelsiz Temettüden)
|
|
415
|
+
bonus_dividend = float(item.get("SHHE_BDSZ_TM_ORAN", 0) or 0)
|
|
416
|
+
|
|
417
|
+
records.append(
|
|
418
|
+
{
|
|
419
|
+
"Date": dt,
|
|
420
|
+
"Capital": capital,
|
|
421
|
+
"RightsIssue": round(rights_issue, 2),
|
|
422
|
+
"BonusFromCapital": round(bonus_capital, 2),
|
|
423
|
+
"BonusFromDividend": round(bonus_dividend, 2),
|
|
424
|
+
}
|
|
425
|
+
)
|
|
426
|
+
except (ValueError, TypeError):
|
|
427
|
+
continue
|
|
428
|
+
|
|
429
|
+
df = pd.DataFrame(records)
|
|
430
|
+
if not df.empty:
|
|
431
|
+
df.set_index("Date", inplace=True)
|
|
432
|
+
df.sort_index(ascending=False, inplace=True)
|
|
433
|
+
|
|
434
|
+
return df
|
|
435
|
+
|
|
436
|
+
def _parse_quote(self, data: dict[str, Any]) -> dict[str, Any]:
|
|
437
|
+
"""Parse OneEndeks response into standardized format."""
|
|
438
|
+
last = float(data.get("last", 0))
|
|
439
|
+
prev_close = float(data.get("dayClose", 0))
|
|
440
|
+
change = last - prev_close if prev_close else 0
|
|
441
|
+
change_pct = (change / prev_close * 100) if prev_close else 0
|
|
442
|
+
|
|
443
|
+
update_str = data.get("updateDate", "")
|
|
444
|
+
try:
|
|
445
|
+
update_time = datetime.fromisoformat(update_str.replace("+03", "+03:00"))
|
|
446
|
+
except (ValueError, AttributeError):
|
|
447
|
+
update_time = datetime.now()
|
|
448
|
+
|
|
449
|
+
return {
|
|
450
|
+
"symbol": data.get("symbol", ""),
|
|
451
|
+
"last": last,
|
|
452
|
+
"open": float(data.get("open", 0)),
|
|
453
|
+
"high": float(data.get("high", 0)),
|
|
454
|
+
"low": float(data.get("low", 0)),
|
|
455
|
+
"close": prev_close,
|
|
456
|
+
"volume": int(data.get("volume", 0)),
|
|
457
|
+
"quantity": int(data.get("quantity", 0)),
|
|
458
|
+
"bid": float(data.get("bid", 0)),
|
|
459
|
+
"ask": float(data.get("ask", 0)),
|
|
460
|
+
"change": round(change, 2),
|
|
461
|
+
"change_percent": round(change_pct, 2),
|
|
462
|
+
"week_close": float(data.get("weekClose", 0)),
|
|
463
|
+
"month_close": float(data.get("monthClose", 0)),
|
|
464
|
+
"year_close": float(data.get("yearClose", 0)),
|
|
465
|
+
"update_time": update_time,
|
|
466
|
+
}
|
|
467
|
+
|
|
468
|
+
def get_financial_statements(
|
|
469
|
+
self,
|
|
470
|
+
symbol: str,
|
|
471
|
+
statement_type: str = "balance_sheet",
|
|
472
|
+
quarterly: bool = False,
|
|
473
|
+
financial_group: str | None = None,
|
|
474
|
+
) -> pd.DataFrame:
|
|
475
|
+
"""
|
|
476
|
+
Get financial statements for a company.
|
|
477
|
+
|
|
478
|
+
Args:
|
|
479
|
+
symbol: Stock symbol.
|
|
480
|
+
statement_type: Type of statement ("balance_sheet", "income_stmt", "cashflow").
|
|
481
|
+
quarterly: If True, return quarterly data. If False, return annual data.
|
|
482
|
+
financial_group: Financial group code (XI_29 for industrial, UFRS for banks).
|
|
483
|
+
|
|
484
|
+
Returns:
|
|
485
|
+
DataFrame with financial data.
|
|
486
|
+
"""
|
|
487
|
+
symbol = symbol.upper().replace(".IS", "").replace(".E", "")
|
|
488
|
+
|
|
489
|
+
cache_key = f"isyatirim:financial:{symbol}:{statement_type}:{quarterly}"
|
|
490
|
+
cached = self._cache_get(cache_key)
|
|
491
|
+
if cached is not None:
|
|
492
|
+
return cached
|
|
493
|
+
|
|
494
|
+
# Determine financial group
|
|
495
|
+
if financial_group is None:
|
|
496
|
+
financial_group = self.FINANCIAL_GROUP_INDUSTRIAL
|
|
497
|
+
|
|
498
|
+
# Map statement type to table names
|
|
499
|
+
table_map = {
|
|
500
|
+
"balance_sheet": ["BILANCO_AKTIF", "BILANCO_PASIF"],
|
|
501
|
+
"income_stmt": ["GELIR_TABLOSU"],
|
|
502
|
+
"cashflow": ["NAKIT_AKIM_TABLOSU"],
|
|
503
|
+
}
|
|
504
|
+
|
|
505
|
+
tables = table_map.get(statement_type, ["BILANCO_AKTIF", "BILANCO_PASIF"])
|
|
506
|
+
|
|
507
|
+
# Get last 5 years/quarters
|
|
508
|
+
current_year = datetime.now().year
|
|
509
|
+
periods = self._get_periods(current_year, quarterly, count=5)
|
|
510
|
+
|
|
511
|
+
all_data = []
|
|
512
|
+
for table_name in tables:
|
|
513
|
+
try:
|
|
514
|
+
df = self._fetch_financial_table(
|
|
515
|
+
symbol=symbol,
|
|
516
|
+
table_name=table_name,
|
|
517
|
+
financial_group=financial_group,
|
|
518
|
+
periods=periods,
|
|
519
|
+
)
|
|
520
|
+
if not df.empty:
|
|
521
|
+
all_data.append(df)
|
|
522
|
+
except Exception:
|
|
523
|
+
continue
|
|
524
|
+
|
|
525
|
+
if not all_data:
|
|
526
|
+
raise DataNotAvailableError(f"No financial data available for {symbol}")
|
|
527
|
+
|
|
528
|
+
result = pd.concat(all_data, axis=0) if len(all_data) > 1 else all_data[0]
|
|
529
|
+
result = result.drop_duplicates()
|
|
530
|
+
|
|
531
|
+
self._cache_set(cache_key, result, TTL.FINANCIAL_STATEMENTS)
|
|
532
|
+
|
|
533
|
+
return result
|
|
534
|
+
|
|
535
|
+
def _get_periods(
|
|
536
|
+
self,
|
|
537
|
+
current_year: int,
|
|
538
|
+
quarterly: bool,
|
|
539
|
+
count: int = 5,
|
|
540
|
+
) -> list[tuple[int, int]]:
|
|
541
|
+
"""Generate period tuples (year, period) for financial queries.
|
|
542
|
+
|
|
543
|
+
For quarterly data, starts from the last completed quarter.
|
|
544
|
+
For annual data, starts from the previous year (current year data
|
|
545
|
+
typically not available until Q1 of next year).
|
|
546
|
+
"""
|
|
547
|
+
periods = []
|
|
548
|
+
if quarterly:
|
|
549
|
+
# Determine last AVAILABLE quarter based on current month
|
|
550
|
+
# Financial data has ~45-60 day publication delay after quarter end
|
|
551
|
+
current_month = datetime.now().month
|
|
552
|
+
#
|
|
553
|
+
# Publication timeline (approximate):
|
|
554
|
+
# Q1 (Jan-Mar) data → published May/June
|
|
555
|
+
# Q2 (Apr-Jun) data → published Aug/Sep
|
|
556
|
+
# Q3 (Jul-Sep) data → published Nov/Dec
|
|
557
|
+
# Q4 (Oct-Dec) data → published Feb/Mar of next year
|
|
558
|
+
#
|
|
559
|
+
# So available data by month:
|
|
560
|
+
# Jan-Feb: Q3 of previous year is latest
|
|
561
|
+
# Mar-May: Q4 of previous year is latest
|
|
562
|
+
# Jun-Aug: Q1 of current year is latest
|
|
563
|
+
# Sep-Nov: Q2 of current year is latest
|
|
564
|
+
# Dec: Q3 of current year is latest
|
|
565
|
+
#
|
|
566
|
+
if current_month <= 2:
|
|
567
|
+
# Jan-Feb: Q3 of previous year is latest available
|
|
568
|
+
start_year = current_year - 1
|
|
569
|
+
start_period = 9
|
|
570
|
+
elif current_month <= 5:
|
|
571
|
+
# Mar-May: Q4 of previous year is latest
|
|
572
|
+
start_year = current_year - 1
|
|
573
|
+
start_period = 12
|
|
574
|
+
elif current_month <= 8:
|
|
575
|
+
# Jun-Aug: Q1 of current year is latest
|
|
576
|
+
start_year = current_year
|
|
577
|
+
start_period = 3
|
|
578
|
+
elif current_month <= 11:
|
|
579
|
+
# Sep-Nov: Q2 of current year is latest
|
|
580
|
+
start_year = current_year
|
|
581
|
+
start_period = 6
|
|
582
|
+
else:
|
|
583
|
+
# Dec: Q3 of current year is latest
|
|
584
|
+
start_year = current_year
|
|
585
|
+
start_period = 9
|
|
586
|
+
|
|
587
|
+
# Generate quarters going backward from start
|
|
588
|
+
year = start_year
|
|
589
|
+
period = start_period
|
|
590
|
+
for _ in range(count * 4):
|
|
591
|
+
periods.append((year, period))
|
|
592
|
+
# Move to previous quarter
|
|
593
|
+
period -= 3
|
|
594
|
+
if period <= 0:
|
|
595
|
+
period = 12
|
|
596
|
+
year -= 1
|
|
597
|
+
else:
|
|
598
|
+
# Annual: start from previous year (current year data not ready)
|
|
599
|
+
for i in range(count):
|
|
600
|
+
periods.append((current_year - 1 - i, 12))
|
|
601
|
+
return periods
|
|
602
|
+
|
|
603
|
+
def _fetch_financial_table(
|
|
604
|
+
self,
|
|
605
|
+
symbol: str,
|
|
606
|
+
table_name: str,
|
|
607
|
+
financial_group: str,
|
|
608
|
+
periods: list[tuple[int, int]],
|
|
609
|
+
) -> pd.DataFrame:
|
|
610
|
+
"""Fetch a specific financial table."""
|
|
611
|
+
url = f"{self.BASE_URL}/Data.aspx/MaliTablo"
|
|
612
|
+
|
|
613
|
+
# Build params with multiple year/period pairs
|
|
614
|
+
params: dict[str, Any] = {
|
|
615
|
+
"companyCode": symbol,
|
|
616
|
+
"exchange": "TRY",
|
|
617
|
+
"financialGroup": financial_group,
|
|
618
|
+
}
|
|
619
|
+
|
|
620
|
+
for i, (year, period) in enumerate(periods[:5], 1):
|
|
621
|
+
params[f"year{i}"] = year
|
|
622
|
+
params[f"period{i}"] = period
|
|
623
|
+
|
|
624
|
+
try:
|
|
625
|
+
response = self._get(url, params=params)
|
|
626
|
+
data = response.json()
|
|
627
|
+
except Exception as e:
|
|
628
|
+
raise APIError(f"Failed to fetch financial data for {symbol}: {e}") from e
|
|
629
|
+
|
|
630
|
+
return self._parse_financial_response(data, periods)
|
|
631
|
+
|
|
632
|
+
def _parse_financial_response(
|
|
633
|
+
self,
|
|
634
|
+
data: Any,
|
|
635
|
+
periods: list[tuple[int, int]],
|
|
636
|
+
) -> pd.DataFrame:
|
|
637
|
+
"""Parse MaliTablo API response into DataFrame."""
|
|
638
|
+
if not data or not isinstance(data, dict):
|
|
639
|
+
return pd.DataFrame()
|
|
640
|
+
|
|
641
|
+
# API returns: {"value": [{"itemDescTr": "...", "value1": ..., "value2": ...}, ...]}
|
|
642
|
+
items = data.get("value", [])
|
|
643
|
+
if not items:
|
|
644
|
+
return pd.DataFrame()
|
|
645
|
+
|
|
646
|
+
# Detect quarterly vs annual: annual has all periods = 12
|
|
647
|
+
is_quarterly = len({p[1] for p in periods}) > 1
|
|
648
|
+
|
|
649
|
+
records = []
|
|
650
|
+
for item in items:
|
|
651
|
+
row_name = item.get("itemDescTr", item.get("itemDescEng", "Unknown"))
|
|
652
|
+
row_data = {"Item": row_name}
|
|
653
|
+
|
|
654
|
+
for i, (year, period) in enumerate(periods[:5], 1):
|
|
655
|
+
if is_quarterly:
|
|
656
|
+
col_name = f"{year}Q{period // 3}"
|
|
657
|
+
else:
|
|
658
|
+
col_name = str(year)
|
|
659
|
+
value = item.get(f"value{i}")
|
|
660
|
+
if value is not None:
|
|
661
|
+
try:
|
|
662
|
+
row_data[col_name] = float(value)
|
|
663
|
+
except (ValueError, TypeError):
|
|
664
|
+
row_data[col_name] = value
|
|
665
|
+
|
|
666
|
+
records.append(row_data)
|
|
667
|
+
|
|
668
|
+
df = pd.DataFrame(records)
|
|
669
|
+
if not df.empty and "Item" in df.columns:
|
|
670
|
+
df.set_index("Item", inplace=True)
|
|
671
|
+
|
|
672
|
+
return df
|
|
673
|
+
|
|
674
|
+
def get_major_holders(self, symbol: str) -> pd.DataFrame:
|
|
675
|
+
"""
|
|
676
|
+
Get major shareholders (ortaklık yapısı) for a stock.
|
|
677
|
+
|
|
678
|
+
Args:
|
|
679
|
+
symbol: Stock symbol (e.g., "THYAO").
|
|
680
|
+
|
|
681
|
+
Returns:
|
|
682
|
+
DataFrame with columns: Holder, Percentage.
|
|
683
|
+
"""
|
|
684
|
+
import json
|
|
685
|
+
import re
|
|
686
|
+
|
|
687
|
+
symbol = symbol.upper().replace(".IS", "").replace(".E", "")
|
|
688
|
+
|
|
689
|
+
cache_key = f"isyatirim:major_holders:{symbol}"
|
|
690
|
+
cached = self._cache_get(cache_key)
|
|
691
|
+
if cached is not None:
|
|
692
|
+
return cached
|
|
693
|
+
|
|
694
|
+
# Fetch the stock page HTML
|
|
695
|
+
stock_page_url = f"https://www.isyatirim.com.tr/tr-tr/analiz/hisse/Sayfalar/sirket-karti.aspx?hisse={symbol}"
|
|
696
|
+
|
|
697
|
+
try:
|
|
698
|
+
response = self._client.get(stock_page_url, timeout=15)
|
|
699
|
+
response.raise_for_status()
|
|
700
|
+
html = response.text
|
|
701
|
+
except Exception as e:
|
|
702
|
+
raise APIError(f"Failed to fetch major holders for {symbol}: {e}") from e
|
|
703
|
+
|
|
704
|
+
# Parse JavaScript variable: var OrtaklikYapisidata = [{name: 'xxx', y: 50.88}, ...]
|
|
705
|
+
pattern = r"var OrtaklikYapisidata = \[(.*?)\];"
|
|
706
|
+
match = re.search(pattern, html, re.DOTALL)
|
|
707
|
+
|
|
708
|
+
if not match:
|
|
709
|
+
return pd.DataFrame(columns=["Holder", "Percentage"])
|
|
710
|
+
|
|
711
|
+
js_array = match.group(1).strip()
|
|
712
|
+
if not js_array:
|
|
713
|
+
return pd.DataFrame(columns=["Holder", "Percentage"])
|
|
714
|
+
|
|
715
|
+
# Convert JS object to valid JSON: {name: 'x'} -> {"name": "x"}
|
|
716
|
+
json_str = re.sub(r"([{,])(\w+):", r'\1"\2":', js_array)
|
|
717
|
+
json_str = json_str.replace("'", '"')
|
|
718
|
+
|
|
719
|
+
try:
|
|
720
|
+
data = json.loads(f"[{json_str}]")
|
|
721
|
+
except json.JSONDecodeError:
|
|
722
|
+
return pd.DataFrame(columns=["Holder", "Percentage"])
|
|
723
|
+
|
|
724
|
+
records = []
|
|
725
|
+
for item in data:
|
|
726
|
+
holder = item.get("name", "Unknown")
|
|
727
|
+
percentage = float(item.get("y", 0))
|
|
728
|
+
records.append({"Holder": holder, "Percentage": round(percentage, 2)})
|
|
729
|
+
|
|
730
|
+
df = pd.DataFrame(records)
|
|
731
|
+
if not df.empty:
|
|
732
|
+
df.set_index("Holder", inplace=True)
|
|
733
|
+
|
|
734
|
+
self._cache_set(cache_key, df, TTL.FINANCIAL_STATEMENTS)
|
|
735
|
+
return df
|
|
736
|
+
|
|
737
|
+
def get_recommendations(self, symbol: str) -> dict[str, Any]:
|
|
738
|
+
"""
|
|
739
|
+
Get analyst recommendations and target price for a stock.
|
|
740
|
+
|
|
741
|
+
Args:
|
|
742
|
+
symbol: Stock symbol (e.g., "THYAO").
|
|
743
|
+
|
|
744
|
+
Returns:
|
|
745
|
+
Dictionary with:
|
|
746
|
+
- recommendation: Buy/Hold/Sell (AL/TUT/SAT)
|
|
747
|
+
- target_price: Analyst target price
|
|
748
|
+
- upside_potential: Expected upside (%)
|
|
749
|
+
"""
|
|
750
|
+
symbol = symbol.upper().replace(".IS", "").replace(".E", "")
|
|
751
|
+
|
|
752
|
+
cache_key = f"isyatirim:recommendations:{symbol}"
|
|
753
|
+
cached = self._cache_get(cache_key)
|
|
754
|
+
if cached is not None:
|
|
755
|
+
return cached
|
|
756
|
+
|
|
757
|
+
try:
|
|
758
|
+
data = self._fetch_sermaye_data(symbol)
|
|
759
|
+
except APIError:
|
|
760
|
+
return {
|
|
761
|
+
"recommendation": None,
|
|
762
|
+
"target_price": None,
|
|
763
|
+
"upside_potential": None,
|
|
764
|
+
}
|
|
765
|
+
|
|
766
|
+
# Parse recommendations from sermaye response
|
|
767
|
+
items = self._parse_sermaye_response(data)
|
|
768
|
+
|
|
769
|
+
result = {
|
|
770
|
+
"recommendation": None,
|
|
771
|
+
"target_price": None,
|
|
772
|
+
"upside_potential": None,
|
|
773
|
+
}
|
|
774
|
+
|
|
775
|
+
# Get the most recent entry with recommendation data
|
|
776
|
+
for item in items:
|
|
777
|
+
oneri = item.get("ONERI")
|
|
778
|
+
hedef_fiyat = item.get("HEDEF_FIYAT")
|
|
779
|
+
getiri_pot = item.get("GETIRI_POT")
|
|
780
|
+
|
|
781
|
+
if oneri:
|
|
782
|
+
result["recommendation"] = oneri
|
|
783
|
+
if hedef_fiyat:
|
|
784
|
+
result["target_price"] = round(float(hedef_fiyat), 2)
|
|
785
|
+
if getiri_pot:
|
|
786
|
+
result["upside_potential"] = round(float(getiri_pot) * 100, 2)
|
|
787
|
+
|
|
788
|
+
# Break on first item with data
|
|
789
|
+
if oneri or hedef_fiyat:
|
|
790
|
+
break
|
|
791
|
+
|
|
792
|
+
self._cache_set(cache_key, result, TTL.REALTIME_PRICE)
|
|
793
|
+
return result
|
|
794
|
+
|
|
795
|
+
def get_company_metrics(self, symbol: str) -> dict[str, Any]:
    """
    Get company metrics from şirket kartı page (Cari Değerler).

    Args:
        symbol: Stock symbol (e.g., "THYAO").

    Returns:
        Dictionary with:
        - market_cap: Market capitalization (TL)
        - pe_ratio: Price/Earnings ratio (F/K)
        - pb_ratio: Price/Book ratio (PD/DD)
        - ev_ebitda: Enterprise Value/EBITDA (FD/FAVÖK)
        - free_float: Free float percentage
        - foreign_ratio: Foreign ownership percentage
        - net_debt: Net debt (TL; negative means net cash)

    Raises:
        APIError: If the company page cannot be fetched after retries.
    """
    import re
    import time

    symbol = symbol.upper().replace(".IS", "").replace(".E", "")

    cache_key = f"isyatirim:metrics:{symbol}"
    cached = self._cache_get(cache_key)
    if cached is not None:
        return cached

    stock_page_url = f"https://www.isyatirim.com.tr/tr-tr/analiz/hisse/Sayfalar/sirket-karti.aspx?hisse={symbol}"

    # Retry logic for unstable İş Yatırım connection (1s, 2s backoff).
    max_retries = 3
    last_error = None
    html = None

    for attempt in range(max_retries):
        try:
            response = self._client.get(stock_page_url, timeout=15)
            response.raise_for_status()
            html = response.text
            break
        except Exception as e:
            last_error = e
            if attempt < max_retries - 1:
                time.sleep(1 * (attempt + 1))
                continue

    if html is None:
        raise APIError(f"Failed to fetch company metrics for {symbol}: {last_error}") from last_error

    result: dict[str, Any] = {
        "market_cap": None,
        "pe_ratio": None,
        "pb_ratio": None,
        "ev_ebitda": None,
        "free_float": None,
        "foreign_ratio": None,
        "net_debt": None,
    }

    # Find the "Cari Değerler" (current values) section.
    # BUGFIX: use != -1 — str.find returns 0 when the marker is the very
    # first thing in the page, which the previous `idx > 0` check dropped.
    idx = html.find("Cari Değerler")
    if idx != -1:
        snippet = html[idx : idx + 3000]

        # Parse <th>label</th><td>value</td> pairs.
        pattern = r"<th[^>]*>([^<]+)</th>\s*<td[^>]*>([^<]+)</td>"
        matches = re.findall(pattern, snippet)

        for label, value in matches:
            label = label.strip()
            # Turkish number format: "1.234,56" -> "1234.56".
            value = value.strip().replace(".", "").replace(",", ".")
            # BUGFIX: remember the sign before stripping non-digits —
            # net debt can be negative (net cash), and the old
            # `[^\d.]` substitution silently discarded the minus.
            sign = -1.0 if value.lstrip().startswith("-") else 1.0

            try:
                if "F/K" in label and "FD" not in label:
                    result["pe_ratio"] = float(value)
                elif "PD/DD" in label:
                    result["pb_ratio"] = float(value)
                elif "FD/FAVÖK" in label:
                    result["ev_ebitda"] = float(value)
                elif "Piyasa Değeri" in label:
                    # Value is quoted in mnTL; convert to TL.
                    num = float(re.sub(r"[^\d.]", "", value))
                    result["market_cap"] = int(num * 1_000_000)
                elif "Net Borç" in label:
                    num = sign * float(re.sub(r"[^\d.]", "", value))
                    result["net_debt"] = int(num * 1_000_000)
                elif "Halka Açıklık" in label:
                    result["free_float"] = float(re.sub(r"[^\d.]", "", value))
                elif "Yabancı Oranı" in label:
                    result["foreign_ratio"] = float(re.sub(r"[^\d.]", "", value))
            except (ValueError, TypeError):
                # Skip malformed/empty cells; leave that metric as None.
                continue

    self._cache_set(cache_key, result, TTL.REALTIME_PRICE)
    return result
|
|
890
|
+
|
|
891
|
+
def get_business_summary(self, symbol: str) -> str | None:
    """
    Get business summary (Faal Alanı) for a stock.

    Args:
        symbol: Stock symbol (e.g., "THYAO").

    Returns:
        Business summary text or None if not available.
    """
    import re

    symbol = symbol.upper().replace(".IS", "").replace(".E", "")

    cache_key = f"isyatirim:business_summary:{symbol}"
    cached = self._cache_get(cache_key)
    if cached is not None:
        return cached

    stock_page_url = f"https://www.isyatirim.com.tr/tr-tr/analiz/hisse/Sayfalar/sirket-karti.aspx?hisse={symbol}"

    try:
        response = self._client.get(stock_page_url, timeout=15)
        response.raise_for_status()
    except Exception:
        # Best-effort lookup: the summary is optional, so any fetch
        # failure simply yields None.
        return None

    # Pull the value cell of the "Faal Alanı" table row.
    match = re.search(r'<th[^>]*>Faal Alanı</th>\s*<td[^>]*>([^<]+)</td>', response.text)
    if match is None:
        return None

    summary = match.group(1).strip()
    if not summary:
        return None

    self._cache_set(cache_key, summary, TTL.FINANCIAL_STATEMENTS)
    return summary
|
|
932
|
+
|
|
933
|
+
|
|
934
|
+
# Lazily created module-level singleton.
_provider: IsYatirimProvider | None = None


def get_isyatirim_provider() -> IsYatirimProvider:
    """Return the shared İş Yatırım provider, creating it on first use."""
    global _provider
    if _provider is None:
        _provider = IsYatirimProvider()
    return _provider
|