borsapy 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,468 @@
+ """İş Yatırım Stock Screener provider for borsapy."""
+
+ import json
+ from typing import Any
+
+ from borsapy._providers.base import BaseProvider
+ from borsapy.cache import TTL
+ from borsapy.exceptions import APIError
+
+
+ class IsyatirimScreenerProvider(BaseProvider):
+     """
+     Provider for BIST stock screener from İş Yatırım.
+
+     API: https://www.isyatirim.com.tr
+     Note: This API requires session cookies obtained from the main page.
+     """
+
+     BASE_URL = "https://www.isyatirim.com.tr"
+     PAGE_URL = f"{BASE_URL}/tr-tr/analiz/hisse/Sayfalar/gelismis-hisse-arama.aspx"
+     SCREENER_URL = f"{BASE_URL}/tr-tr/analiz/_Layouts/15/IsYatirim.Website/StockInfo/CompanyInfoAjax.aspx/getScreenerDataNEW"
+     CRITERIA_URL = f"{BASE_URL}/_layouts/15/IsYatirim.Website/Common/Data.aspx/StockScreenerGenelKriterler"
+     HIERARCHY_URL = f"{BASE_URL}/_layouts/15/IsYatirim.Website/Common/Data.aspx/HisseHiyerasi"
+
+     # Criteria ID mapping (discovered from browser DevTools - data-tanimid attributes)
+     # These IDs are used in the getScreenerDataNEW API requests
+     CRITERIA_MAP = {
+         # Price & Market Cap
+         "price": "7",  # Kapanış (TL)
+         "market_cap": "8",  # Piyasa Değeri (mn TL)
+         "market_cap_usd": "9",  # Piyasa Değeri (mn $)
+         "market_cap_1y_avg": "10",  # Piyasa Değeri 1Y Ort. (mn $)
+         "float_ratio": "11",  # Halka Açıklık Oranı (%)
+         "float_market_cap": "12",  # Halka Açık Piy.Değ.(mn $)
+         # Performance - Relative Returns
+         "return_1d": "21",  # 1 Gün Rel. (%)
+         "return_1w": "22",  # 1 Hafta Rel. (%)
+         "return_1m": "23",  # 1 Ay Rel. (%)
+         "return_1y": "24",  # 1 Yıl Rel. (%)
+         "return_ytd": "25",  # Yıliçi Rel. (%)
+         # Performance - TL Returns
+         "return_1d_tl": "16",  # 1 Gün TL (%)
+         "return_1w_tl": "17",  # 1 Hafta TL (%)
+         "return_1m_tl": "18",  # 1 Ay TL (%)
+         "return_1y_tl": "19",  # 1 Yıl TL (%)
+         "return_ytd_tl": "20",  # Yıliçi TL (%)
+         # Volume
+         "volume_3m": "26",  # 3 Ay (mn $)
+         "volume_12m": "27",  # 12 Ay (mn $)
+         # Valuation - Current
+         "pe": "28",  # Cari F/K
+         "ev_ebitda": "29",  # Cari FD/FAVÖK
+         "pb": "30",  # Cari PD/DD
+         "ev_sales": "31",  # Cari FD/Satışlar
+         # Valuation - Forward (2025)
+         "pe_2025": "135",  # 2025 F/K
+         "pb_2025": "138",  # 2025 PD/DD
+         "ev_ebitda_2025": "141",  # 2025 FD/FAVÖK
+         # Dividends
+         "dividend_yield": "33",  # 2024 Temettü Verimi (%)
+         "dividend_yield_2025": "36",  # 2025 Temettü Verimi (%)
+         "dividend_yield_5y_avg": "38",  # Temettü Verimi 5Y Ort%
+         # Foreign Ownership
+         "foreign_ratio": "40",  # Cari Yabancı Oranı (%)
+         "foreign_ratio_1w_change": "44",  # Yabancı Oranı Son 1 Haftalık Değişimi (Baz)
+         "foreign_ratio_1m_change": "45",  # Yabancı Oranı Son 1 Aylık Değişimi (Baz)
+         # Target Price
+         "target_price": "51",  # Hedef Fiyat (TL)
+         "upside_potential": "61",  # Getiri Potansiyeli (%)
+         # Profitability - Current
+         "roe": "422",  # Cari ROE
+         "roa": "423",  # Cari ROA
+         # Profitability - Forward (2025)
+         "net_margin": "119",  # 2025 Net Kar Marjı (%)
+         "ebitda_margin": "120",  # 2025 FAVÖK Marjı (%)
+         "roe_2025": "225",  # 2025 ROE
+         "roa_2025": "247",  # 2025 ROA
+         # Historical Averages
+         "pe_hist_avg": "126",  # Tarihsel Ort. F/K
+         "pb_hist_avg": "127",  # Tarihsel Ort. PD/DD
+         "ev_ebitda_hist_avg": "128",  # Tarihsel Ort. FD/FAVÖK
+         # Index Weights
+         "bist100_weight": "375",  # BIST 100 Endeks Ağırlığı
+         "bist50_weight": "376",  # BIST50 Endeks Ağırlığı
+         "bist30_weight": "377",  # BIST30 Endeks Ağırlığı
+     }
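+     # Example: CRITERIA_MAP["pe"] is "28", so a P/E-below-10 filter is expressed
+     # as the tuple ("28", "0", "10", "False") when passed to screen(criterias=...).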
+
+     # Default price criteria - API requires at least one criteria with min/max
+     DEFAULT_CRITERIA = [("7", "1", "50000", "False")]  # Price 1-50000 TL
+
+     # Pre-defined templates with actual working criteria
+     # Note: API requires criteria with both min AND max values to work
+     # Market cap in TL (ID 8): roughly $1B = 43B TL, $5B = 215B TL (at ~43 TL/USD)
+     TEMPLATES = {
+         "small_cap": {
+             "criteria": [("8", "0", "43000", "False")],  # Market cap < ~$1B (43B TL)
+         },
+         "mid_cap": {
+             "criteria": [("8", "43000", "215000", "False")],  # Market cap $1B-$5B
+         },
+         "large_cap": {
+             "criteria": [("8", "215000", "5000000", "False")],  # Market cap > $5B
+         },
+         "high_dividend": {
+             "criteria": [("33", "2", "100", "False")],  # Dividend yield > 2%
+         },
+         "high_upside": {
+             "criteria": [("61", "0", "200", "False")],  # Positive upside potential
+         },
+         "low_upside": {
+             "criteria": [("61", "-100", "0", "False")],  # Negative upside
+         },
+         "high_volume": {
+             "criteria": [("26", "1", "1000", "False")],  # 3M avg volume > $1M
+         },
+         "low_volume": {
+             "criteria": [("26", "0", "0.5", "False")],  # 3M avg volume < $0.5M
+         },
+         "buy_recommendation": {
+             "criteria": [("7", "1", "50000", "False")],
+             "oneri": "AL",
+         },
+         "sell_recommendation": {
+             "criteria": [("7", "1", "50000", "False")],
+             "oneri": "SAT",
+         },
+         "high_net_margin": {
+             "criteria": [("119", "10", "200", "False")],  # Net margin > 10%
+         },
+         "high_return": {
+             "criteria": [("22", "0", "100", "False")],  # Positive 1-week relative return
+         },
+         "low_pe": {
+             "criteria": [("28", "0", "10", "False")],  # P/E < 10
+         },
+         "high_roe": {
+             "criteria": [("422", "15", "200", "False")],  # ROE > 15%
+         },
+         "high_foreign_ownership": {
+             "criteria": [("40", "30", "100", "False")],  # Foreign ownership > 30%
+         },
+     }
+
+     def __init__(self, timeout: float = 30.0, cache=None):
+         """Initialize the provider."""
+         super().__init__(timeout=timeout, cache=cache)
+         self._criteria_cache: list[dict[str, Any]] | None = None
+         self._sectors_cache: list[dict[str, Any]] | None = None
+         self._indices_cache: list[dict[str, Any]] | None = None
+         self._session_initialized = False
+         self._request_digest: str | None = None
+
+     def _init_session(self) -> None:
+         """Initialize session by fetching the main page to get cookies."""
+         if self._session_initialized:
+             return
+
+         try:
+             # Fetch the main page to establish session cookies
+             response = self._get(
+                 self.PAGE_URL,
+                 headers={
+                     "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
+                     "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
+                 },
+             )
+
+             # Extract request digest from page if present
+             import re
+
+             digest_match = re.search(r'id="__REQUESTDIGEST"[^>]*value="([^"]+)"', response.text)
+             if digest_match:
+                 self._request_digest = digest_match.group(1)
+
+             self._session_initialized = True
+         except Exception:
+             # Session initialization failed, but we can still try without it
+             self._session_initialized = True
+
+     def _get_headers(self) -> dict[str, str]:
+         """Get headers for İş Yatırım API."""
+         headers = {
+             **self.DEFAULT_HEADERS,
+             "Content-Type": "application/json; charset=UTF-8",
+             "X-Requested-With": "XMLHttpRequest",
+             "Accept": "application/json, text/javascript, */*; q=0.01",
+             "Origin": self.BASE_URL,
+             "Referer": self.PAGE_URL,
+         }
+         if self._request_digest:
+             headers["X-RequestDigest"] = self._request_digest
+         return headers
+
+     def get_criteria(self) -> list[dict[str, Any]]:
+         """
+         Get all available screening criteria.
+
+         Returns:
+             List of criteria with id, name, min, max values.
+         """
+         if self._criteria_cache is not None:
+             return self._criteria_cache
+
+         cache_key = "isyatirim:screener:criteria"
+         cached = self._cache_get(cache_key)
+         if cached is not None:
+             self._criteria_cache = cached
+             return cached
+
+         try:
+             response = self._get(
+                 self.CRITERIA_URL,
+                 headers={"X-Requested-With": "XMLHttpRequest"},
+             )
+             data = response.json()
+
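+             # Each row is a SharePoint-style list item; lookup columns come back as
+             # "<id>;#<value>" strings (e.g. a name field like "28;#Cari F/K").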
+             criteria = []
+             for item in data.get("value", []):
+                 # Criterion id is the first part of the ";#"-delimited lookup value
+                 kriter_tanim = item.get("KriterTanim", "")
+                 if ";#" in kriter_tanim:
+                     parts = kriter_tanim.split(";#")
+                     kriter_id = parts[0] if len(parts) > 0 else None
+                 else:
+                     kriter_id = None
+
+                 # Display name comes from the related lookup column (field name is SharePoint-encoded)
+                 name_field = item.get("KriterTanim_x003a_Ba_x015f_l_x01", "")
+                 if ";#" in name_field:
+                     name = name_field.split(";#")[1] if len(name_field.split(";#")) > 1 else ""
+                 else:
+                     name = name_field
+
+                 # Get min/max
+                 min_field = item.get("KriterTanim_x003a_MIN_DEGER", "")
+                 max_field = item.get("KriterTanim_x003a_MAX_DEGER", "")
+
+                 min_val = min_field.split(";#")[1] if ";#" in min_field else min_field
+                 max_val = max_field.split(";#")[1] if ";#" in max_field else max_field
+
+                 if kriter_id and name:
+                     criteria.append({
+                         "id": kriter_id,
+                         "name": name,
+                         "min": min_val,
+                         "max": max_val,
+                     })
+
+             # Deduplicate by id
+             seen = set()
+             unique_criteria = []
+             for c in criteria:
+                 if c["id"] not in seen:
+                     seen.add(c["id"])
+                     unique_criteria.append(c)
+
+             self._criteria_cache = unique_criteria
+             self._cache_set(cache_key, unique_criteria, TTL.COMPANY_LIST)
+             return unique_criteria
+
+         except Exception as e:
+             raise APIError(f"Failed to fetch screening criteria: {e}") from e
+
+     def get_sectors(self) -> list[dict[str, Any]]:
+         """
+         Get list of sectors for filtering.
+
+         Returns:
+             List of sectors with id and name.
+         """
+         if self._sectors_cache is not None:
+             return self._sectors_cache
+
+         cache_key = "isyatirim:screener:sectors"
+         cached = self._cache_get(cache_key)
+         if cached is not None:
+             self._sectors_cache = cached
+             return cached
+
+         # Extract from page HTML
+         sectors = self._extract_sectors_from_page()
+         if sectors:
+             self._sectors_cache = sectors
+             self._cache_set(cache_key, sectors, TTL.COMPANY_LIST)
+             return sectors
+
+         return []
+
+     def get_indices(self) -> list[dict[str, Any]]:
+         """
+         Get list of indices for filtering.
+
+         Returns:
+             List of indices with id and name.
+         """
+         if self._indices_cache is not None:
+             return self._indices_cache
+
+         cache_key = "isyatirim:screener:indices"
+         cached = self._cache_get(cache_key)
+         if cached is not None:
+             self._indices_cache = cached
+             return cached
+
+         # Extract from page HTML
+         indices = self._extract_indices_from_page()
+         if indices:
+             self._indices_cache = indices
+             self._cache_set(cache_key, indices, TTL.COMPANY_LIST)
+             return indices
+
+         return []
+
+     def _extract_sectors_from_page(self) -> list[dict[str, Any]]:
+         """Extract sectors from the screener page HTML."""
+         from bs4 import BeautifulSoup
+
+         try:
+             self._init_session()
+             response = self._get(self.PAGE_URL)
+             soup = BeautifulSoup(response.content, "html.parser")
+
+             # Find sector dropdown
+             sector_select = soup.find("select", id="ctl00_ctl58_g_877a6dc3_ec50_46c8_9ce3_f240bf1fe822_ctl00_ddlStockSector")
+             if not sector_select:
+                 return []
+
+             sectors = []
+             for opt in sector_select.find_all("option"):
+                 value = opt.get("value", "")
+                 name = opt.text.strip()
+                 if value and name and name != "Sektör Seçiniz":
+                     sectors.append({"id": value, "name": name})
+
+             return sectors
+
+         except Exception:
+             return []
+
+     def _extract_indices_from_page(self) -> list[dict[str, Any]]:
+         """Return a static list of common indices for the index filter."""
+         # Note: The İş Yatırım screener API does not reliably support
+         # index filtering, so no attempt is made to scrape the page here.
+         return [
+             {"id": "BIST 30", "name": "BIST 30"},
+             {"id": "BIST 50", "name": "BIST 50"},
+             {"id": "BIST 100", "name": "BIST 100"},
+             {"id": "BIST BANKA", "name": "BIST BANKA"},
+             {"id": "BIST SINAİ", "name": "BIST SINAİ"},
+             {"id": "BIST HİZMETLER", "name": "BIST HİZMETLER"},
+             {"id": "BIST TEKNOLOJİ", "name": "BIST TEKNOLOJİ"},
+         ]
+
+     def screen(
+         self,
+         criterias: list[tuple[str, str, str, str]] | None = None,
+         sector: str | None = None,
+         index: str | None = None,
+         recommendation: str | None = None,
+         template: str | None = None,
+     ) -> list[dict[str, Any]]:
+         """
+         Screen stocks based on criteria.
+
+         Args:
+             criterias: List of (criteria_id, min, max, required) tuples.
+             sector: Sector filter (e.g., "Bankacılık").
+             index: Index filter (e.g., "BIST 30").
+             recommendation: Recommendation filter ("AL" = buy, "SAT" = sell, "TUT" = hold).
+             template: Pre-defined template name (see TEMPLATES).
+
+         Returns:
+             List of matching stocks with requested criteria values.
+         """
+         # Build request payload
+         payload = {
+             "sektor": sector or "",
+             "endeks": index or "",
+             "takip": "",
+             "oneri": recommendation or "",
+             "criterias": [],
+             "lang": "1055",  # Turkish
+         }
+
+         # Apply template if specified
+         if template and template in self.TEMPLATES:
+             tmpl = self.TEMPLATES[template]
+             if "criteria" in tmpl:
+                 payload["criterias"] = [list(c) for c in tmpl["criteria"]]
+             if "oneri" in tmpl:
+                 payload["oneri"] = tmpl["oneri"]
+
+         # Add custom criterias
+         if criterias:
+             for c in criterias:
+                 payload["criterias"].append(list(c))
+
+         # If no criterias specified, fall back to the default price criteria
+         if not payload["criterias"]:
+             payload["criterias"] = [list(c) for c in self.DEFAULT_CRITERIA]  # Price 1-50000 TL
+
+         # Initialize session to get cookies
+         self._init_session()
+
+         # Build cache key
+         cache_key = f"isyatirim:screener:{json.dumps(payload, sort_keys=True)}"
+         cached = self._cache_get(cache_key)
+         if cached is not None:
+             return cached
+
+         try:
+             response = self._post(
+                 self.SCREENER_URL,
+                 json=payload,
+                 headers=self._get_headers(),
+             )
+
+             data = response.json()
+
+             # Parse response - it's a JSON string inside "d" field
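+             # e.g. {"d": "[{\"Hisse\": \"THYAO - Türk Hava Yolları\", ...}, ...]"}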
+             result_str = data.get("d", "[]")
+             results = json.loads(result_str)
+
+             # Parse results
+             stocks = []
+             for item in results:
+                 # Parse "Hisse" field: "THYAO - Türk Hava Yolları"
+                 hisse = item.get("Hisse", "")
+                 if " - " in hisse:
+                     parts = hisse.split(" - ", 1)
+                     symbol = parts[0].strip()
+                     name = parts[1].strip()
+                 else:
+                     symbol = hisse
+                     name = ""
+
+                 stock = {
+                     "symbol": symbol,
+                     "name": name,
+                 }
+
+                 # Add criteria values
+                 for key, value in item.items():
+                     if key != "Hisse":
+                         try:
+                             stock[f"criteria_{key}"] = float(value)
+                         except (ValueError, TypeError):
+                             stock[f"criteria_{key}"] = value
+
+                 stocks.append(stock)
+
+             self._cache_set(cache_key, stocks, TTL.REALTIME_PRICE * 15)  # 15 minutes
+             return stocks
+
+         except Exception as e:
+             raise APIError(f"Failed to screen stocks: {e}") from e
+
+
+ # Singleton instance
+ _provider: IsyatirimScreenerProvider | None = None
+
+
+ def get_screener_provider() -> IsyatirimScreenerProvider:
+     """Get the singleton screener provider instance."""
+     global _provider
+     if _provider is None:
+         _provider = IsyatirimScreenerProvider()
+     return _provider
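
A minimal usage sketch (illustrative only; it relies solely on names defined in this module and the documented screen() parameters):

    provider = get_screener_provider()

    # Pre-defined template: stocks trading at a P/E below 10
    cheap = provider.screen(template="low_pe")

    # Custom criteria built from CRITERIA_MAP: ROE above 15%, banking sector only
    banks = provider.screen(
        criterias=[(IsyatirimScreenerProvider.CRITERIA_MAP["roe"], "15", "200", "False")],
        sector="Bankacılık",
    )
    # Each result is a dict of the form {"symbol": ..., "name": ..., "criteria_<key>": ...}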