financegy 1.5-py3-none-any.whl → 2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
financegy/__init__.py CHANGED
@@ -1,34 +1,48 @@
  """FinanceGY - a Python library for accessing data from the Guyana Stock Exchange."""
 
- from importlib.metadata import version, PackageNotFoundError
+ from financegy.cache.cache_manager import clear_cache
+ from financegy.utils.utils import save_to_csv, to_dataframe, save_to_excel
 
- from financegy.modules.securities import(
+ from financegy.modules.securities import (
      get_securities,
      get_security_by_symbol,
      get_recent_trade,
+     get_previous_close,
+     get_price_change,
+     get_price_change_percent,
      get_security_recent_year,
      get_session_trades,
+     get_sessions_average_price,
+     get_average_price,
+     get_sessions_volatility,
+     get_ytd_high_low,
+     get_latest_session_for_symbol,
      get_security_session_trade,
      search_securities,
      get_trades_for_year,
-     get_historical_trades
+     get_historical_trades,
  )
 
- from financegy.cache.cache_manager import clear_cache
- from financegy.utils.utils import save_to_csv, to_dataframe, save_to_excel
-
  __all__ = [
      "get_securities",
      "get_security_by_symbol",
      "get_recent_trade",
+     "get_previous_close",
+     "get_price_change",
+     "get_price_change_percent",
      "get_security_recent_year",
      "get_session_trades",
+     "get_sessions_average_price",
+     "get_average_price",
      "get_security_session_trade",
+     "get_latest_session_for_symbol",
+     "get_sessions_volatility",
+     "get_ytd_high_low",
      "search_securities",
      "get_trades_for_year",
      "get_historical_trades",
      "clear_cache",
      "save_to_csv",
      "to_dataframe",
-     "save_to_excel"
+     "save_to_excel",
  ]
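Version 2.0 substantially widens the public API exported from financegy/__init__.py. Below is a minimal usage sketch of the new exports; the exact call signatures are not part of this diff, so the ticker-symbol arguments are an assumption modelled on the existing get_security_by_symbol-style calls, and "DIH" is only an illustrative symbol.

import financegy

# Assumed signatures: the new helpers are expected to take a ticker symbol,
# mirroring existing calls such as financegy.get_security_by_symbol("DIH").
previous_close = financegy.get_previous_close("DIH")
change = financegy.get_price_change("DIH")
change_pct = financegy.get_price_change_percent("DIH")
ytd_range = financegy.get_ytd_high_low("DIH")

print(previous_close, change, change_pct, ytd_range)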
financegy/cache/cache_manager.py CHANGED
@@ -5,20 +5,18 @@ import shutil
  SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
  CACHE_DIR = os.path.join(SCRIPT_DIR, "cache")
 
+
  def make_cache_key(func_name, *args, **kwargs):
      """Create a unique hash for the given function call."""
 
-     key_data = {
-         "func": func_name,
-         "args": args,
-         "kwargs": kwargs
-     }
+     key_data = {"func": func_name, "args": args, "kwargs": kwargs}
 
      key_string = json.dumps(key_data, sort_keys=True, default=str)
      hashed = hashlib.md5(key_string.encode()).hexdigest()
 
      return f"{func_name}_{hashed}.json"
 
+
  def load_cache(func_name, *args, max_age_days=7, **kwargs):
      os.makedirs(CACHE_DIR, exist_ok=True)
      cache_file = make_cache_key(func_name, *args, **kwargs)
@@ -36,18 +34,17 @@ def load_cache(func_name, *args, max_age_days=7, **kwargs):
 
      return data["value"]
 
+
  def save_cache(func_name, value, *args, **kwargs):
      os.makedirs(CACHE_DIR, exist_ok=True)
      cache_file = make_cache_key(func_name, *args, **kwargs)
      cache_path = os.path.join(CACHE_DIR, cache_file)
 
      with open(cache_path, "w") as f:
-         json.dump({
-             "timestamp": datetime.now().isoformat(),
-             "value": value
-         }, f)
+         json.dump({"timestamp": datetime.now().isoformat(), "value": value}, f)
 
- def clear_cache():
+
+ def clear_cache(silent: bool = False):
      """Completely clears the FinanceGY cache directory."""
      if not os.path.exists(CACHE_DIR):
          print("No cache directory found.")
@@ -55,8 +52,9 @@ def clear_cache():
 
      try:
          shutil.rmtree(CACHE_DIR)
-         print("Cache cleared successfully.")
+         if not silent:
+             print("\nCache cleared successfully.")
          return True
-
+
      except Exception as e:
-         print(f"Failed to clear cache: {e}")
+         print(f"Failed to clear cache: {e}")
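The functional change in the cache module is the new silent flag on clear_cache, which suppresses the success message while keeping the return value and error reporting. For example:

from financegy import clear_cache

# Default behaviour: prints a confirmation message and returns True on success.
clear_cache()

# New in 2.0: clear quietly, e.g. from scripts or test fixtures.
clear_cache(silent=True)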
financegy/config.py CHANGED
@@ -1,7 +1,7 @@
- __version__ = "0.1.0"
+ __version__ = "2.0"
 
  BASE_URL = "https://guyanastockexchangeinc.com"
  HEADERS = {
      "User-Agent": f"FinanceGY/{__version__} (https://github.com/xbze3/financegy)"
  }
- REQUEST_TIMEOUT = 10
+ REQUEST_TIMEOUT = 10
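Only the version string meaningfully changes here (the REQUEST_TIMEOUT line appears to differ only in trailing whitespace or the final newline). For context, a minimal sketch of how these constants would typically be consumed; the actual request helpers live elsewhere in the package and are not shown in this diff:

import requests

from financegy.config import BASE_URL, HEADERS, REQUEST_TIMEOUT

# Illustrative only: fetch a page the way the library's HTTP layer is
# expected to, reusing the shared User-Agent header and timeout.
response = requests.get(BASE_URL, headers=HEADERS, timeout=REQUEST_TIMEOUT)
response.raise_for_status()
html = response.text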
financegy/core/parser.py CHANGED
@@ -1,5 +1,8 @@
  from bs4 import BeautifulSoup
  from datetime import datetime
+ from financegy.helpers.to_float import to_float
+ from financegy.helpers.safe_text import safe_text
+
 
  def parse_get_securities(html: str):
      """Extract security info"""
@@ -10,25 +13,26 @@ def parse_get_securities(html: str):
 
          if not security_info_html:
              return []
-
+
          security_info = []
          securities = security_info_html.find_all("div", class_="security group")
 
          for security in securities:
-             symbol = security.find_all("div", class_="acronym inline")[0].get_text(strip=True)
-             name = security.find_all("div", class_="name inline")[0].get_text(strip=True)
+             symbol = security.find_all("div", class_="acronym inline")[0].get_text(
+                 strip=True
+             )
+             name = security.find_all("div", class_="name inline")[0].get_text(
+                 strip=True
+             )
 
-             security_info.append({
-                 "symbol": symbol,
-                 "name": name
-             })
+             security_info.append({"symbol": symbol, "name": name})
 
-
          return security_info
-
+
      except Exception as e:
          print(f"[parse_get_securities] Error parsing securities: {e}")
 
+
  def parse_get_security_recent_year(html: str):
      """Extract selected security's trade info from current year"""
 
@@ -38,35 +42,34 @@ def parse_get_security_recent_year(html: str):
          security_info_html = soup.find("div", class_="year slide")
          if not security_info_html:
              raise ValueError("Could not find 'div.year.slide' section in HTML.")
-
+
          trade_data = []
 
          trades = security_info_html.find_all("tr", class_="trade")
          if not trades:
              raise ValueError("No trade rows found for this security.")
-
-         def safe_text(parent, class_name):
-             cell = parent.find("td", class_=class_name)
-             return cell.get_text(strip=True) if cell else None
-
+
          for trade in trades:
-             trade_data.append({
-                 "session": safe_text(trade, "session"),
-                 "date": safe_text(trade, "date"),
-                 "ltp": safe_text(trade, "name"),
-                 "best_bid": safe_text(trade, "best bid"),
-                 "vol_bid": safe_text(trade, "vol bid"),
-                 "best_offer": safe_text(trade, "best offer"),
-                 "vol_offer": safe_text(trade, "vol offer"),
-                 "opening_price": safe_text(trade, "opening price"),
-             })
+             trade_data.append(
+                 {
+                     "session": safe_text(trade, "session"),
+                     "session_date": safe_text(trade, "date"),
+                     "last_trade_price": safe_text(trade, "name"),
+                     "eps": safe_text(trade, "best bid"),
+                     "pe_ratio": safe_text(trade, "vol bid"),
+                     "dividends_paid_last_12_months": safe_text(trade, "best offer"),
+                     "dividend_yield": safe_text(trade, "vol offer"),
+                     "notes": safe_text(trade, "opening price"),
+                 }
+             )
 
          return trade_data
-
+
      except Exception as e:
          print(f"[parse_get_security_recent_year] Error parsing HTML: {e}")
          return None
 
+
  def parse_get_recent_trade(html: str):
      """Extract selected security's most recent trade info"""
      try:
@@ -82,27 +85,253 @@ def parse_get_recent_trade(html: str):
 
          recent = trades[-1]
 
-         def safe_text(parent, class_name):
-             cell = parent.find("td", class_=class_name)
-             return cell.get_text(strip=True) if cell else None
-
          recent_info = {
              "session": safe_text(recent, "session"),
-             "date": safe_text(recent, "date"),
-             "ltp": safe_text(recent, "name"),
-             "best_bid": safe_text(recent, "best bid"),
-             "vol_bid": safe_text(recent, "vol bid"),
-             "best_offer": safe_text(recent, "best offer"),
-             "vol_offer": safe_text(recent, "vol offer"),
-             "opening_price": safe_text(recent, "opening price"),
+             "session_date": safe_text(recent, "date"),
+             "last_trade_price": safe_text(recent, "name"),
+             "eps": safe_text(recent, "best bid"),
+             "pe_ratio": safe_text(recent, "vol bid"),
+             "dividends_paid_last_12_months": safe_text(recent, "best offer"),
+             "dividend_yield": safe_text(recent, "vol offer"),
+             "notes": safe_text(recent, "opening price"),
          }
 
          return recent_info
 
      except Exception as e:
-         print(f"[parse_get_security_recent] Error parsing HTML: {e}")
+         print(f"[parse_get_recent_trade] Error parsing HTML: {e}")
+         return None
+
+
+ def parse_get_previous_close(html: str):
+     """Extract selected security's most recent closing price"""
+     try:
+         soup = BeautifulSoup(html, "html.parser")
+
+         security_info_html = soup.find("div", class_="year slide")
+         if not security_info_html:
+             raise ValueError("Could not find 'div.year.slide' section in HTML.")
+
+         trades = security_info_html.find_all("tr", class_="trade")
+         if not trades:
+             raise ValueError("No trade rows found for this security.")
+
+         recent = trades[-1]
+
+         previous_close = {
+             "last_trade_price": safe_text(recent, "name"),
+         }
+
+         return previous_close
+
+     except Exception as e:
+         print(f"[parse_get_previous_close] Error parsing HTML: {e}")
+         return None
+
+
+ def parse_get_price_change(html: str):
+     """Extract selected security's absolute price difference between the most recent trade and the previous session close"""
+
+     try:
+         soup = BeautifulSoup(html, "html.parser")
+
+         security_info_html = soup.find("div", class_="year slide")
+         if not security_info_html:
+             raise ValueError("Could not find 'div.year.slide' section in HTML.")
+
+         trades = security_info_html.find_all("tr", class_="trade")
+         if not trades:
+             raise ValueError("No trade rows found for this security.")
+
+         recent = trades[-1]
+         previous = trades[-2]
+
+         price_change = {
+             "recent_trade": {
+                 "session": safe_text(recent, "session"),
+                 "session_date": safe_text(recent, "date"),
+                 "last_trade_price": safe_text(recent, "name"),
+             },
+             "previous_trade": {
+                 "session": safe_text(previous, "session"),
+                 "session_date": safe_text(previous, "date"),
+                 "last_trade_price": safe_text(previous, "name"),
+             },
+             "price_difference": f"{(
+                 to_float(safe_text(recent, 'name'))
+                 - to_float(safe_text(previous, 'name'))
+             )}",
+         }
+
+         return price_change
+
+     except Exception as e:
+         print(f"[parse_get_price_change] Error parsing HTML: {e}")
+         return None
+
+
+ def parse_get_price_change_percent(html: str):
+     """Extract selected security's percentage price difference between the most recent trade and the previous session close"""
+
+     try:
+         soup = BeautifulSoup(html, "html.parser")
+
+         security_info_html = soup.find("div", class_="year slide")
+         if not security_info_html:
+             raise ValueError("Could not find 'div.year.slide' section in HTML.")
+
+         trades = security_info_html.find_all("tr", class_="trade")
+         if not trades:
+             raise ValueError("No trade rows found for this security.")
+
+         recent = trades[-1]
+         previous = trades[-2]
+
+         price_change_percent = {
+             "recent_trade": {
+                 "session": safe_text(recent, "session"),
+                 "session_date": safe_text(recent, "date"),
+                 "last_trade_price": safe_text(recent, "name"),
+             },
+             "previous_trade": {
+                 "session": safe_text(previous, "session"),
+                 "session_date": safe_text(previous, "date"),
+                 "last_trade_price": safe_text(previous, "name"),
+             },
+             "price_change_percent": f"{round(((to_float(safe_text(recent, 'name')) - to_float(safe_text(previous, 'name'))) / to_float(safe_text(previous, 'name'))) * 100, 2)}",
+         }
+
+         return price_change_percent
+
+     except Exception as e:
+         print(f"[parse_get_price_change_percent] Error parsing HTML: {e}")
+         return None
+
+
+ def parse_get_sessions_average_price(symbol: str, html: str):
+     """Extract average traded price of the security over a specified session range."""
+
+     try:
+         soup = BeautifulSoup(html, "html.parser")
+
+         session_div = soup.find("div", class_="session")
+         if not session_div:
+             raise ValueError("Could not find 'div.session' section in HTML.")
+
+         rows = session_div.find_all("tr", class_="trade")
+         if not rows:
+             raise ValueError("No trade rows found in session data.")
+
+         for row in rows:
+             row_symbol = safe_text(row, "mnemonic")
+             if row_symbol == symbol:
+                 return to_float(safe_text(row, "name"))
+
+         return None
+
+     except Exception as e:
+         print(f"[parse_get_sessions_average_price] Error parsing HTML: {e}")
          return None
-
+
+
+ def parse_get_average_price(symbol: str, html: str):
+     """
+     From a /financial_session/{session}/ page, return the last_trade_price (LTP)
+     for the given symbol. Returns None if not found/unparseable.
+     """
+     try:
+         soup = BeautifulSoup(html, "html.parser")
+
+         session_div = soup.find("div", class_="session")
+         if not session_div:
+             raise ValueError("Could not find 'div.session' section in HTML.")
+
+         rows = session_div.find_all("tr", class_="trade")
+         if not rows:
+             raise ValueError("No trade rows found in session data.")
+
+         for row in rows:
+             row_symbol = safe_text(row, "mnemonic")
+             if row_symbol == symbol:
+                 return to_float(safe_text(row, "name"))
+
+         return None
+     except Exception as e:
+         print(f"[parse_get_average_price] Error parsing HTML: {e}")
+         return None
+
+
+ def parse_get_session_ltp(symbol: str, html: str):
+     """From a /financial_session/{session}/ page, return LTP for symbol."""
+
+     try:
+         soup = BeautifulSoup(html, "html.parser")
+
+         session_div = soup.find("div", class_="session")
+         if not session_div:
+             return None
+
+         rows = session_div.find_all("tr", class_="trade")
+         if not rows:
+             return None
+
+         for row in rows:
+             if safe_text(row, "mnemonic") == symbol:
+                 return to_float(safe_text(row, "name"))
+
+         return None
+     except Exception as e:
+         print(f"[parse_get_session_ltp] Error parsing HTML: {e}")
+         return None
+
+
+ def parse_get_ytd_high_low(html: str):
+     """
+     Parse the current-year trade rows in the security page HTML and return
+     YTD high/low with session/date metadata.
+     """
+
+     try:
+         year = str(datetime.now().year)
+
+         trades = parse_get_trades_for_year(year, html)
+         if not trades:
+             return None
+
+         best_high = None
+         best_low = None
+
+         for t in trades:
+             price = to_float(t.get("last_trade_price"))
+             if price is None:
+                 continue
+
+             entry = {
+                 "price": price,
+                 "session": t.get("session"),
+                 "session_date": t.get("session_date"),
+             }
+
+             if best_high is None or price > best_high["price"]:
+                 best_high = entry
+
+             if best_low is None or price < best_low["price"]:
+                 best_low = entry
+
+         if best_high is None or best_low is None:
+             return None
+
+         return {
+             "year": int(year),
+             "high": best_high,
+             "low": best_low,
+         }
+
+     except Exception as e:
+         print(f"[parse_get_ytd_high_low] Error parsing HTML: {e}")
+         return None
+
+
  def parse_get_session_trades(html: str):
      """Extract session data for all securities"""
 
@@ -117,29 +346,28 @@ def parse_get_session_trades(html: str):
          if not sessions:
              raise ValueError("No session data found.")
 
-         def safe_text(parent, class_name):
-             cell = parent.find("td", class_=class_name)
-             return cell.get_text(strip=True) if cell else None
-
          session_data = []
 
          for session in sessions:
-             session_data.append({
-                 "symbol": safe_text(session, "mnemonic"),
-                 "ltp": safe_text(session, "name"),
-                 "best_bid": safe_text(session, "best bid"),
-                 "vol_bid": safe_text(session, "vol bid"),
-                 "best_offer": safe_text(session, "best offer"),
-                 "vol_offer": safe_text(session, "vol offer"),
-                 "opening_price": safe_text(session, "opening price"),
-             })
+             session_data.append(
+                 {
+                     "symbol": safe_text(session, "mnemonic"),
+                     "last_trade_price": safe_text(session, "name"),
+                     "eps": safe_text(session, "best bid"),
+                     "pe_ratio": safe_text(session, "vol bid"),
+                     "dividends_paid_last_12_months": safe_text(session, "best offer"),
+                     "dividend_yield": safe_text(session, "vol offer"),
+                     "notes": safe_text(session, "opening price"),
+                 }
+             )
 
          return session_data
-
+
      except Exception as e:
          print(f"[parse_get_securities_session] Error parsing HTML: {e}")
          return None
-
+
+
  def parse_get_security_session_trade(symbol: str, html: str):
      """Extract session data for given security"""
 
@@ -153,32 +381,28 @@ def parse_get_security_session_trade(symbol: str, html: str):
          sessions = sessions_info_html.find_all("tr", class_="trade")
          if not sessions:
              raise ValueError("No session data found.")
-
-         def safe_text(parent, class_name):
-             cell = parent.find("td", class_=class_name)
-             return cell.get_text(strip=True) if cell else None
-
 
          for session in sessions:
              session_symbol = safe_text(session, "mnemonic")
 
-             if (session_symbol == symbol):
+             if session_symbol == symbol:
                  session_data = {
                      "symbol": safe_text(session, "mnemonic"),
-                     "ltp": safe_text(session, "name"),
-                     "best_bid": safe_text(session, "best bid"),
-                     "vol_bid": safe_text(session, "vol bid"),
-                     "best_offer": safe_text(session, "best offer"),
-                     "vol_offer": safe_text(session, "vol offer"),
-                     "opening_price": safe_text(session, "opening price"),
+                     "last_trade_price": safe_text(session, "name"),
+                     "eps": safe_text(session, "best bid"),
+                     "pe_ratio": safe_text(session, "vol bid"),
+                     "dividends_paid_last_12_months": safe_text(session, "best offer"),
+                     "dividend_yield": safe_text(session, "vol offer"),
+                     "notes": safe_text(session, "opening price"),
                  }
-
+
                  return session_data
-
+
      except Exception as e:
          print(f"[parse_get_security_session] Error parsing HTML: {e}")
          return None
-
+
+
  def parse_get_trades_for_year(year: str, html: str):
      """Get security trade information from a specific year"""
 
@@ -188,37 +412,33 @@ def parse_get_trades_for_year(year: str, html: str):
          security_info_html = soup.find("div", class_="year slide", id=year)
          if not security_info_html:
              raise ValueError("Could not find 'div.year.slide' section in HTML.")
-
+
          trade_data = []
 
          trades = security_info_html.find_all("tr", class_="trade")
          if not trades:
              raise ValueError("No trade rows found for this security.")
-
-         def safe_text(parent, class_name):
-             cell = parent.find("td", class_=class_name)
-             return cell.get_text(strip=True) if cell else None
-
+
          for trade in trades:
-             trade_data.append({
-                 "session": safe_text(trade, "session"),
-                 "date": safe_text(trade, "date"),
-                 "ltp": safe_text(trade, "name"),
-                 "best_bid": safe_text(trade, "best bid"),
-                 "vol_bid": safe_text(trade, "vol bid"),
-                 "best_offer": safe_text(trade, "best offer"),
-                 "vol_offer": safe_text(trade, "vol offer"),
-                 "opening_price": safe_text(trade, "opening price"),
-             })
+             trade_data.append(
+                 {
+                     "session": safe_text(trade, "session"),
+                     "session_date": safe_text(trade, "date"),
+                     "last_trade_price": safe_text(trade, "name"),
+                     "eps": safe_text(trade, "best bid"),
+                     "pe_ratio": safe_text(trade, "vol bid"),
+                     "dividends_paid_last_12_months": safe_text(trade, "best offer"),
+                     "dividend_yield": safe_text(trade, "vol offer"),
+                     "notes": safe_text(trade, "opening price"),
+                 }
+             )
 
          return trade_data
-
+
      except Exception as e:
          print(f"[parse_get_security_recent_year] Error parsing HTML: {e}")
          return None
 
- from datetime import datetime
- from bs4 import BeautifulSoup
 
  def parse_get_historical_trades(start_date: str, end_date: str, html: str):
      """Parse historical trade data from HTML between given dates (DD/MM/YYYY)"""
@@ -254,10 +474,6 @@ def parse_get_historical_trades(start_date: str, end_date: str, html: str):
 
          trade_data = []
 
-         def safe_text(parent, class_name):
-             cell = parent.find("td", class_=class_name)
-             return cell.get_text(strip=True) if cell else None
-
          for section in year_sections:
              year_id = section.get("id")
              if not year_id or not year_id.isdigit():
@@ -279,18 +495,22 @@ def parse_get_historical_trades(start_date: str, end_date: str, html: str):
                      continue
 
                  if start <= trade_date <= end:
-                     trade_data.append({
-                         "session": safe_text(trade, "session"),
-                         "date": date_text,
-                         "ltp": safe_text(trade, "name"),
-                         "best_bid": safe_text(trade, "best bid"),
-                         "vol_bid": safe_text(trade, "vol bid"),
-                         "best_offer": safe_text(trade, "best offer"),
-                         "vol_offer": safe_text(trade, "vol offer"),
-                         "opening_price": safe_text(trade, "opening price"),
-                     })
-
-         trade_data.sort(key=lambda x: datetime.strptime(x["date"], "%d/%m/%Y"))
+                     trade_data.append(
+                         {
+                             "session": safe_text(trade, "session"),
+                             "session_date": safe_text(trade, "date"),
+                             "last_trade_price": safe_text(trade, "name"),
+                             "eps": safe_text(trade, "best bid"),
+                             "pe_ratio": safe_text(trade, "vol bid"),
+                             "dividends_paid_last_12_months": safe_text(
+                                 trade, "best offer"
+                             ),
+                             "dividend_yield": safe_text(trade, "vol offer"),
+                             "notes": safe_text(trade, "opening price"),
+                         }
+                     )
+
+         trade_data.sort(key=lambda x: datetime.strptime(x["session_date"], "%d/%m/%Y"))
          return trade_data
 
      except Exception as e:
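The renamed row keys (ltp → last_trade_price, best_bid → eps, vol_bid → pe_ratio, and so on) suggest the 2.0 parsers relabel the scraped table cells to match what the exchange pages actually report, and the new price-change parsers derive their figures from the two most recent trade rows. A worked sketch of the same arithmetic with made-up prices:

# Hypothetical last-trade prices for the two most recent rows of a security page.
previous_ltp = 181.00
recent_ltp = 190.10

# Mirrors parse_get_price_change: absolute difference between the two rows.
price_difference = recent_ltp - previous_ltp  # ~9.10 (plain float arithmetic)

# Mirrors parse_get_price_change_percent: percentage move off the previous close.
price_change_percent = round((recent_ltp - previous_ltp) / previous_ltp * 100, 2)  # 5.03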
financegy/helpers/safe_text.py ADDED
@@ -0,0 +1,3 @@
+ def safe_text(parent, class_name):
+     cell = parent.find("td", class_=class_name)
+     return cell.get_text(strip=True) if cell else None
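The extracted safe_text helper replaces the per-function copies deleted above and never raises on a missing cell. A small self-contained check of its behaviour; the row markup is illustrative only, in the style of the tr.trade rows parsed throughout parser.py:

from bs4 import BeautifulSoup

from financegy.helpers.safe_text import safe_text

row_html = '<tr class="trade"><td class="session">1021</td><td class="name">190.1</td></tr>'
row = BeautifulSoup(row_html, "html.parser").find("tr", class_="trade")

print(safe_text(row, "session"))  # "1021"
print(safe_text(row, "name"))     # "190.1"
print(safe_text(row, "date"))     # None -- the cell is absent, no exception raised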