hkjc 0.3.18-py3-none-any.whl → 0.3.19-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hkjc/__init__.py +4 -4
- hkjc/features.py +6 -0
- hkjc/historical.py +28 -43
- hkjc/live.py +22 -18
- hkjc/processing.py +6 -1
- {hkjc-0.3.18.dist-info → hkjc-0.3.19.dist-info}/METADATA +1 -1
- hkjc-0.3.19.dist-info/RECORD +14 -0
- hkjc/analysis.py +0 -3
- hkjc-0.3.18.dist-info/RECORD +0 -14
- {hkjc-0.3.18.dist-info → hkjc-0.3.19.dist-info}/WHEEL +0 -0
hkjc/__init__.py
CHANGED
@@ -4,10 +4,10 @@ This module re-exports commonly used symbols from the submodules.
 """
 from importlib.metadata import version as _version
 
-__all__ = ["live", "
+__all__ = ["live", "features",
            "generate_all_qp_trades", "generate_all_pla_trades", "pareto_filter",
-
-
+           "speedpro_energy", "speedmap", "harveille_model",
+           "generate_historical_data"]
 
 try:
     __version__ = _version(__name__)
@@ -17,4 +17,4 @@ except Exception:  # pragma: no cover - best-effort version resolution
 from .processing import generate_all_qp_trades, generate_all_pla_trades, generate_historical_data
 from .utils import pareto_filter
 from .speedpro import speedmap, speedpro_energy
-from . import harville_model, live
+from . import harville_model, live, features
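With features now re-exported at the package root, the public surface looks roughly like this (a sketch limited to the names visible in this diff):

    from hkjc import live, features, harville_model
    from hkjc import speedmap, speedpro_energy, pareto_filter
    from hkjc import generate_all_qp_trades, generate_all_pla_trades, generate_historical_data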
hkjc/features.py
ADDED
@@ -0,0 +1,6 @@
+""" Polars expressions to commonly-used analysis features, subject to frequent changes.
+"""
+import polars as pl
+
+rating_diff = (pl.col('Rtg').max().over('RaceId')-pl.col('Rtg')).alias('RtgDiff')
+frontrunner_pct = (pl.col('FavoriteRunningStyle')=='FrontRunner').mean().over('RaceId').alias('FRPct')
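A minimal usage sketch for the two new expressions, assuming a frame that already carries the 'RaceId', 'Rtg' and 'FavoriteRunningStyle' columns the historical pipeline is expected to produce (the sample rows are made up):

    import polars as pl
    from hkjc import features

    df = pl.DataFrame({
        'RaceId': ['20240120HV1'] * 3,
        'Rtg': [80, 74, 68],
        'FavoriteRunningStyle': ['FrontRunner', 'Closer', 'FrontRunner'],
    })

    # RtgDiff: points below the top-rated runner in the same race.
    # FRPct: share of runners in the race classified as FrontRunner.
    print(df.with_columns(features.rating_diff, features.frontrunner_pct))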
hkjc/historical.py
CHANGED
@@ -52,11 +52,13 @@ def _classify_running_style(df: pl.DataFrame, running_pos_col="RunningPosition")
         .alias("split_data").cast(pl.Int64, strict=False)
     ).unnest("split_data")
 
-    df = df.with_columns(
+    df = df.with_columns(
+        pl.col('FinishPosition').fill_null(pl.col('Position3')))
 
     df = df.with_columns([
         (pl.col("StartPosition")-pl.col("FinishPosition")).alias("PositionChange"),
-        pl.mean_horizontal("StartPosition", "Position2").alias(
+        pl.mean_horizontal("StartPosition", "Position2").alias(
+            "AvgStartPosition"),
     ]).with_columns(pl.when(pl.col("StartPosition").is_null()).then(pl.lit("--"))
         .when((pl.col("AvgStartPosition") <= 3) & (pl.col("StartPosition") <= 3)).then(pl.lit("FrontRunner"))
         .when((pl.col("PositionChange") >= 1) & (pl.col("StartPosition") >= 6)).then(pl.lit("Closer"))
@@ -77,35 +79,7 @@ def _extract_horse_data(horse_no: str) -> pl.DataFrame:
         pl.col('Date') != '') # Remove empty rows
     horse_data = _classify_running_style(horse_data)
 
-
-    table = soup.find_all('table', class_='table_eng_text')
-    profile_data = _parse_html_table(table[0], skip_header=True)
-    profile_data = _parse_html_table(table[1], skip_header=True)
-
-    try:
-        current_rating = int(profile_data.filter(
-            pl.col("column_0").str.starts_with("Current Rating"))['column_2'].item(0))
-        season_start_rating = int(profile_data.filter(pl.col(
-            "column_0").str.starts_with("Start of Season Rating"))['column_2'].item(0))
-    except:
-        current_rating, season_start_rating = 0, 0
-
-    try:
-        last_rating = int(profile_data.filter(
-            pl.col("column_0").str.starts_with("Last Rating"))['column_2'].item(0))
-    except:
-        last_rating = 0
-
-    horse_info = {
-        'HorseID': horse_no,
-        'CurrentRating': current_rating,
-        'SeasonStartRating': season_start_rating,
-        'LastRating': last_rating if current_rating == 0 else current_rating
-    }
-    horse_data = (horse_data.with_columns([
-        pl.lit(value).alias(key) for key, value in horse_info.items()
-    ])
-    )
+    horse_data = horse_data.with_columns(pl.lit(horse_no).alias('HorseNo'))
 
     return horse_data
 
@@ -124,16 +98,16 @@ def _clean_horse_data(df: pl.DataFrame) -> pl.DataFrame:
         pl.col('Dr').cast(pl.Int64, strict=False),
         pl.col('Rtg').cast(pl.Int64, strict=False),
         pl.col('Dist').cast(pl.Int64, strict=False),
-        pl.col('WinOdds').cast(pl.Float64, strict=False)
-        pl.col('RaceIndex').cast(pl.Int64, strict=False)
+        pl.col('WinOdds').cast(pl.Float64, strict=False)
     ])
 
-    df = df.
+    df = (df.filter(~pl.col('FinishTime').str.starts_with('--'))
+          .with_columns(
         (
-            pl.col("FinishTime").str.
-            pl.col("FinishTime").str.
-        ).cast(pl.Float64).alias("FinishTime")
-    )
+            pl.col("FinishTime").str.splitn(".", 2).struct.field("field_0").cast(pl.Int64) * 60 +
+            pl.col("FinishTime").str.splitn(".", 2).struct.field("field_1").cast(pl.Float64)
+        ).cast(pl.Float64).round(2).alias("FinishTime")
+    ))
 
     df = df.with_columns(
         pl.col('RCTrackCourse').str.split_exact(' / ', 2)
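The reconstructed FinishTime expression turns the scraped time strings into seconds. A small standalone check of the arithmetic (sample values are made up, and the 'M.SS.hh' layout is an assumption about the scraped field):

    import polars as pl

    df = pl.DataFrame({'FinishTime': ['1.09.56', '0.56.23']})

    # splitn(".", 2) keeps the minutes in field_0 and leaves "SS.hh" intact in field_1.
    finish_seconds = (
        pl.col("FinishTime").str.splitn(".", 2).struct.field("field_0").cast(pl.Int64) * 60 +
        pl.col("FinishTime").str.splitn(".", 2).struct.field("field_1").cast(pl.Float64)
    ).cast(pl.Float64).round(2).alias("FinishTime")

    print(df.with_columns(finish_seconds))  # 1*60 + 9.56 -> 69.56, 0*60 + 56.23 -> 56.23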
@@ -141,12 +115,22 @@ def _clean_horse_data(df: pl.DataFrame) -> pl.DataFrame:
         .alias('RCTrackCourse')
     ).unnest('RCTrackCourse')
 
+    df = df.with_columns(
+        pl.when(pl.col('Date').str.len_chars() <= 8)
+        .then(pl.col('Date').str.strptime(pl.Date, '%d/%m/%y', strict=False))
+        .otherwise(pl.col('Date').str.strptime(pl.Date, '%d/%m/%Y'))
+    ).with_columns(
+        pl.concat_str(pl.col('Date').dt.strftime('%Y%m%d'), pl.col(
+            'Venue'), pl.col('RaceIndex')).alias('RaceId')
+    ).drop("VideoReplay")
     return df
 
+
 def get_horse_data(horse_no: str) -> pl.DataFrame:
     df = _extract_horse_data(horse_no)
     return _clean_horse_data(df)
 
+
 def _clean_race_data(df: pl.DataFrame) -> pl.DataFrame:
     """ Clean and convert horse data to suitable data types
     """
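A worked sketch of the new date normalisation and the RaceId key, using made-up rows covering the two date layouts the branch distinguishes (two-digit vs four-digit years):

    import polars as pl

    # Hypothetical rows; assumes 'Venue' and 'RaceIndex' arrive as strings from the scraper.
    df = pl.DataFrame({'Date': ['20/01/24', '20/01/2024'],
                       'Venue': ['HV', 'ST'],
                       'RaceIndex': ['3', '10']})

    df = df.with_columns(
        pl.when(pl.col('Date').str.len_chars() <= 8)
        .then(pl.col('Date').str.strptime(pl.Date, '%d/%m/%y', strict=False))
        .otherwise(pl.col('Date').str.strptime(pl.Date, '%d/%m/%Y'))
    ).with_columns(
        pl.concat_str(pl.col('Date').dt.strftime('%Y%m%d'), pl.col('Venue'), pl.col('RaceIndex')).alias('RaceId')
    )

    print(df['RaceId'].to_list())  # ['20240120HV3', '20240120ST10']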
@@ -165,13 +149,14 @@ def _clean_race_data(df: pl.DataFrame) -> pl.DataFrame:
 
     df = df.with_columns(
         (
-            pl.col("FinishTime").str.
-            pl.col("FinishTime").str.
-        ).cast(pl.Float64).alias("FinishTime")
+            pl.col("FinishTime").str.splitn(":", 2).struct.field("field_0").cast(pl.Int64) * 60 +
+            pl.col("FinishTime").str.splitn(":", 2).struct.field("field_1").cast(pl.Float64)
+        ).cast(pl.Float64).round(2).alias("FinishTime")
     )
 
     return df
 
+
 def _extract_race_data(date: str, venue_code: str, race_number: int) -> pl.DataFrame:
     soup = _soupify_race_page(date, venue_code, race_number)
     table = soup.find('div', class_='race_tab').find('table')
@@ -211,5 +196,5 @@ def _extract_race_data(date: str, venue_code: str, race_number: int) -> pl.DataF
 
 
 def get_race_data(date: str, venue_code: str, race_number: int) -> pl.DataFrame:
-    df = _extract_race_data(date,venue_code,race_number)
-    return _clean_race_data(df)
+    df = _extract_race_data(date, venue_code, race_number)
+    return _clean_race_data(df)
hkjc/live.py
CHANGED
@@ -7,8 +7,6 @@ import requests
 from cachetools.func import ttl_cache
 import numpy as np
 
-from .utils import _validate_date, _validate_venue_code
-
 HKJC_LIVEODDS_ENDPOINT = "https://info.cld.hkjc.com/graphql/base/"
 
 RACEMTG_PAYLOAD = {
@@ -245,7 +243,7 @@ query racing($date: String, $venueCode: String, $oddsTypes: [OddsType], $raceNo:
 
 
 @ttl_cache(maxsize=12, ttl=1000)
-def _fetch_live_races(date: str, venue_code: str) -> dict:
+def _fetch_live_races(date: str=None, venue_code: str=None) -> dict:
     """Fetch live race data from HKJC GraphQL endpoint."""
     payload = RACEMTG_PAYLOAD.copy()
     payload["variables"] = payload["variables"].copy()
@@ -265,9 +263,10 @@ def _fetch_live_races(date: str, venue_code: str) -> dict:
     if r.status_code != 200:
         raise RuntimeError(f"Request failed: {r.status_code} - {r.text}")
 
-
+    data = r.json()['data']['raceMeetings'][0]
+    races = data['races']
 
-    race_info = {}
+    race_info = {'Date': data['date'], 'Venue': data['venueCode'], 'Races': {}}
     for race in races:
         race_num = race['no']
         race_name = race['raceName_en']
@@ -277,12 +276,15 @@ def _fetch_live_races(date: str, venue_code: str) -> dict:
         race_class = race['raceClass_en']
         race_course = race['raceCourse']['displayCode']
 
-        runners = [{'
-                    '
-                    '
+        runners = [{'No': runner['no'],
+                    'Name': runner['name_en'],
+                    'Dr': runner['barrierDrawNumber'],
+                    'Rtg': int(runner['currentRating']),
+                    'Wt': int(runner['currentWeight']),
+                    'Handicap': int(runner['handicapWeight']),
                     'HorseNo': runner['horse']['code']
-                    } for runner in race['runners']]
-        race_info[race_num]={
+                    } for runner in race['runners'] if runner['status'] != "Standby"]
+        race_info['Races'][race_num] = {
             'No': race_num,
             'Name': race_name,
             'Class': race_class,
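For reference, the meeting payload returned by _fetch_live_races now nests per-race entries under a 'Races' key and carries the server-confirmed date and venue at the top level. A rough sketch of the shape, with illustrative placeholder values and only the keys visible in this diff:

    race_info = {
        'Date': '2024-01-20',   # whatever the GraphQL endpoint reports for the meeting
        'Venue': 'HV',
        'Races': {
            1: {
                'No': 1, 'Name': '...', 'Class': '...',
                'Dist': 1200, 'Going': '...', 'Track': '...',
                'Runners': [
                    {'No': 1, 'Name': '...', 'Dr': 3, 'Rtg': 60, 'Wt': 1050,
                     'Handicap': 126, 'HorseNo': 'K123'},
                    # standby runners are excluded by the new status filter
                ],
            },
            # ... one entry per race number
        },
    }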
@@ -290,13 +292,13 @@ def _fetch_live_races(date: str, venue_code: str) -> dict:
             'Dist': race_dist,
             'Going': race_going,
             'Track': race_track,
-            'Runners': runners
+            'Runners': runners
         }
     return race_info
 
 
 @ttl_cache(maxsize=12, ttl=30)
-def _fetch_live_odds(date: str, venue_code: str, race_number: int, odds_type: Tuple[str] = ('PLA',
+def _fetch_live_odds(date: str, venue_code: str, race_number: int, odds_type: Tuple[str] = ('PLA', )) -> List[dict]:
     """Fetch live odds data from HKJC GraphQL endpoint."""
     payload = LIVEODDS_PAYLOAD.copy()
     payload["variables"] = payload["variables"].copy()
@@ -329,14 +331,14 @@ def _fetch_live_odds(date: str, venue_code: str, race_number: int, odds_type: Tu
     ]
 
 
-def live_odds(date: str, venue_code: str, race_number: int, odds_type: List[str] = ['PLA', 'QPL']) -> dict:
+def live_odds(date: str, venue_code: str, race_number: int, odds_type: List[str] = ['WIN', 'PLA', 'QPL', 'QIN']) -> dict:
     """Fetch live odds as numpy arrays.
 
     Args:
         date (str): Date in 'YYYY-MM-DD' format.
         venue_code (str): Venue code, e.g., 'ST' for Shatin, 'HV' for Happy Valley.
         race_number (int): Race number.
-        odds_type (List[str]): Types of odds to fetch. Default is ['PLA', 'QPL']. Currently the following types are supported:
+        odds_type (List[str]): Types of odds to fetch. Default is ['WIN', 'PLA', 'QPL', 'QIN']. Currently the following types are supported:
             - 'WIN': Win odds
             - 'PLA': Place odds
             - 'QIN': Quinella odds
@@ -348,11 +350,13 @@ def live_odds(date: str, venue_code: str, race_number: int, odds_type: List[str]
         If odds_type is 'WIN','PLA', returns a 1D array of place odds.
         If odds_type is 'QIN','QPL', returns a 2D array of quinella place odds.
     """
-    _validate_date(date)
-    _validate_venue_code(venue_code)
-
     race_info = _fetch_live_races(date, venue_code)
-    N = len(race_info[race_number]['Runners'])
+    N = len(race_info['Races'][race_number]['Runners'])
+
+    if (race_info['Date'] != date) or (race_info['Venue'] != venue_code):
+        print(f"[WARNING] Requested {date} {venue_code} but server returned {race_info['Date']} {race_info['Venue']}.")
+        date = race_info['Date']
+        venue_code = race_info['Venue']
 
     data = _fetch_live_odds(date, venue_code, race_number,
                             odds_type=tuple(odds_type))
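A minimal call sketch against the updated signature; the meeting below is a placeholder, and per the new fallback a mismatched date or venue only triggers a warning before the server-returned meeting is used. The docstring above describes the return shapes (1D arrays for 'WIN'/'PLA', 2D arrays for 'QIN'/'QPL'):

    from hkjc import live

    # Placeholder meeting; defaults now request ['WIN', 'PLA', 'QPL', 'QIN'].
    odds = live.live_odds('2024-01-20', 'HV', race_number=1)

    # Narrower request when only place odds are needed.
    pla_only = live.live_odds('2024-01-20', 'HV', race_number=1, odds_type=['PLA'])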
hkjc/processing.py
CHANGED
@@ -63,7 +63,12 @@ def generate_historical_data(start_date: str, end_date: str) -> pl.DataFrame:
     # Use horse track records
     dfs = [_extract_horse_data(horse_id) for horse_id in tqdm(horse_ids, desc='Processing horses ...', leave=False)]
     df = pl.concat(dfs)
-
+
+    try:
+        return _clean_horse_data(df).filter(pl.col('Date').is_between(start_dt, end_dt))
+    except:
+        print('Failed to clean data. Returning raw data for debug.')
+        return df
 
 
 # ==========================
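A usage sketch for the hardened pipeline; the ISO-style date strings are an assumption here (the signature only promises str), and on a cleaning failure the call now returns the raw concatenated frame for debugging instead of raising:

    from hkjc import generate_historical_data

    # Assumed date format for the window filter; adjust to whatever the scraper expects.
    df = generate_historical_data('2024-01-01', '2024-03-31')
    print(df.head())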
hkjc-0.3.19.dist-info/RECORD
ADDED
@@ -0,0 +1,14 @@
+hkjc/__init__.py,sha256=XSm9N6YbZ2SzyxjO9aR26ctB4Z1-VeBImuroSgncUfk,737
+hkjc/features.py,sha256=1mcF9Pq2LsnQ8yIv2F8Uyg1HXz7LdNcPf9RDzFN3sbA,297
+hkjc/harville_model.py,sha256=MZjPLS-1nbEhp1d4Syuq13DtraKnd7TlNqBmOOCwxgc,15976
+hkjc/historical.py,sha256=Yujb4Q2cTkvVvvZxVaSoawbwwxfzq03lprG_s_4H7Dk,7682
+hkjc/live.py,sha256=CfMeHRQfhKSmhQaexM99sdP0KRbIEqg2DIvNPc1gohk,10696
+hkjc/processing.py,sha256=uNjM5eeH9Mj8Dg9-9K7z-7xeufaXJT42F49zUHzj0h0,6968
+hkjc/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+hkjc/speedpro.py,sha256=Y2Z3GYGeePc4sM-ZnCHXCI1N7L-_j9nrMqS3CC5BBSo,2031
+hkjc/utils.py,sha256=4CA_FPf_U3GvzoLkqBX0qDPZgrSvKJKvbP7VWqd5FiA,6323
+hkjc/strategy/place_only.py,sha256=lHPjTSj8PzghxncNBg8FI4T4HJigekB9a3bV7l7VtPA,2079
+hkjc/strategy/qpbanker.py,sha256=MQxjwsfhllKZroKS8w8Q3bi3HMjGc1DAyBIjNZAp3yQ,4805
+hkjc-0.3.19.dist-info/METADATA,sha256=npklDb_gSAZHliXMNDZYOlRIuV_Klkzgw4ELdW9zzjc,480
+hkjc-0.3.19.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+hkjc-0.3.19.dist-info/RECORD,,
hkjc/analysis.py
DELETED
hkjc-0.3.18.dist-info/RECORD
DELETED
@@ -1,14 +0,0 @@
-hkjc/__init__.py,sha256=5A9MzcITYJDcA2UbIBpkimZBYSqS4pgRuQJhTagOfpE,753
-hkjc/analysis.py,sha256=0042_NMIkQCl0J6B0P4TFfrBDCnm2B6jsCZKOEO30yI,108
-hkjc/harville_model.py,sha256=MZjPLS-1nbEhp1d4Syuq13DtraKnd7TlNqBmOOCwxgc,15976
-hkjc/historical.py,sha256=v9k_R47Na5en5ftrocjIHofkNAUthE_lp4CyLaCTsQE,8280
-hkjc/live.py,sha256=GqctH-BVdIL6Vi1g8XHe3p8fZBopCQf5KACLAR0meP0,10249
-hkjc/processing.py,sha256=H0chtW_FBMMhK3IzcjYjrryd3fAPYimanc2fWuGiB0M,6807
-hkjc/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-hkjc/speedpro.py,sha256=Y2Z3GYGeePc4sM-ZnCHXCI1N7L-_j9nrMqS3CC5BBSo,2031
-hkjc/utils.py,sha256=4CA_FPf_U3GvzoLkqBX0qDPZgrSvKJKvbP7VWqd5FiA,6323
-hkjc/strategy/place_only.py,sha256=lHPjTSj8PzghxncNBg8FI4T4HJigekB9a3bV7l7VtPA,2079
-hkjc/strategy/qpbanker.py,sha256=MQxjwsfhllKZroKS8w8Q3bi3HMjGc1DAyBIjNZAp3yQ,4805
-hkjc-0.3.18.dist-info/METADATA,sha256=aoXp6Fvn3EkuXyv6p5LClSbZa5XS_bfcUxMKBJXcNvw,480
-hkjc-0.3.18.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-hkjc-0.3.18.dist-info/RECORD,,
{hkjc-0.3.18.dist-info → hkjc-0.3.19.dist-info}/WHEEL
File without changes