hockey-blast-common-lib 0.1.48-py3-none-any.whl → 0.1.50-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hockey_blast_common_lib/aggregate_goalie_stats.py +7 -3
- hockey_blast_common_lib/aggregate_human_stats.py +7 -4
- hockey_blast_common_lib/aggregate_referee_stats.py +12 -8
- hockey_blast_common_lib/aggregate_skater_stats.py +8 -5
- hockey_blast_common_lib/hockey_blast_sample_backup.sql.gz +0 -0
- hockey_blast_common_lib/options.py +1 -0
- hockey_blast_common_lib/utils.py +13 -3
- {hockey_blast_common_lib-0.1.48.dist-info → hockey_blast_common_lib-0.1.50.dist-info}/METADATA +1 -1
- {hockey_blast_common_lib-0.1.48.dist-info → hockey_blast_common_lib-0.1.50.dist-info}/RECORD +11 -12
- hockey_blast_common_lib/find_leagues_seasons.py +0 -53
- {hockey_blast_common_lib-0.1.48.dist-info → hockey_blast_common_lib-0.1.50.dist-info}/WHEEL +0 -0
- {hockey_blast_common_lib-0.1.48.dist-info → hockey_blast_common_lib-0.1.50.dist-info}/top_level.txt +0 -0
hockey_blast_common_lib/aggregate_goalie_stats.py
CHANGED
@@ -56,6 +56,10 @@ def aggregate_goalie_stats(session, aggregation_type, aggregation_id, names_to_f
     else:
         raise ValueError("Invalid aggregation type")
 
+    # Delete existing items from the stats table
+    session.query(StatsModel).filter(StatsModel.aggregation_id == aggregation_id).delete()
+    session.commit()
+
     # Apply aggregation window filter
     if aggregation_window:
         last_game_datetime_str = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
@@ -63,10 +67,10 @@ def aggregate_goalie_stats(session, aggregation_type, aggregation_id, names_to_f
         if start_datetime:
             game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime_str)
             filter_condition = filter_condition & game_window_filter
+        else:
+            #print(f"Warning: No valid start datetime for aggregation window '{aggregation_window}' for {aggregation_name}. No games will be included.")
+            return
 
-    # Delete existing items from the stats table
-    session.query(StatsModel).filter(StatsModel.aggregation_id == aggregation_id).delete()
-    session.commit()
 
     # Filter for specific human_id if provided
     human_filter = []
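The same reordering repeats in all four aggregation modules: the delete of previously aggregated rows now runs before the aggregation-window check instead of after it, so an early return for an empty window leaves the stats table cleared rather than holding the previous run's rows. A minimal control-flow sketch (stand-in arguments rather than the library's real session and models; the aggregation queries are elided):

```python
# Sketch of the 0.1.50 ordering; `stats_model`, `get_start_datetime` and the
# session are stand-ins passed in by the caller, not the library's real objects.
def aggregate_stats(session, stats_model, aggregation_id, aggregation_window,
                    get_start_datetime, last_game_datetime_str):
    # Delete existing items from the stats table, now done up front so the
    # early return below cannot skip it.
    session.query(stats_model).filter(stats_model.aggregation_id == aggregation_id).delete()
    session.commit()

    if aggregation_window:
        start_datetime = get_start_datetime(last_game_datetime_str, aggregation_window)
        if start_datetime is None:
            # 0.1.48 deleted after this point, so a stale table survived here;
            # 0.1.50 returns with the table already cleared.
            return
        # ... narrow the game filter to [start_datetime, last_game_datetime_str] ...

    # ... recompute the stats and insert fresh rows ...
```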
hockey_blast_common_lib/aggregate_human_stats.py
CHANGED
@@ -54,6 +54,10 @@ def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_fi
     else:
         raise ValueError("Invalid aggregation type")
 
+    # Delete existing items from the stats table
+    session.query(StatsModel).filter(StatsModel.aggregation_id == aggregation_id).delete()
+    session.commit()
+
     # Apply aggregation window filter
     if aggregation_window:
         last_game_datetime_str = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
@@ -61,10 +65,9 @@ def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_fi
         if start_datetime:
             game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime_str)
             filter_condition = filter_condition & game_window_filter
-
-
-
-    session.commit()
+        else:
+            #print(f"Warning: No valid start datetime for aggregation window '{aggregation_window}' for {aggregation_name}. No games will be included.")
+            return
 
     # Filter for specific human_id if provided
     human_filter = []
hockey_blast_common_lib/aggregate_referee_stats.py
CHANGED
@@ -48,12 +48,16 @@ def aggregate_referee_stats(session, aggregation_type, aggregation_id, names_to_
         min_games = MIN_GAMES_FOR_LEVEL_STATS
         filter_condition = Division.level_id == aggregation_id
         # Add filter to only include games for the last 5 years
-        five_years_ago = datetime.now() - timedelta(days=5*365)
-        level_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP) >= five_years_ago
-        filter_condition = filter_condition & level_window_filter
+        # five_years_ago = datetime.now() - timedelta(days=5*365)
+        # level_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP) >= five_years_ago
+        # filter_condition = filter_condition & level_window_filter
     else:
         raise ValueError("Invalid aggregation type")
 
+    # Delete existing items from the stats table
+    session.query(StatsModel).filter(StatsModel.aggregation_id == aggregation_id).delete()
+    session.commit()
+
     # Apply aggregation window filter
     if aggregation_window:
         last_game_datetime_str = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
@@ -61,11 +65,11 @@ def aggregate_referee_stats(session, aggregation_type, aggregation_id, names_to_
         if start_datetime:
             game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime_str)
             filter_condition = filter_condition & game_window_filter
+        else:
+            #print(f"Warning: No valid start datetime for aggregation window '{aggregation_window}' for {aggregation_name}. No games will be included.")
+            return
 
-
-    session.query(StatsModel).filter(StatsModel.aggregation_id == aggregation_id).delete()
-    session.commit()
-
+    filter_condition = filter_condition & (Division.id == Game.division_id)
     # Aggregate games reffed for each referee
     games_reffed_stats = session.query(
         Game.referee_1_id.label('human_id'),
@@ -239,4 +243,4 @@ def run_aggregate_referee_stats():
     aggregate_referee_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names)
 
 if __name__ == "__main__":
-    run_aggregate_referee_stats()
+    run_aggregate_referee_stats()
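Two referee-specific details stand out above: the hard-coded five-year window for level stats is commented out, and the query gains an explicit Division.id == Game.division_id predicate, which is what ties Game rows to the Division being filtered by level. A self-contained sketch of that pattern with hypothetical toy models and an in-memory SQLite database (not the library's real schema):

```python
# Toy models, in-memory SQLite; illustrative only, not the library's schema.
from sqlalchemy import Column, Integer, create_engine, func
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Division(Base):
    __tablename__ = "divisions"
    id = Column(Integer, primary_key=True)
    level_id = Column(Integer)

class Game(Base):
    __tablename__ = "games"
    id = Column(Integer, primary_key=True)
    division_id = Column(Integer)
    referee_1_id = Column(Integer)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    filter_condition = Division.level_id == 3
    # Explicit join predicate, as added for the referee aggregation in 0.1.50;
    # without it, every game would be paired with every division at that level.
    filter_condition = filter_condition & (Division.id == Game.division_id)

    games_reffed_stats = (
        session.query(Game.referee_1_id.label("human_id"),
                      func.count(Game.id).label("games_reffed"))
        .filter(filter_condition)
        .group_by(Game.referee_1_id)
        .all()
    )
    print(games_reffed_stats)  # [] against the empty toy database
```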
hockey_blast_common_lib/aggregate_skater_stats.py
CHANGED
@@ -65,6 +65,10 @@ def aggregate_skater_stats(session, aggregation_type, aggregation_id, names_to_f
     else:
         raise ValueError("Invalid aggregation type")
 
+    # Delete existing items from the stats table
+    session.query(StatsModel).filter(StatsModel.aggregation_id == aggregation_id).delete()
+    session.commit()
+
     # Apply aggregation window filter
     if aggregation_window:
         last_game_datetime_str = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
@@ -72,11 +76,10 @@ def aggregate_skater_stats(session, aggregation_type, aggregation_id, names_to_f
         if start_datetime:
             game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime_str)
             filter_condition = filter_condition & game_window_filter
-
-
-
-
-
+        else:
+            #print(f"Warning: No valid start datetime for aggregation window '{aggregation_window}' for {aggregation_name}. No games will be included.")
+            return
+
     # Filter for specific human_id if provided
     human_filter = []
     # if debug_human_id:
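Every hunk above also touches the shared game_window_filter expression, which builds a timestamp from the separate Game.date and Game.time columns and bounds it to the aggregation window. A small sketch of how that expression composes, with a hypothetical toy table standing in for the real models in hockey_blast_common_lib/models.py:

```python
# Hypothetical toy table; shows only how the filter expression is assembled.
import sqlalchemy
from sqlalchemy import Column, Date, Integer, String, Time, func
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Game(Base):
    __tablename__ = "games"
    id = Column(Integer, primary_key=True)
    date = Column(Date)
    time = Column(Time)
    status = Column(String)

# Date and time live in separate columns, so they are concatenated and cast to
# a TIMESTAMP before the BETWEEN range check, mirroring the expression in the diff.
start_datetime = '2024-01-01 00:00:00'
last_game_datetime_str = '2024-01-08 00:00:00'
game_window_filter = func.cast(
    func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP
).between(start_datetime, last_game_datetime_str)

# The window is then ANDed onto whatever filter the aggregation type built.
filter_condition = Game.status.like('Final%') & game_window_filter
```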
hockey_blast_common_lib/hockey_blast_sample_backup.sql.gz
CHANGED
Binary file
hockey_blast_common_lib/utils.py
CHANGED
@@ -14,7 +14,8 @@ def get_org_id_from_alias(session, org_alias):
     predefined_organizations = [
         {"id": 1, "organization_name": "Sharks Ice", "alias": "sharksice", "website": "https://www.sharksice.com"},
         {"id": 2, "organization_name": "TriValley Ice", "alias": "tvice", "website": "https://www.trivalleyice.com"},
-        {"id": 3, "organization_name": "CAHA", "alias": "caha", "website": "https://www.caha.com"}
+        {"id": 3, "organization_name": "CAHA", "alias": "caha", "website": "https://www.caha.com"},
+        {"id": 4, "organization_name": "Tacoma Twin Rinks", "alias": "ttr", "website": "https://psicesports.com"},
     ]
 
     # Check if the organization exists
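With the fourth entry in place, the new "ttr" alias resolves through the same helper as the existing organizations. A brief usage sketch (it assumes an open SQLAlchemy session bound to the hockey_blast database, which this hunk does not show):

```python
# Usage sketch; `session` is assumed to be an open SQLAlchemy session.
from hockey_blast_common_lib.utils import get_org_id_from_alias

def tacoma_org_id(session):
    # Expected to resolve to 4, the id paired with the "ttr" alias above.
    return get_org_id_from_alias(session, "ttr")
```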
@@ -84,13 +85,22 @@ def get_fake_human_for_stats(session):
     return human.id
 
 def get_start_datetime(last_game_datetime_str, aggregation_window):
+    if aggregation_window == 'Weekly':
+        if last_game_datetime_str:
+            last_game_datetime = datetime.strptime(last_game_datetime_str, '%Y-%m-%d %H:%M:%S')
+            # Check if the last game datetime is over 1 week from now
+            if datetime.now() - last_game_datetime > timedelta(weeks=1):
+                return None
+            # Use current time as the start of the weekly window
+            return datetime.now() - timedelta(weeks=1)
     if last_game_datetime_str:
         last_game_datetime = datetime.strptime(last_game_datetime_str, '%Y-%m-%d %H:%M:%S')
         if aggregation_window == 'Daily':
+            # Check if the last game datetime is over 24 hours from now
+            if datetime.now() - last_game_datetime > timedelta(hours=24):
+                return None
             # From 10AM till midnight, 14 hours to avoid last day games
             return last_game_datetime - timedelta(hours=14)
-        elif aggregation_window == 'Weekly':
-            return last_game_datetime - timedelta(weeks=1)
     return None
 
 def assign_ranks(stats_dict, field, reverse_rank=False):
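Reassembled from the hunk above (indentation inferred), the new get_start_datetime reads as follows. The behavioral change: 'Weekly' windows are now anchored to the current time, and both window types return None when the newest game is too old, which is what triggers the new `else: return` branches in the aggregation modules.

```python
from datetime import datetime, timedelta

def get_start_datetime(last_game_datetime_str, aggregation_window):
    if aggregation_window == 'Weekly':
        if last_game_datetime_str:
            last_game_datetime = datetime.strptime(last_game_datetime_str, '%Y-%m-%d %H:%M:%S')
            # Check if the last game datetime is over 1 week from now
            if datetime.now() - last_game_datetime > timedelta(weeks=1):
                return None
            # Use current time as the start of the weekly window
            return datetime.now() - timedelta(weeks=1)
    if last_game_datetime_str:
        last_game_datetime = datetime.strptime(last_game_datetime_str, '%Y-%m-%d %H:%M:%S')
        if aggregation_window == 'Daily':
            # Check if the last game datetime is over 24 hours from now
            if datetime.now() - last_game_datetime > timedelta(hours=24):
                return None
            # From 10AM till midnight, 14 hours to avoid last day games
            return last_game_datetime - timedelta(hours=14)
    return None

# A game played three days ago yields a weekly window start but no daily one.
recent = (datetime.now() - timedelta(days=3)).strftime('%Y-%m-%d %H:%M:%S')
print(get_start_datetime(recent, 'Weekly'))  # roughly one week before now
print(get_start_datetime(recent, 'Daily'))   # None, the game is older than 24 hours
```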
{hockey_blast_common_lib-0.1.48.dist-info → hockey_blast_common_lib-0.1.50.dist-info}/RECORD
RENAMED
@@ -1,24 +1,23 @@
 hockey_blast_common_lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hockey_blast_common_lib/aggregate_all_stats.py,sha256=2bOj2BW0k3ZPQR1NH04upnkIfO9SastzTz7XwO3ujYo,1104
-hockey_blast_common_lib/aggregate_goalie_stats.py,sha256=
-hockey_blast_common_lib/aggregate_human_stats.py,sha256=
-hockey_blast_common_lib/aggregate_referee_stats.py,sha256=
-hockey_blast_common_lib/aggregate_skater_stats.py,sha256=
+hockey_blast_common_lib/aggregate_goalie_stats.py,sha256=_9lP5xNX_QcIge-sHbqIlt7mKcFJBGVBbr33Rtf3LAY,12365
+hockey_blast_common_lib/aggregate_human_stats.py,sha256=uBNzXbFsK5PhPg-5wjH2iuJ7VPpA7MWKFHx90rMbhjQ,24084
+hockey_blast_common_lib/aggregate_referee_stats.py,sha256=dSl0LEpCzyzpD_tv8yw07NLBImo2RP_xuUQZK_p5kFs,12189
+hockey_blast_common_lib/aggregate_skater_stats.py,sha256=MU4ICUUtDDKhdeb4sc0UPzjREwSB0rFZuJqRtIDedDU,17393
 hockey_blast_common_lib/assign_skater_skill.py,sha256=p-0fbodGpM8BCjKHDpxNb7BH2FcIlBsJwON844KNrUY,1817
 hockey_blast_common_lib/db_connection.py,sha256=HvPxDvOj7j5H85RfslGvHVNevfg7mKCd0syJ6NX21mU,1890
 hockey_blast_common_lib/dump_sample_db.sh,sha256=MY3lnzTXBoWd76-ZlZr9nWsKMEVgyRsUn-LZ2d1JWZs,810
-hockey_blast_common_lib/
-hockey_blast_common_lib/hockey_blast_sample_backup.sql.gz,sha256=aueh_CTlfYKoztGlzGVWRgaEYQ4GsyMmmYOxxkdDHIE,1256769
+hockey_blast_common_lib/hockey_blast_sample_backup.sql.gz,sha256=Mg0lpOPFHwm8DnyiVEiLq3UbWa0eDan1ltV9ieFlyAc,4648905
 hockey_blast_common_lib/models.py,sha256=nbJjypa2OBO5_fwjAbWVgBp4WDCuE-RtaELzJ9cvqB4,16468
-hockey_blast_common_lib/options.py,sha256=
+hockey_blast_common_lib/options.py,sha256=2L4J9rKCKr58om34259D3_s7kbPdknMSwoo6IwTNnx0,849
 hockey_blast_common_lib/restore_sample_db.sh,sha256=7W3lzRZeu9zXIu1Bvtnaw8EHc1ulHmFM4mMh86oUQJo,2205
 hockey_blast_common_lib/skills_in_divisions.py,sha256=m-UEwMwn1KM7wOYvDstgsOEeH57M9V6yrkBoghzGYKE,7005
 hockey_blast_common_lib/skills_propagation.py,sha256=x6yy7fJ6IX3YiHqiP_v7-p_S2Expb8JJ-mWuajEFBdY,16388
 hockey_blast_common_lib/stats_models.py,sha256=NWigeIowIJU6o1Sk1cP08kEy4t594LZpecKUnl-O6as,25552
 hockey_blast_common_lib/stats_utils.py,sha256=DXsPO4jw8XsdRUN46TGF_IiBAfz3GCIVBswCGp5ELDk,284
-hockey_blast_common_lib/utils.py,sha256=
+hockey_blast_common_lib/utils.py,sha256=PduHp6HoI4sfr5HvJfQAaz7170dy5kTFVdIfWvBR-Jg,5874
 hockey_blast_common_lib/wsgi.py,sha256=7LGUzioigviJp-EUhSEaQcd4jBae0mxbkyBscQfZhlc,730
-hockey_blast_common_lib-0.1.
-hockey_blast_common_lib-0.1.
-hockey_blast_common_lib-0.1.
-hockey_blast_common_lib-0.1.
+hockey_blast_common_lib-0.1.50.dist-info/METADATA,sha256=fd7SwCTv4w-PgDhuALLALEsq_w3nJXgy_RhD_APV9pE,318
+hockey_blast_common_lib-0.1.50.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+hockey_blast_common_lib-0.1.50.dist-info/top_level.txt,sha256=wIR4LIkE40npoA2QlOdfCYlgFeGbsHR8Z6r0h46Vtgc,24
+hockey_blast_common_lib-0.1.50.dist-info/RECORD,,
hockey_blast_common_lib/find_leagues_seasons.py
DELETED
@@ -1,53 +0,0 @@
-import time
-import json
-import os
-import requests
-from bs4 import BeautifulSoup
-
-def probe_leagues_and_seasons(min_league, max_league, min_season, max_season, interval_seconds, output_file):
-    results = []
-
-    for league_id in range(min_league, max_league + 1):
-        league_data = {"league": league_id, "seasons": set()}
-        for season_id in range(min_season, max_season + 1):
-            url = f"https://stats.sharksice.timetoscore.com/display-schedule.php?stat_class=1&league={league_id}&season={season_id}"
-            print(f"Probing URL: {url}")
-
-            try:
-                response = requests.get(url, timeout=10)
-                response.raise_for_status()
-                schedule_html = response.text
-
-                # Parse the schedule page to find all game links
-                soup = BeautifulSoup(schedule_html, "html.parser")
-                tables = soup.find_all('table')
-                top_level_tables = [table for table in tables if table.find_parent('table') is None]
-
-                if len(top_level_tables) > 0:
-                    print(f"Data found for league {league_id}, season {season_id}")
-                    league_data["seasons"].add(season_id)
-
-                    # Save the fetched HTML to a subfolder
-                    folder_path = os.path.join("schedules", f"league={league_id}", f"season={season_id}")
-                    os.makedirs(folder_path, exist_ok=True)
-                    with open(os.path.join(folder_path, "schedule.html"), "w") as f:
-                        f.write(schedule_html)
-                else:
-                    print(f"No data for league {league_id}, season {season_id}")
-            except Exception as e:
-                print(f"Error probing URL {url}: {e}")
-
-            # Wait for the specified interval before the next request
-            time.sleep(interval_seconds)
-
-        if league_data["seasons"]:
-            league_data["seasons"] = list(league_data["seasons"])  # Convert set to list for JSON serialization
-            results.append(league_data)
-
-    # Save results to a JSON file
-    with open(output_file, "w") as f:
-        json.dump(results, f, indent=4)
-    print(f"Results saved to {output_file}")
-
-# Example usage
-probe_leagues_and_seasons(min_league=1, max_league=70, min_season=1, max_season=70, interval_seconds=9, output_file="leagues_seasons_2_70_1_70.json")
{hockey_blast_common_lib-0.1.48.dist-info → hockey_blast_common_lib-0.1.50.dist-info}/WHEEL
RENAMED
File without changes
{hockey_blast_common_lib-0.1.48.dist-info → hockey_blast_common_lib-0.1.50.dist-info}/top_level.txt
RENAMED
File without changes