hockey-blast-common-lib 0.1.66-py3-none-any.whl → 0.1.67-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hockey_blast_common_lib/aggregate_game_stats_all.py (new file)
@@ -0,0 +1,150 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ Orchestrator for per-game statistics aggregation.
4
+
5
+ This script provides a unified interface for running both skater and goalie
6
+ per-game statistics aggregation. It supports full and append modes and can
7
+ run aggregations for specific roles or all roles together.
8
+
9
+ Usage examples:
10
+ # Full regeneration of all per-game stats
11
+ python aggregate_game_stats_all.py --mode full --role all
12
+
13
+ # Append new games for skaters only
14
+ python aggregate_game_stats_all.py --mode append --role skater
15
+
16
+ # Append new games for goalies only
17
+ python aggregate_game_stats_all.py --mode append --role goalie
18
+
19
+ The script automatically manages sentinel record tracking across both stat types
20
+ to ensure consistent append mode behavior.
21
+ """
22
+
23
+ import argparse
24
+ import os
25
+ import sys
26
+ from datetime import datetime
27
+
28
+ # Add the package directory to the Python path
29
+ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
30
+
31
+ from hockey_blast_common_lib.aggregate_game_stats_goalie import aggregate_game_stats_goalie
32
+ from hockey_blast_common_lib.aggregate_game_stats_skater import aggregate_game_stats_skater
33
+ from hockey_blast_common_lib.db_connection import create_session
34
+
35
+
36
+ def run_aggregation(mode="full", role="all", human_id=None):
37
+ """Run per-game statistics aggregation.
38
+
39
+ Args:
40
+ mode: "full" to regenerate all records, "append" to process new games only
41
+ role: "skater", "goalie", or "all" to specify which stats to aggregate
42
+ human_id: Optional human_id to process only one player (for testing/debugging)
43
+ """
44
+ session = create_session("boss")
45
+
46
+ start_time = datetime.now()
47
+ print(f"\n{'='*80}")
48
+ print(f"PER-GAME STATISTICS AGGREGATION")
49
+ print(f"Mode: {mode.upper()}")
50
+ print(f"Role: {role.upper()}")
51
+ if human_id:
52
+ print(f"Human ID Filter: {human_id}")
53
+ print(f"Started: {start_time.strftime('%Y-%m-%d %H:%M:%S')}")
54
+ print(f"{'='*80}\n")
55
+
56
+ # Run skater aggregation
57
+ if role in ["skater", "all"]:
58
+ try:
59
+ aggregate_game_stats_skater(session, mode=mode, human_id=human_id)
60
+ except Exception as e:
61
+ print(f"\nERROR: Skater aggregation failed: {e}")
62
+ import traceback
63
+ traceback.print_exc()
64
+ if role == "skater":
65
+ sys.exit(1)
66
+ # If running all, continue to goalie even if skater fails
67
+ print("\nContinuing to goalie aggregation...\n")
68
+
69
+ # Run goalie aggregation
70
+ if role in ["goalie", "all"]:
71
+ try:
72
+ aggregate_game_stats_goalie(session, mode=mode, human_id=human_id)
73
+ except Exception as e:
74
+ print(f"\nERROR: Goalie aggregation failed: {e}")
75
+ import traceback
76
+ traceback.print_exc()
77
+ sys.exit(1)
78
+
79
+ end_time = datetime.now()
80
+ duration = end_time - start_time
81
+
82
+ print(f"\n{'='*80}")
83
+ print(f"AGGREGATION COMPLETE")
84
+ print(f"Duration: {duration}")
85
+ print(f"Finished: {end_time.strftime('%Y-%m-%d %H:%M:%S')}")
86
+ print(f"{'='*80}\n")
87
+
88
+
89
+ def main():
90
+ """Main entry point for CLI."""
91
+ parser = argparse.ArgumentParser(
92
+ description="Aggregate per-game statistics for skaters and goalies",
93
+ formatter_class=argparse.RawDescriptionHelpFormatter,
94
+ epilog="""
95
+ Examples:
96
+ # Full regeneration of all stats
97
+ %(prog)s --mode full --role all
98
+
99
+ # Append new games for skaters only
100
+ %(prog)s --mode append --role skater
101
+
102
+ # Append new games for goalies only
103
+ %(prog)s --mode append --role goalie
104
+
105
+ Notes:
106
+ - Full mode deletes and regenerates all records
107
+ - Append mode uses sentinel tracking with 1-day overlap
108
+ - Only saves non-zero records (RAG optimization)
109
+ - Skater stats: saves games with goals, assists, or penalties
110
+ - Goalie stats: saves games where goalie faced shots
111
+ """,
112
+ )
113
+
114
+ parser.add_argument(
115
+ "--mode",
116
+ choices=["full", "append"],
117
+ default="full",
118
+ help="Aggregation mode (default: full)",
119
+ )
120
+
121
+ parser.add_argument(
122
+ "--role",
123
+ choices=["skater", "goalie", "all"],
124
+ default="all",
125
+ help="Which role stats to aggregate (default: all)",
126
+ )
127
+
128
+ parser.add_argument(
129
+ "--human-id",
130
+ type=int,
131
+ default=None,
132
+ help="Optional: Limit processing to specific human_id (for testing)",
133
+ )
134
+
135
+ args = parser.parse_args()
136
+
137
+ try:
138
+ run_aggregation(mode=args.mode, role=args.role, human_id=args.human_id)
139
+ except KeyboardInterrupt:
140
+ print("\n\nAggregation cancelled by user.")
141
+ sys.exit(130)
142
+ except Exception as e:
143
+ print(f"\n\nFATAL ERROR: {e}")
144
+ import traceback
145
+ traceback.print_exc()
146
+ sys.exit(1)
147
+
148
+
149
+ if __name__ == "__main__":
150
+ main()
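
The orchestrator above is also importable. As a minimal sketch (assuming the package is installed and the "boss" database connection used throughout these scripts is reachable), a scheduled job could call `run_aggregation` directly instead of going through the CLI; the `nightly_refresh` wrapper is hypothetical:

```python
# Hypothetical scheduled-job wrapper around the orchestrator defined above.
# Assumes the package is installed and the "boss" database is reachable.
from hockey_blast_common_lib.aggregate_game_stats_all import run_aggregation

def nightly_refresh():
    # Append mode only touches games after the sentinel timestamp
    # (minus a 1-day overlap), so it is safe to run every night.
    run_aggregation(mode="append", role="all")

if __name__ == "__main__":
    nightly_refresh()
```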
hockey_blast_common_lib/aggregate_game_stats_goalie.py (new file)
@@ -0,0 +1,257 @@
1
+ import os
2
+ import sys
3
+
4
+ # Add the package directory to the Python path
5
+ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
6
+
7
+ from datetime import datetime, timedelta
8
+
9
+ from sqlalchemy import and_, func
10
+ from sqlalchemy.exc import IntegrityError
11
+
12
+ from hockey_blast_common_lib.db_connection import create_session
13
+ from hockey_blast_common_lib.models import Division, Game, GameRoster, GoalieSaves, Human
14
+ from hockey_blast_common_lib.progress_utils import create_progress_tracker
15
+ from hockey_blast_common_lib.stats_models import GameStatsGoalie
16
+ from hockey_blast_common_lib.utils import get_non_human_ids
17
+
18
+ # Import status constants for game filtering
19
+ FINAL_STATUS = "Final"
20
+ FINAL_SO_STATUS = "Final(SO)"
21
+
22
+
23
+ def aggregate_game_stats_goalie(session, mode="full", human_id=None):
24
+ """Aggregate per-game goalie statistics.
25
+
26
+ Args:
27
+ session: Database session
28
+ mode: "full" to regenerate all records, "append" to process new games only
29
+ human_id: Optional human_id to process only one goalie (for testing/debugging)
30
+
31
+ The function stores individual game performance for each goalie with non-zero stats.
32
+ Only games where the goalie faced at least one shot are saved.
33
+ This sparse storage is optimized for RAG system queries.
34
+
35
+ Uses Incognito Human sentinel record (game_id=-1) to track last processed timestamp
36
+ for append mode with 1-day overlap to catch data corrections.
37
+
38
+ Note: Uses GameStatsGoalie table but shares sentinel tracking with GameStatsSkater
39
+ since both are per-game stats that should be processed together.
40
+ """
41
+
42
+ # Get Incognito Human for sentinel tracking (first_name="Incognito", middle_name="", last_name="Human")
43
+ incognito_human = session.query(Human).filter_by(
44
+ first_name="Incognito", middle_name="", last_name="Human"
45
+ ).first()
46
+ if not incognito_human:
47
+ raise RuntimeError("Incognito Human not found in database - required for sentinel tracking")
48
+ incognito_human_id = incognito_human.id
49
+
50
+ non_human_ids = get_non_human_ids(session)
51
+
52
+ # Add human_id to filter if specified
53
+ if human_id:
54
+ human = session.query(Human).filter_by(id=human_id).first()
55
+ if not human:
56
+ print(f"ERROR: Human ID {human_id} not found in database")
57
+ return
58
+ print(f"Limiting to human_id={human_id}: {human.first_name} {human.last_name}\n")
59
+
60
+ print(f"\n{'='*80}")
61
+ print(f"Aggregating per-game goalie statistics (mode: {mode})")
62
+ print(f"{'='*80}\n")
63
+
64
+ # Determine game filtering based on mode
65
+ # Note: We check GameStatsSkater for sentinel since they're processed together
66
+ if mode == "append":
67
+ # Import here to avoid circular dependency
68
+ from hockey_blast_common_lib.stats_models import GameStatsSkater
69
+
70
+ # Query sentinel record for last processed timestamp
71
+ sentinel = (
72
+ session.query(GameStatsSkater)
73
+ .filter(
74
+ GameStatsSkater.human_id == incognito_human_id,
75
+ GameStatsSkater.game_id == -1,
76
+ )
77
+ .first()
78
+ )
79
+
80
+ if sentinel:
81
+ last_processed = datetime.combine(sentinel.game_date, sentinel.game_time)
82
+ # Subtract 1 day for overlap to catch data corrections
83
+ start_datetime = last_processed - timedelta(days=1)
84
+ print(f"Append mode: Processing games after {start_datetime}")
85
+ print(f"(1-day overlap from last processed: {last_processed})\n")
86
+
87
+ # Delete records for games in the overlap window
88
+ delete_count = (
89
+ session.query(GameStatsGoalie)
90
+ .filter(
91
+ GameStatsGoalie.human_id != incognito_human_id,
92
+ func.cast(
93
+ func.concat(GameStatsGoalie.game_date, " ", GameStatsGoalie.game_time),
94
+ func.TIMESTAMP,
95
+ ) >= start_datetime,
96
+ )
97
+ .delete(synchronize_session=False)
98
+ )
99
+ session.commit()
100
+ print(f"Deleted {delete_count} existing records in overlap window\n")
101
+ else:
102
+ # No sentinel found, treat as full mode
103
+ print("No sentinel record found - treating as full mode\n")
104
+ mode = "full"
105
+ start_datetime = None
106
+ else:
107
+ start_datetime = None
108
+
109
+ if mode == "full":
110
+ # Delete all existing records (no sentinel for goalie table)
111
+ delete_count = session.query(GameStatsGoalie).delete(synchronize_session=False)
112
+ session.commit()
113
+ print(f"Full mode: Deleted {delete_count} existing records\n")
114
+
115
+ # Build game filter for eligible games
116
+ game_filter = Game.status.in_([FINAL_STATUS, FINAL_SO_STATUS])
117
+ if mode == "append" and start_datetime:
118
+ game_filter = and_(
119
+ game_filter,
120
+ func.cast(
121
+ func.concat(Game.date, " ", Game.time),
122
+ func.TIMESTAMP,
123
+ ) >= start_datetime,
124
+ )
125
+
126
+ # Count total GoalieSaves records to process for progress tracking
127
+ total_saves_records = (
128
+ session.query(GoalieSaves)
129
+ .join(Game, GoalieSaves.game_id == Game.id)
130
+ .filter(game_filter)
131
+ .count()
132
+ )
133
+ print(f"Processing {total_saves_records} goalie save records...\n")
134
+
135
+ if total_saves_records == 0:
136
+ print("No goalie records to process.\n")
137
+ return
138
+
139
+ # Query goalie saves joined with game metadata and roster
140
+ # GoalieSaves already has per-game goalie data
141
+ goalie_query = (
142
+ session.query(
143
+ GoalieSaves.game_id,
144
+ GoalieSaves.goalie_id.label("human_id"),
145
+ GameRoster.team_id,
146
+ Game.org_id,
147
+ Division.level_id,
148
+ Game.date.label("game_date"),
149
+ Game.time.label("game_time"),
150
+ GoalieSaves.goals_allowed,
151
+ GoalieSaves.shots_against.label("shots_faced"),
152
+ GoalieSaves.saves_count.label("saves"),
153
+ )
154
+ .join(Game, GoalieSaves.game_id == Game.id)
155
+ .join(Division, Game.division_id == Division.id)
156
+ .join(
157
+ GameRoster,
158
+ and_(
159
+ GameRoster.game_id == GoalieSaves.game_id,
160
+ GameRoster.human_id == GoalieSaves.goalie_id,
161
+ ),
162
+ )
163
+ .filter(
164
+ game_filter,
165
+ GoalieSaves.goalie_id.notin_(non_human_ids), # Filter placeholder humans
166
+ )
167
+ )
168
+
169
+ # Add human_id filter if specified
170
+ if human_id:
171
+ goalie_query = goalie_query.filter(GoalieSaves.goalie_id == human_id)
172
+
173
+ goalie_records = goalie_query.all()
174
+
175
+ print(f"Found {len(goalie_records)} goalie save records\n")
176
+
177
+ # Filter to only non-zero stats (CRITICAL for RAG efficiency)
178
+ # Only save records where goalie faced at least one shot
179
+ print("Filtering to non-zero records...")
180
+ nonzero_records = [record for record in goalie_records if record.shots_faced > 0]
181
+
182
+ print(f"Filtered: {len(nonzero_records)} non-zero records (from {len(goalie_records)} total)\n")
183
+
184
+ # Insert records in batches with progress tracking
185
+ batch_size = 1000
186
+ total_records = len(nonzero_records)
187
+
188
+ if total_records == 0:
189
+ print("No non-zero records to insert.\n")
190
+ else:
191
+ progress = create_progress_tracker(total_records, "Inserting per-game goalie stats")
192
+
193
+ records_to_insert = []
194
+ for i, record in enumerate(nonzero_records, 1):
195
+ # Calculate save percentage
196
+ if record.shots_faced > 0:
197
+ save_percentage = (record.shots_faced - record.goals_allowed) / record.shots_faced
198
+ else:
199
+ save_percentage = 0.0
200
+
201
+ game_stats_record = GameStatsGoalie(
202
+ game_id=record.game_id,
203
+ human_id=record.human_id,
204
+ team_id=record.team_id,
205
+ org_id=record.org_id,
206
+ level_id=record.level_id,
207
+ game_date=record.game_date,
208
+ game_time=record.game_time,
209
+ goals_allowed=record.goals_allowed,
210
+ shots_faced=record.shots_faced,
211
+ saves=record.saves,
212
+ save_percentage=save_percentage,
213
+ created_at=datetime.utcnow(),
214
+ )
215
+
216
+ records_to_insert.append(game_stats_record)
217
+
218
+ # Commit in batches
219
+ if i % batch_size == 0 or i == total_records:
220
+ session.bulk_save_objects(records_to_insert)
221
+ session.commit()
222
+ records_to_insert = []
223
+ progress.update(i)
224
+
225
+ print("\nInsert complete.\n")
226
+
227
+ print(f"\n{'='*80}")
228
+ print("Per-game goalie statistics aggregation complete")
229
+ print(f"{'='*80}\n")
230
+
231
+
232
+ def run_aggregate_game_stats_goalie():
233
+ """Main entry point for goalie per-game aggregation."""
234
+ import argparse
235
+
236
+ parser = argparse.ArgumentParser(description="Aggregate per-game goalie statistics")
237
+ parser.add_argument(
238
+ "--mode",
239
+ choices=["full", "append"],
240
+ default="full",
241
+ help="Aggregation mode: 'full' to regenerate all, 'append' to add new games only",
242
+ )
243
+ parser.add_argument(
244
+ "--human-id",
245
+ type=int,
246
+ default=None,
247
+ help="Optional: Limit processing to specific human_id (for testing)",
248
+ )
249
+
250
+ args = parser.parse_args()
251
+
252
+ session = create_session("boss")
253
+ aggregate_game_stats_goalie(session, mode=args.mode, human_id=args.human_id)
254
+
255
+
256
+ if __name__ == "__main__":
257
+ run_aggregate_game_stats_goalie()
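
The sparse GameStatsGoalie rows written above are intended for "best games" style lookups. The query below is an illustrative sketch of that use, not code from the package; the `human_id` value and the minimum-shots threshold are placeholders:

```python
# Illustrative sketch: fetch a goalie's best games by save percentage from the
# sparse per-game table populated above. The human_id value is a placeholder.
from hockey_blast_common_lib.db_connection import create_session
from hockey_blast_common_lib.stats_models import GameStatsGoalie

session = create_session("boss")
top_games = (
    session.query(GameStatsGoalie)
    .filter(GameStatsGoalie.human_id == 12345)   # placeholder goalie id
    .filter(GameStatsGoalie.shots_faced >= 10)   # arbitrary minimum-sample cutoff
    .order_by(GameStatsGoalie.save_percentage.desc())
    .limit(5)
    .all()
)
for g in top_games:
    print(g.game_date, f"{g.saves} saves on {g.shots_faced} shots ({g.save_percentage:.3f})")
```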
hockey_blast_common_lib/aggregate_game_stats_skater.py (new file)
@@ -0,0 +1,361 @@
1
+ import os
2
+ import sys
3
+
4
+ # Add the package directory to the Python path
5
+ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
6
+
7
+ from datetime import datetime, timedelta
8
+
9
+ from sqlalchemy import and_, case, func
10
+ from sqlalchemy.exc import IntegrityError
11
+
12
+ from hockey_blast_common_lib.db_connection import create_session
13
+ from hockey_blast_common_lib.models import Division, Game, GameRoster, Goal, Human, Penalty
14
+ from hockey_blast_common_lib.progress_utils import create_progress_tracker
15
+ from hockey_blast_common_lib.stats_models import GameStatsSkater
16
+ from hockey_blast_common_lib.utils import get_non_human_ids
17
+
18
+ # Import status constants for game filtering
19
+ FINAL_STATUS = "Final"
20
+ FINAL_SO_STATUS = "Final(SO)"
21
+
22
+
23
+ def aggregate_game_stats_skater(session, mode="full", human_id=None):
24
+ """Aggregate per-game skater statistics.
25
+
26
+ Args:
27
+ session: Database session
28
+ mode: "full" to regenerate all records, "append" to process new games only
29
+ human_id: Optional human_id to process only one player (for testing/debugging)
30
+
31
+ The function stores individual game performance for each skater with non-zero stats.
32
+ Only games where the player recorded at least one goal, assist, or penalty minute are saved.
33
+ This sparse storage is optimized for RAG system queries.
34
+
35
+ Uses Incognito Human sentinel record (game_id=-1) to track last processed timestamp
36
+ for append mode with 1-day overlap to catch data corrections.
37
+ """
38
+
39
+ # Get Incognito Human for sentinel tracking (first_name="Incognito", middle_name="", last_name="Human")
40
+ incognito_human = session.query(Human).filter_by(
41
+ first_name="Incognito", middle_name="", last_name="Human"
42
+ ).first()
43
+ if not incognito_human:
44
+ raise RuntimeError("Incognito Human not found in database - required for sentinel tracking")
45
+ incognito_human_id = incognito_human.id
46
+
47
+ non_human_ids = get_non_human_ids(session)
48
+
49
+ # Add human_id to filter if specified
50
+ if human_id:
51
+ human = session.query(Human).filter_by(id=human_id).first()
52
+ if not human:
53
+ print(f"ERROR: Human ID {human_id} not found in database")
54
+ return
55
+ print(f"Limiting to human_id={human_id}: {human.first_name} {human.last_name}\n")
56
+
57
+ print(f"\n{'='*80}")
58
+ print(f"Aggregating per-game skater statistics (mode: {mode})")
59
+ print(f"{'='*80}\n")
60
+
61
+ # Determine game filtering based on mode
62
+ if mode == "append":
63
+ # Query sentinel record for last processed timestamp
64
+ sentinel = (
65
+ session.query(GameStatsSkater)
66
+ .filter(
67
+ GameStatsSkater.human_id == incognito_human_id,
68
+ GameStatsSkater.game_id == -1,
69
+ )
70
+ .first()
71
+ )
72
+
73
+ if sentinel:
74
+ last_processed = datetime.combine(sentinel.game_date, sentinel.game_time)
75
+ # Subtract 1 day for overlap to catch data corrections
76
+ start_datetime = last_processed - timedelta(days=1)
77
+ print(f"Append mode: Processing games after {start_datetime}")
78
+ print(f"(1-day overlap from last processed: {last_processed})\n")
79
+
80
+ # Delete records for games in the overlap window
81
+ delete_count = (
82
+ session.query(GameStatsSkater)
83
+ .filter(
84
+ GameStatsSkater.human_id != incognito_human_id,
85
+ func.cast(
86
+ func.concat(GameStatsSkater.game_date, " ", GameStatsSkater.game_time),
87
+ func.TIMESTAMP,
88
+ ) >= start_datetime,
89
+ )
90
+ .delete(synchronize_session=False)
91
+ )
92
+ session.commit()
93
+ print(f"Deleted {delete_count} existing records in overlap window\n")
94
+ else:
95
+ # No sentinel found, treat as full mode
96
+ print("No sentinel record found - treating as full mode\n")
97
+ mode = "full"
98
+ start_datetime = None
99
+ else:
100
+ start_datetime = None
101
+
102
+ if mode == "full":
103
+ # Delete all existing records except sentinel
104
+ delete_count = (
105
+ session.query(GameStatsSkater)
106
+ .filter(GameStatsSkater.human_id != incognito_human_id)
107
+ .delete(synchronize_session=False)
108
+ )
109
+ session.commit()
110
+ print(f"Full mode: Deleted {delete_count} existing records\n")
111
+
112
+ # Build game filter for eligible games
113
+ game_filter = Game.status.in_([FINAL_STATUS, FINAL_SO_STATUS])
114
+ if mode == "append" and start_datetime:
115
+ game_filter = and_(
116
+ game_filter,
117
+ func.cast(
118
+ func.concat(Game.date, " ", Game.time),
119
+ func.TIMESTAMP,
120
+ ) >= start_datetime,
121
+ )
122
+
123
+ # Count total games to process for progress tracking
124
+ total_games = session.query(Game).filter(game_filter).count()
125
+ print(f"Processing {total_games} games...\n")
126
+
127
+ if total_games == 0:
128
+ print("No games to process.\n")
129
+ return
130
+
131
+ # Query game roster entries for skaters (exclude goalies)
132
+ # Join with games to get metadata, filter by game status and date window
133
+ roster_query = (
134
+ session.query(
135
+ GameRoster.game_id,
136
+ GameRoster.human_id,
137
+ GameRoster.team_id,
138
+ Game.org_id,
139
+ Division.level_id,
140
+ Game.date.label("game_date"),
141
+ Game.time.label("game_time"),
142
+ )
143
+ .join(Game, GameRoster.game_id == Game.id)
144
+ .join(Division, Game.division_id == Division.id)
145
+ .filter(
146
+ ~GameRoster.role.ilike("g"), # Exclude goalies
147
+ GameRoster.human_id.notin_(non_human_ids), # Filter placeholder humans
148
+ game_filter,
149
+ )
150
+ )
151
+
152
+ # Add human_id filter if specified
153
+ if human_id:
154
+ roster_query = roster_query.filter(GameRoster.human_id == human_id)
155
+
156
+ roster_entries = roster_query.all()
157
+
158
+ # Build dict of roster entries by (game_id, human_id) for fast lookup
159
+ roster_dict = {}
160
+ for entry in roster_entries:
161
+ key = (entry.game_id, entry.human_id)
162
+ roster_dict[key] = {
163
+ "team_id": entry.team_id,
164
+ "org_id": entry.org_id,
165
+ "level_id": entry.level_id,
166
+ "game_date": entry.game_date,
167
+ "game_time": entry.game_time,
168
+ "goals": 0,
169
+ "assists": 0,
170
+ "points": 0,
171
+ "penalty_minutes": 0,
172
+ }
173
+
174
+ print(f"Found {len(roster_dict)} skater roster entries\n")
175
+
176
+ # Query goals and count by scorer and assisters
177
+ print("Aggregating goals and assists...")
178
+ goals = (
179
+ session.query(Goal)
180
+ .join(Game, Goal.game_id == Game.id)
181
+ .filter(game_filter)
182
+ .all()
183
+ )
184
+
185
+ for goal in goals:
186
+ # Count goal for scorer
187
+ key = (goal.game_id, goal.goal_scorer_id)
188
+ if key in roster_dict:
189
+ roster_dict[key]["goals"] += 1
190
+ roster_dict[key]["points"] += 1
191
+
192
+ # Count assists
193
+ if goal.assist_1_id:
194
+ key = (goal.game_id, goal.assist_1_id)
195
+ if key in roster_dict:
196
+ roster_dict[key]["assists"] += 1
197
+ roster_dict[key]["points"] += 1
198
+
199
+ if goal.assist_2_id:
200
+ key = (goal.game_id, goal.assist_2_id)
201
+ if key in roster_dict:
202
+ roster_dict[key]["assists"] += 1
203
+ roster_dict[key]["points"] += 1
204
+
205
+ print(f"Processed {len(goals)} goals\n")
206
+
207
+ # Query penalties and aggregate by penalized player
208
+ print("Aggregating penalties...")
209
+ penalties = (
210
+ session.query(Penalty)
211
+ .join(Game, Penalty.game_id == Game.id)
212
+ .filter(game_filter)
213
+ .all()
214
+ )
215
+
216
+ for penalty in penalties:
217
+ key = (penalty.game_id, penalty.penalized_player_id)
218
+ if key in roster_dict:
219
+ # Convert penalty minutes: "GM" (game misconduct) = 10, else parse integer
220
+ if penalty.penalty_minutes and penalty.penalty_minutes.upper() == "GM":
221
+ roster_dict[key]["penalty_minutes"] += 10
222
+ else:
223
+ try:
224
+ minutes = int(penalty.penalty_minutes) if penalty.penalty_minutes else 0
225
+ roster_dict[key]["penalty_minutes"] += minutes
226
+ except (ValueError, TypeError):
227
+ # Log unconvertible values but don't crash
228
+ print(f"Warning: Could not convert penalty_minutes '{penalty.penalty_minutes}' for penalty {penalty.id}")
229
+
230
+ print(f"Processed {len(penalties)} penalties\n")
231
+
232
+ # Filter to only non-zero stats (CRITICAL for RAG efficiency)
233
+ print("Filtering to non-zero records...")
234
+ nonzero_dict = {
235
+ key: stats
236
+ for key, stats in roster_dict.items()
237
+ if stats["goals"] > 0 or stats["assists"] > 0 or stats["penalty_minutes"] > 0
238
+ }
239
+
240
+ print(f"Filtered: {len(nonzero_dict)} non-zero records (from {len(roster_dict)} total)\n")
241
+
242
+ # Insert records in batches with progress tracking
243
+ batch_size = 1000
244
+ total_records = len(nonzero_dict)
245
+
246
+ if total_records == 0:
247
+ print("No non-zero records to insert.\n")
248
+ else:
249
+ progress = create_progress_tracker(total_records, "Inserting per-game skater stats")
250
+
251
+ records_to_insert = []
252
+ for i, (key, stats) in enumerate(nonzero_dict.items(), 1):
253
+ game_id, human_id = key
254
+
255
+ record = GameStatsSkater(
256
+ game_id=game_id,
257
+ human_id=human_id,
258
+ team_id=stats["team_id"],
259
+ org_id=stats["org_id"],
260
+ level_id=stats["level_id"],
261
+ game_date=stats["game_date"],
262
+ game_time=stats["game_time"],
263
+ goals=stats["goals"],
264
+ assists=stats["assists"],
265
+ points=stats["points"],
266
+ penalty_minutes=stats["penalty_minutes"],
267
+ created_at=datetime.utcnow(),
268
+ )
269
+
270
+ records_to_insert.append(record)
271
+
272
+ # Commit in batches
273
+ if i % batch_size == 0 or i == total_records:
274
+ session.bulk_save_objects(records_to_insert)
275
+ session.commit()
276
+ records_to_insert = []
277
+ progress.update(i)
278
+
279
+ print("\nInsert complete.\n")
280
+
281
+ # Update or create sentinel record with max game timestamp (skip if filtering by human_id)
282
+ if not human_id:
283
+ max_game = (
284
+ session.query(
285
+ Game.date.label("game_date"),
286
+ Game.time.label("game_time"),
287
+ )
288
+ .filter(game_filter)
289
+ .order_by(Game.date.desc(), Game.time.desc())
290
+ .first()
291
+ )
292
+
293
+ if max_game:
294
+ # Try to update existing sentinel
295
+ sentinel = (
296
+ session.query(GameStatsSkater)
297
+ .filter(
298
+ GameStatsSkater.human_id == incognito_human_id,
299
+ GameStatsSkater.game_id == -1,
300
+ )
301
+ .first()
302
+ )
303
+
304
+ if sentinel:
305
+ sentinel.game_date = max_game.game_date
306
+ sentinel.game_time = max_game.game_time
307
+ print(f"Updated sentinel record: {max_game.game_date} {max_game.game_time}")
308
+ else:
309
+ # Create new sentinel
310
+ sentinel = GameStatsSkater(
311
+ game_id=-1,
312
+ human_id=incognito_human_id,
313
+ team_id=-1, # Dummy value
314
+ org_id=-1, # Dummy value
315
+ level_id=-1, # Dummy value
316
+ game_date=max_game.game_date,
317
+ game_time=max_game.game_time,
318
+ goals=0,
319
+ assists=0,
320
+ points=0,
321
+ penalty_minutes=0,
322
+ created_at=datetime.utcnow(),
323
+ )
324
+ session.add(sentinel)
325
+ print(f"Created sentinel record: {max_game.game_date} {max_game.game_time}")
326
+
327
+ session.commit()
328
+ else:
329
+ print("Skipping sentinel record creation (human_id filter active)")
330
+
331
+ print(f"\n{'='*80}")
332
+ print("Per-game skater statistics aggregation complete")
333
+ print(f"{'='*80}\n")
334
+
335
+
336
+ def run_aggregate_game_stats_skater():
337
+ """Main entry point for skater per-game aggregation."""
338
+ import argparse
339
+
340
+ parser = argparse.ArgumentParser(description="Aggregate per-game skater statistics")
341
+ parser.add_argument(
342
+ "--mode",
343
+ choices=["full", "append"],
344
+ default="full",
345
+ help="Aggregation mode: 'full' to regenerate all, 'append' to add new games only",
346
+ )
347
+ parser.add_argument(
348
+ "--human-id",
349
+ type=int,
350
+ default=None,
351
+ help="Optional: Limit processing to specific human_id (for testing)",
352
+ )
353
+
354
+ args = parser.parse_args()
355
+
356
+ session = create_session("boss")
357
+ aggregate_game_stats_skater(session, mode=args.mode, human_id=args.human_id)
358
+
359
+
360
+ if __name__ == "__main__":
361
+ run_aggregate_game_stats_skater()
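
Because both per-game scripts share the GameStatsSkater sentinel row (game_id=-1, owned by the "Incognito Human"), a caller can read that row to see how far the aggregation has progressed. This is an illustrative sketch built only from the models used above:

```python
# Illustrative sketch: read the sentinel row (game_id=-1, owned by the
# "Incognito Human") to see how far the per-game aggregation has progressed.
from datetime import datetime

from hockey_blast_common_lib.db_connection import create_session
from hockey_blast_common_lib.models import Human
from hockey_blast_common_lib.stats_models import GameStatsSkater

session = create_session("boss")
incognito = (
    session.query(Human)
    .filter_by(first_name="Incognito", middle_name="", last_name="Human")
    .first()
)
sentinel = (
    session.query(GameStatsSkater)
    .filter(GameStatsSkater.human_id == incognito.id, GameStatsSkater.game_id == -1)
    .first()
)
if sentinel:
    last_processed = datetime.combine(sentinel.game_date, sentinel.game_time)
    print(f"Per-game stats cover games up to {last_processed}")
else:
    print("No sentinel yet - run the aggregation in full mode first")
```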
hockey_blast_common_lib/aggregate_goalie_stats.py
@@ -117,6 +117,9 @@ def aggregate_goalie_stats(
117
117
  debug_human_id=None,
118
118
  aggregation_window=None,
119
119
  ):
120
+ # Capture start time for aggregation tracking
121
+ aggregation_start_time = datetime.utcnow()
122
+
120
123
  human_ids_to_filter = get_non_human_ids(session)
121
124
 
122
125
  # Get the name of the aggregation, for debug purposes
@@ -364,6 +367,7 @@ def aggregate_goalie_stats(
364
367
  total_in_rank=total_in_rank,
365
368
  first_game_id=stat["first_game_id"],
366
369
  last_game_id=stat["last_game_id"],
370
+ aggregation_started_at=aggregation_start_time,
367
371
  )
368
372
  session.add(goalie_stat)
369
373
  # Commit in batches
@@ -371,6 +375,13 @@ def aggregate_goalie_stats(
371
375
  session.commit()
372
376
  session.commit()
373
377
 
378
+ # Update all records with completion timestamp
379
+ aggregation_end_time = datetime.utcnow()
380
+ session.query(StatsModel).filter(
381
+ StatsModel.aggregation_id == aggregation_id
382
+ ).update({StatsModel.aggregation_completed_at: aggregation_end_time})
383
+ session.commit()
384
+
374
385
 
375
386
  def run_aggregate_goalie_stats():
376
387
  session = create_session("boss")
hockey_blast_common_lib/aggregate_human_stats.py
@@ -44,6 +44,9 @@ def aggregate_human_stats(
44
44
  human_id_filter=None,
45
45
  aggregation_window=None,
46
46
  ):
47
+ # Capture start time for aggregation tracking
48
+ aggregation_start_time = datetime.utcnow()
49
+
47
50
  human_ids_to_filter = get_non_human_ids(session)
48
51
 
49
52
  if aggregation_type == "org":
@@ -517,6 +520,7 @@ def aggregate_human_stats(
517
520
  last_game_id_referee=stat["last_game_id_referee"],
518
521
  first_game_id_scorekeeper=stat["first_game_id_scorekeeper"],
519
522
  last_game_id_scorekeeper=stat["last_game_id_scorekeeper"],
523
+ aggregation_started_at=aggregation_start_time,
520
524
  )
521
525
  session.add(human_stat)
522
526
  # Commit in batches
@@ -607,10 +611,18 @@ def aggregate_human_stats(
607
611
  last_game_id_referee=overall_stats["last_game_id_referee"],
608
612
  first_game_id_scorekeeper=overall_stats["first_game_id_scorekeeper"],
609
613
  last_game_id_scorekeeper=overall_stats["last_game_id_scorekeeper"],
614
+ aggregation_started_at=aggregation_start_time,
610
615
  )
611
616
  session.add(overall_human_stat)
612
617
  session.commit()
613
618
 
619
+ # Update all records with completion timestamp
620
+ aggregation_end_time = datetime.utcnow()
621
+ session.query(StatsModel).filter(
622
+ StatsModel.aggregation_id == aggregation_id
623
+ ).update({StatsModel.aggregation_completed_at: aggregation_end_time})
624
+ session.commit()
625
+
614
626
 
615
627
  def run_aggregate_human_stats():
616
628
  session = create_session("boss")
hockey_blast_common_lib/aggregate_referee_stats.py
@@ -5,6 +5,7 @@ import sys
5
5
  sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
6
6
 
7
7
 
8
+ from datetime import datetime
8
9
 
9
10
  import sqlalchemy
10
11
  from sqlalchemy.sql import case, func
@@ -102,6 +103,9 @@ def insert_percentile_markers_referee(
102
103
  def aggregate_referee_stats(
103
104
  session, aggregation_type, aggregation_id, aggregation_window=None
104
105
  ):
106
+ # Capture start time for aggregation tracking
107
+ aggregation_start_time = datetime.utcnow()
108
+
105
109
  human_ids_to_filter = get_non_human_ids(session)
106
110
 
107
111
  if aggregation_type == "org":
@@ -375,6 +379,7 @@ def aggregate_referee_stats(
375
379
  total_in_rank=total_in_rank,
376
380
  first_game_id=stat["first_game_id"],
377
381
  last_game_id=stat["last_game_id"],
382
+ aggregation_started_at=aggregation_start_time,
378
383
  )
379
384
  session.add(referee_stat)
380
385
  # Commit in batches
@@ -382,6 +387,13 @@ def aggregate_referee_stats(
382
387
  session.commit()
383
388
  session.commit()
384
389
 
390
+ # Update all records with completion timestamp
391
+ aggregation_end_time = datetime.utcnow()
392
+ session.query(StatsModel).filter(
393
+ StatsModel.aggregation_id == aggregation_id
394
+ ).update({StatsModel.aggregation_completed_at: aggregation_end_time})
395
+ session.commit()
396
+
385
397
 
386
398
  def run_aggregate_referee_stats():
387
399
  session = create_session("boss")
hockey_blast_common_lib/aggregate_scorekeeper_stats.py
@@ -5,6 +5,8 @@ import sys
5
5
  sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
6
6
 
7
7
 
8
+ from datetime import datetime
9
+
8
10
  import sqlalchemy
9
11
  from sqlalchemy.sql import func
10
12
 
@@ -151,6 +153,9 @@ def aggregate_scorekeeper_stats(
151
153
  if aggregation_type == "org" and aggregation_id != ALL_ORGS_ID:
152
154
  return # Do nothing for individual organization IDs
153
155
 
156
+ # Capture start time for aggregation tracking
157
+ aggregation_start_time = datetime.utcnow()
158
+
154
159
  human_ids_to_filter = get_non_human_ids(session)
155
160
 
156
161
  if aggregation_type == "org":
@@ -370,6 +375,7 @@ def aggregate_scorekeeper_stats(
370
375
  total_in_rank=total_in_rank,
371
376
  first_game_id=stat["first_game_id"],
372
377
  last_game_id=stat["last_game_id"],
378
+ aggregation_started_at=aggregation_start_time,
373
379
  )
374
380
  session.add(scorekeeper_stat)
375
381
  # Commit in batches
@@ -377,6 +383,13 @@ def aggregate_scorekeeper_stats(
377
383
  session.commit()
378
384
  session.commit()
379
385
 
386
+ # Update all records with completion timestamp
387
+ aggregation_end_time = datetime.utcnow()
388
+ session.query(StatsModel).filter(
389
+ StatsModel.aggregation_id == aggregation_id
390
+ ).update({StatsModel.aggregation_completed_at: aggregation_end_time})
391
+ session.commit()
392
+
380
393
 
381
394
  def run_aggregate_scorekeeper_stats():
382
395
  session = create_session("boss")
hockey_blast_common_lib/aggregate_skater_stats.py
@@ -6,6 +6,7 @@ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
6
6
 
7
7
 
8
8
  import sqlalchemy
9
+ from datetime import datetime
9
10
  from sqlalchemy import and_, case, func
10
11
  from sqlalchemy.sql import case, func
11
12
 
@@ -223,6 +224,9 @@ def aggregate_skater_stats(
223
224
  debug_human_id=None,
224
225
  aggregation_window=None,
225
226
  ):
227
+ # Capture start time for aggregation tracking
228
+ aggregation_start_time = datetime.utcnow()
229
+
226
230
  human_ids_to_filter = get_non_human_ids(session)
227
231
 
228
232
  # Get the name of the aggregation, for debug purposes
@@ -754,6 +758,7 @@ def aggregate_skater_stats(
754
758
  ),
755
759
  first_game_id=stat["first_game_id"],
756
760
  last_game_id=stat["last_game_id"],
761
+ aggregation_started_at=aggregation_start_time,
757
762
  )
758
763
  session.add(skater_stat)
759
764
  # Commit in batches
@@ -761,6 +766,13 @@ def aggregate_skater_stats(
761
766
  session.commit()
762
767
  session.commit()
763
768
 
769
+ # Update all records with completion timestamp
770
+ aggregation_end_time = datetime.utcnow()
771
+ session.query(StatsModel).filter(
772
+ StatsModel.aggregation_id == aggregation_id
773
+ ).update({StatsModel.aggregation_completed_at: aggregation_end_time})
774
+ session.commit()
775
+
764
776
 
765
777
  def run_aggregate_skater_stats():
766
778
  session = create_session("boss")
hockey_blast_common_lib/aggregate_team_goalie_stats.py
@@ -15,6 +15,8 @@ import sys
15
15
 
16
16
  sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
17
17
 
18
+ from datetime import datetime
19
+
18
20
  import sqlalchemy
19
21
  from sqlalchemy import func
20
22
 
@@ -62,6 +64,9 @@ def aggregate_team_goalie_stats(session, aggregation_type, aggregation_id):
62
64
  aggregation_type: "org" or "division"
63
65
  aggregation_id: ID of the organization or division
64
66
  """
67
+ # Capture start time for aggregation tracking
68
+ aggregation_start_time = datetime.utcnow()
69
+
65
70
  human_ids_to_filter = get_non_human_ids(session)
66
71
 
67
72
  # Determine aggregation details
@@ -221,10 +226,19 @@ def aggregate_team_goalie_stats(session, aggregation_type, aggregation_id):
221
226
  shots_faced_rank=0,
222
227
  goals_allowed_per_game_rank=0,
223
228
  save_percentage_rank=0,
229
+ aggregation_started_at=aggregation_start_time,
224
230
  )
225
231
  session.add(goalie_stat)
226
232
 
227
233
  session.commit()
234
+
235
+ # Update all records with completion timestamp
236
+ aggregation_end_time = datetime.utcnow()
237
+ session.query(StatsModel).filter(
238
+ StatsModel.aggregation_id == aggregation_id
239
+ ).update({StatsModel.aggregation_completed_at: aggregation_end_time})
240
+ session.commit()
241
+
228
242
  progress.finish()
229
243
  print(f"✓ Team goalie stats aggregation complete for {aggregation_name}")
230
244
 
hockey_blast_common_lib/aggregate_team_skater_stats.py
@@ -15,6 +15,8 @@ import sys
15
15
 
16
16
  sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
17
17
 
18
+ from datetime import datetime
19
+
18
20
  import sqlalchemy
19
21
  from sqlalchemy import and_, case, func
20
22
 
@@ -63,6 +65,9 @@ def aggregate_team_skater_stats(session, aggregation_type, aggregation_id):
63
65
  aggregation_type: "org" or "division"
64
66
  aggregation_id: ID of the organization or division
65
67
  """
68
+ # Capture start time for aggregation tracking
69
+ aggregation_start_time = datetime.utcnow()
70
+
66
71
  human_ids_to_filter = get_non_human_ids(session)
67
72
 
68
73
  # Determine aggregation details
@@ -269,10 +274,19 @@ def aggregate_team_skater_stats(session, aggregation_type, aggregation_id):
269
274
  gm_penalties_per_game_rank=0,
270
275
  current_point_streak_rank=0,
271
276
  current_point_streak_avg_points_rank=0,
277
+ aggregation_started_at=aggregation_start_time,
272
278
  )
273
279
  session.add(skater_stat)
274
280
 
275
281
  session.commit()
282
+
283
+ # Update all records with completion timestamp
284
+ aggregation_end_time = datetime.utcnow()
285
+ session.query(StatsModel).filter(
286
+ StatsModel.aggregation_id == aggregation_id
287
+ ).update({StatsModel.aggregation_completed_at: aggregation_end_time})
288
+ session.commit()
289
+
276
290
  progress.finish()
277
291
  print(f"✓ Team skater stats aggregation complete for {aggregation_name}")
278
292
 
hockey_blast_common_lib/models.py
@@ -53,7 +53,8 @@ class Game(db.Model):
53
53
  time = db.Column(db.Time)
54
54
  day_of_week = db.Column(db.Integer) # 1 to 7 for Monday to Sunday
55
55
  period_length = db.Column(db.Integer) # In minutes
56
- location = db.Column(db.String(100))
56
+ location = db.Column(db.String(100)) # DEPRECATED: Use location_id instead
57
+ location_id = db.Column(db.Integer, db.ForeignKey("locations.id"), nullable=True)
57
58
  scorekeeper_id = db.Column(db.Integer, db.ForeignKey("humans.id"))
58
59
  referee_1_id = db.Column(db.Integer, db.ForeignKey("humans.id"))
59
60
  referee_2_id = db.Column(db.Integer, db.ForeignKey("humans.id"))
@@ -72,6 +73,7 @@ class Game(db.Model):
72
73
  home_ot_score = db.Column(db.Integer, default=0)
73
74
  visitor_ot_score = db.Column(db.Integer, default=0)
74
75
  game_type = db.Column(db.String(50))
76
+ live_time = db.Column(db.String(50), nullable=True) # e.g., "Period 1, 1:10 left" for live games
75
77
  went_to_ot = db.Column(db.Boolean, default=False)
76
78
  home_period_1_shots = db.Column(db.Integer)
77
79
  home_period_2_shots = db.Column(db.Integer)
@@ -245,6 +247,7 @@ class Level(db.Model):
245
247
  org_id = db.Column(db.Integer, db.ForeignKey("organizations.id"), nullable=False)
246
248
  skill_value = db.Column(db.Float) # A number from 0 (NHL) to 100 (pedestrian)
247
249
  level_name = db.Column(db.String(100))
250
+ short_name = db.Column(db.String(50)) # Shortened display name (e.g., "D-7B-W" for "Adult Division 7B West")
248
251
  level_alternative_name = db.Column(db.String(100))
249
252
  is_seed = db.Column(db.Boolean, nullable=True, default=False) # New field
250
253
  skill_propagation_sequence = db.Column(db.Integer, nullable=True, default=-1)
@@ -253,6 +256,17 @@ class Level(db.Model):
253
256
  )
254
257
 
255
258
 
259
+ class Location(db.Model):
260
+ __tablename__ = "locations"
261
+ id = db.Column(db.Integer, primary_key=True)
262
+ location_in_game_source = db.Column(db.String(200), nullable=False, unique=True) # Raw string from Game.location, e.g., "San Jose Orange (N)"
263
+ location_name = db.Column(db.String(200), nullable=True) # Optional: Facility name, e.g., "Sharks Ice At San Jose"
264
+ rink_name = db.Column(db.String(200), nullable=True) # Optional: Specific rink, e.g., "Orange (N)"
265
+ address = db.Column(db.String(500), nullable=True)
266
+ google_maps_link = db.Column(db.String(500), nullable=True)
267
+ master_location_id = db.Column(db.Integer, db.ForeignKey("locations.id"), nullable=True) # Points to the canonical location for this rink
268
+
269
+
256
270
  class LevelsMonthly(db.Model):
257
271
  __tablename__ = "levels_monthly"
258
272
  id = db.Column(db.Integer, primary_key=True)
@@ -427,6 +441,7 @@ class Season(db.Model):
427
441
  id = db.Column(db.Integer, primary_key=True)
428
442
  season_number = db.Column(db.Integer)
429
443
  season_name = db.Column(db.String(100))
444
+ base_season_name = db.Column(db.String(100)) # Static prefix for season name (e.g., "Silver Stick", "Over", etc.)
430
445
  start_date = db.Column(db.Date)
431
446
  end_date = db.Column(db.Date)
432
447
  league_number = db.Column(
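
The new `location_id` foreign key and the `Location.master_location_id` pointer imply a two-step lookup from a game to its canonical rink. The query below is an illustrative sketch of that resolution, not code from the package:

```python
# Illustrative sketch: resolve a game's rink through the new Location table,
# following master_location_id to the canonical record when one is set.
from hockey_blast_common_lib.db_connection import create_session
from hockey_blast_common_lib.models import Game, Location

session = create_session("boss")
game = session.query(Game).filter(Game.location_id.isnot(None)).first()
if game:
    location = session.query(Location).filter_by(id=game.location_id).first()
    # Prefer the canonical location when this row is an alias for another rink.
    if location.master_location_id:
        location = session.query(Location).filter_by(id=location.master_location_id).first()
    print(location.location_name, "-", location.rink_name)
```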
hockey_blast_common_lib/stats_models.py
@@ -4,7 +4,13 @@ from sqlalchemy.orm import synonym
4
4
  from hockey_blast_common_lib.models import db
5
5
 
6
6
 
7
- class BaseStatsHuman(db.Model):
7
+ class AggregationTimestampMixin:
8
+ """Mixin to add aggregation timestamp tracking to all stats models."""
9
+ aggregation_started_at = db.Column(db.DateTime, nullable=True)
10
+ aggregation_completed_at = db.Column(db.DateTime, nullable=True)
11
+
12
+
13
+ class BaseStatsHuman(AggregationTimestampMixin, db.Model):
8
14
  __abstract__ = True
9
15
  id = db.Column(db.Integer, primary_key=True)
10
16
  human_id = db.Column(db.Integer, db.ForeignKey("humans.id"), nullable=False)
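
The two columns added by this mixin are written by the aggregation scripts earlier in this diff (`aggregation_started_at` at insert time, `aggregation_completed_at` in a bulk update after the final commit). A consumer could use them to check table freshness; the sketch below uses DivisionStatsGoalieTeam only as one concrete model that inherits the mixin via BaseStatsGoalie, and the 24-hour threshold is arbitrary:

```python
# Illustrative sketch: check how fresh a stats table is using the new mixin
# columns. The 24-hour staleness threshold is an arbitrary example value.
from datetime import datetime, timedelta

from sqlalchemy import func

from hockey_blast_common_lib.db_connection import create_session
from hockey_blast_common_lib.stats_models import DivisionStatsGoalieTeam

session = create_session("boss")
# MAX(...) ignores NULLs, so rows written before the mixin existed are skipped.
latest = session.query(func.max(DivisionStatsGoalieTeam.aggregation_completed_at)).scalar()
if latest and datetime.utcnow() - latest < timedelta(hours=24):
    print(f"Goalie team stats are fresh (completed {latest})")
else:
    print("Goalie team stats look stale or were never aggregated with timestamps")
```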
@@ -92,7 +98,7 @@ class BaseStatsHuman(db.Model):
92
98
  )
93
99
 
94
100
 
95
- class BaseStatsSkater(db.Model):
101
+ class BaseStatsSkater(AggregationTimestampMixin, db.Model):
96
102
  __abstract__ = True
97
103
  id = db.Column(db.Integer, primary_key=True)
98
104
  human_id = db.Column(db.Integer, db.ForeignKey("humans.id"), nullable=False)
@@ -193,7 +199,7 @@ class BaseStatsSkater(db.Model):
193
199
  )
194
200
 
195
201
 
196
- class BaseStatsGoalie(db.Model):
202
+ class BaseStatsGoalie(AggregationTimestampMixin, db.Model):
197
203
  __abstract__ = True
198
204
  id = db.Column(db.Integer, primary_key=True)
199
205
  human_id = db.Column(db.Integer, db.ForeignKey("humans.id"), nullable=False)
@@ -261,7 +267,7 @@ class BaseStatsGoalie(db.Model):
261
267
  )
262
268
 
263
269
 
264
- class BaseStatsReferee(db.Model):
270
+ class BaseStatsReferee(AggregationTimestampMixin, db.Model):
265
271
  __abstract__ = True
266
272
  id = db.Column(db.Integer, primary_key=True)
267
273
  human_id = db.Column(db.Integer, db.ForeignKey("humans.id"), nullable=False)
@@ -331,7 +337,7 @@ class BaseStatsReferee(db.Model):
331
337
  )
332
338
 
333
339
 
334
- class BaseStatsScorekeeper(db.Model):
340
+ class BaseStatsScorekeeper(AggregationTimestampMixin, db.Model):
335
341
  __abstract__ = True
336
342
  id = db.Column(db.Integer, primary_key=True)
337
343
  human_id = db.Column(db.Integer, db.ForeignKey("humans.id"), nullable=False)
@@ -1045,3 +1051,80 @@ class DivisionStatsGoalieTeam(BaseStatsGoalie):
1045
1051
  db.Index("idx_division_team_goalie_save_pct", "division_id", "save_percentage"),
1046
1052
  db.Index("idx_division_team_goalie_gaa", "division_id", "goals_allowed_per_game"),
1047
1053
  )
1054
+
1055
+
1056
+ # Per-Game Statistics Models (for RAG system)
1057
+ # These models store individual game performance data for each player/goalie
1058
+ # CRITICAL: Only non-zero rows are saved (games where player recorded stats)
1059
+
1060
+
1061
+ class GameStatsSkater(db.Model):
1062
+ """Per-game skater statistics.
1063
+
1064
+ Stores individual game performance for skaters.
1065
+ Only records where player had non-zero stats are saved.
1066
+ Optimized for queries like "show me top N games by points for player X".
1067
+ """
1068
+ __tablename__ = "game_stats_skater"
1069
+
1070
+ id = db.Column(db.Integer, primary_key=True)
1071
+ game_id = db.Column(db.Integer, db.ForeignKey("games.id"), nullable=False, index=True)
1072
+ human_id = db.Column(db.Integer, db.ForeignKey("humans.id"), nullable=False, index=True)
1073
+ team_id = db.Column(db.Integer, db.ForeignKey("teams.id"), nullable=False, index=True)
1074
+ org_id = db.Column(db.Integer, db.ForeignKey("organizations.id"), nullable=False, index=True)
1075
+ level_id = db.Column(db.Integer, db.ForeignKey("levels.id"), nullable=False, index=True)
1076
+
1077
+ # Denormalized game metadata for sorting/filtering
1078
+ game_date = db.Column(db.Date, nullable=False, index=True)
1079
+ game_time = db.Column(db.Time, nullable=False)
1080
+
1081
+ # Performance stats
1082
+ goals = db.Column(db.Integer, default=0, nullable=False)
1083
+ assists = db.Column(db.Integer, default=0, nullable=False)
1084
+ points = db.Column(db.Integer, default=0, nullable=False)
1085
+ penalty_minutes = db.Column(db.Integer, default=0, nullable=False)
1086
+
1087
+ # Tracking
1088
+ created_at = db.Column(db.DateTime, nullable=False, default=db.func.current_timestamp())
1089
+
1090
+ __table_args__ = (
1091
+ db.UniqueConstraint("game_id", "human_id", name="_game_human_uc_skater"),
1092
+ db.Index("idx_game_stats_skater_human_date", "human_id", "game_date", postgresql_using="btree"),
1093
+ db.Index("idx_game_stats_skater_human_team_date", "human_id", "team_id", "game_date", postgresql_using="btree"),
1094
+ )
1095
+
1096
+
1097
+ class GameStatsGoalie(db.Model):
1098
+ """Per-game goalie statistics.
1099
+
1100
+ Stores individual game performance for goalies.
1101
+ Only records where goalie faced shots are saved.
1102
+ Optimized for queries like "show me top N games by save% for goalie X".
1103
+ """
1104
+ __tablename__ = "game_stats_goalie"
1105
+
1106
+ id = db.Column(db.Integer, primary_key=True)
1107
+ game_id = db.Column(db.Integer, db.ForeignKey("games.id"), nullable=False, index=True)
1108
+ human_id = db.Column(db.Integer, db.ForeignKey("humans.id"), nullable=False, index=True)
1109
+ team_id = db.Column(db.Integer, db.ForeignKey("teams.id"), nullable=False, index=True)
1110
+ org_id = db.Column(db.Integer, db.ForeignKey("organizations.id"), nullable=False, index=True)
1111
+ level_id = db.Column(db.Integer, db.ForeignKey("levels.id"), nullable=False, index=True)
1112
+
1113
+ # Denormalized game metadata for sorting/filtering
1114
+ game_date = db.Column(db.Date, nullable=False, index=True)
1115
+ game_time = db.Column(db.Time, nullable=False)
1116
+
1117
+ # Performance stats
1118
+ goals_allowed = db.Column(db.Integer, default=0, nullable=False)
1119
+ shots_faced = db.Column(db.Integer, default=0, nullable=False)
1120
+ saves = db.Column(db.Integer, default=0, nullable=False) # Computed: shots_faced - goals_allowed
1121
+ save_percentage = db.Column(db.Float, default=0.0, nullable=False) # Computed: saves / shots_faced
1122
+
1123
+ # Tracking
1124
+ created_at = db.Column(db.DateTime, nullable=False, default=db.func.current_timestamp())
1125
+
1126
+ __table_args__ = (
1127
+ db.UniqueConstraint("game_id", "human_id", "team_id", name="_game_human_uc_goalie"),
1128
+ db.Index("idx_game_stats_goalie_human_date", "human_id", "game_date", postgresql_using="btree"),
1129
+ db.Index("idx_game_stats_goalie_human_team_date", "human_id", "team_id", "game_date", postgresql_using="btree"),
1130
+ )
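
The docstrings above describe the target query shape ("top N games by points for player X"), which the `(human_id, game_date)` index supports. An illustrative sketch with a placeholder player id:

```python
# Illustrative sketch of the query shape these tables are indexed for:
# "top N games by points for player X". The human_id is a placeholder.
from hockey_blast_common_lib.db_connection import create_session
from hockey_blast_common_lib.stats_models import GameStatsSkater

session = create_session("boss")
best_games = (
    session.query(GameStatsSkater)
    .filter(GameStatsSkater.human_id == 12345)  # placeholder player id
    .order_by(GameStatsSkater.points.desc(), GameStatsSkater.game_date.desc())
    .limit(10)
    .all()
)
for row in best_games:
    print(row.game_date, f"{row.goals}G {row.assists}A = {row.points}P")
```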
hockey_blast_common_lib/utils.py
@@ -269,7 +269,8 @@ def calculate_percentile_value(values, percentile):
269
269
  lower_value = sorted_values[lower_index]
270
270
  upper_value = sorted_values[upper_index]
271
271
 
272
- return lower_value + fraction * (upper_value - lower_value)
272
+ # Convert Decimal to float to avoid type errors with database Decimal fields
273
+ return float(lower_value) + fraction * (float(upper_value) - float(lower_value))
273
274
 
274
275
 
275
276
  # TEST DB CONNECTION, PERMISSIONS...
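
The change above exists because NUMERIC columns come back from the database as `decimal.Decimal`, and mixing Decimal with the float `fraction` raises a TypeError. A minimal standalone illustration (the values are made up):

```python
# Minimal illustration of the failure mode the float() conversion avoids:
# mixing decimal.Decimal (as returned for NUMERIC columns) with float raises.
from decimal import Decimal

lower_value, upper_value, fraction = Decimal("2.0"), Decimal("5.0"), 0.25

try:
    lower_value + fraction * (upper_value - lower_value)  # old expression
except TypeError as exc:
    print(f"TypeError: {exc}")

# New expression: cast to float first, so the interpolation always works.
print(float(lower_value) + fraction * (float(upper_value) - float(lower_value)))  # 2.75
```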
hockey_blast_common_lib-0.1.66.dist-info/METADATA → hockey_blast_common_lib-0.1.67.dist-info/METADATA
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: hockey-blast-common-lib
3
- Version: 0.1.66
3
+ Version: 0.1.67
4
4
  Summary: Common library for shared functionality and DB models
5
5
  Author: Pavel Kletskov
6
6
  Author-email: kletskov@gmail.com
hockey_blast_common_lib-0.1.66.dist-info/RECORD → hockey_blast_common_lib-0.1.67.dist-info/RECORD
@@ -1,31 +1,34 @@
1
1
  hockey_blast_common_lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
2
  hockey_blast_common_lib/aggregate_all_stats.py,sha256=lWDhdYMYFEdNFTM3FmAKWiHFYSkb0OLjTkagguHlwls,1914
3
- hockey_blast_common_lib/aggregate_goalie_stats.py,sha256=Z_xRHR-C6_2KO67LmIW8uKVD4tpEXXvFhfo0DHouuHo,19871
3
+ hockey_blast_common_lib/aggregate_game_stats_all.py,sha256=r0s2ki8y60pHL3wI8yDhRkAfNRRRgnOfzbJUwO6T4QY,4738
4
+ hockey_blast_common_lib/aggregate_game_stats_goalie.py,sha256=XMGhkCfj9EQ2kK1V8z0xLrCDi9t1Sqq0OxusG1uSXNk,9505
5
+ hockey_blast_common_lib/aggregate_game_stats_skater.py,sha256=00x8TUzPxClmfGZ7M6dPKUNJdYaCF1qIA6KmO7izI0Q,13013
6
+ hockey_blast_common_lib/aggregate_goalie_stats.py,sha256=hYhoCGf3p0lsLMYT11e-jqFLi3Yv2O3JmljdWRqww4I,20310
4
7
  hockey_blast_common_lib/aggregate_h2h_stats.py,sha256=nStyIm_be25pKDYbPCaOSHFTjbaMLFxFAa2mTU1tL_k,11486
5
- hockey_blast_common_lib/aggregate_human_stats.py,sha256=uoGBkROBKh8n18TyzZ6vHX_viCTpHbRsiVLyflJq92g,29247
6
- hockey_blast_common_lib/aggregate_referee_stats.py,sha256=VZVqiTfcHKtpyqUjvFBlypz2P8bIVg9wp0OK-MOu7O8,19580
8
+ hockey_blast_common_lib/aggregate_human_stats.py,sha256=2NrdgKHQnaYHaM9tNjq8512Ea9RuySTRdzpRlHpkt7k,29741
9
+ hockey_blast_common_lib/aggregate_referee_stats.py,sha256=UUbd_YqdOFGFCRMcobU5ROfojKXKtcpEYJ8pL8Wqka8,20049
7
10
  hockey_blast_common_lib/aggregate_s2s_stats.py,sha256=gB3Oi1emtBWL3bKojUhHH01gAbQTSLvgqO1WcvLI6F8,7449
8
- hockey_blast_common_lib/aggregate_scorekeeper_stats.py,sha256=NVCL5QzeIodTKs_OvDlcKDtGKnyxA_ZMlKTGfeO4H6Y,17829
9
- hockey_blast_common_lib/aggregate_skater_stats.py,sha256=pU9ULO90165QqWWMz5leSHD9iJb5rJegBzGjZpiKYGw,34870
10
- hockey_blast_common_lib/aggregate_team_goalie_stats.py,sha256=Yy06zDrgLDsI2QqjFzPck3mYWNhyTxSVpZHSdpDSkRE,9569
11
- hockey_blast_common_lib/aggregate_team_skater_stats.py,sha256=cGP8eLTDD0lEkkTmUlBvSwqYfAGIfEOQREy_xtHxHKI,11962
11
+ hockey_blast_common_lib/aggregate_scorekeeper_stats.py,sha256=Bk6XOlv61kd1PwmWws8JEdsJt1nXnGEuDM1ecPaHEZM,18299
12
+ hockey_blast_common_lib/aggregate_skater_stats.py,sha256=ognlux7PFqRZS0YxW89aQGZkfdoG1Z0pjT8A9lrfATs,35339
13
+ hockey_blast_common_lib/aggregate_team_goalie_stats.py,sha256=ApgaR2beGmrV7docvDjXIGEACTt8f5T5pcmKisqLHpI,10044
14
+ hockey_blast_common_lib/aggregate_team_skater_stats.py,sha256=f93Dq884U47nlatWkyl4Tgtl-SvaBMg3pmR-rOJdWXU,12437
12
15
  hockey_blast_common_lib/assign_skater_skill.py,sha256=it3jiSyUq7XpKqxzs88lyB5t1c3t1idIS_JRwq_FQoo,2810
13
16
  hockey_blast_common_lib/db_connection.py,sha256=KACyHaOMeTX9zPNztYy8uOeB1ubIUenZcEKAeD5gC24,3333
14
17
  hockey_blast_common_lib/dump_sample_db.sh,sha256=MY3lnzTXBoWd76-ZlZr9nWsKMEVgyRsUn-LZ2d1JWZs,810
15
18
  hockey_blast_common_lib/embedding_utils.py,sha256=XbJvJlq6BKE6_oLzhUKcCrx6-TM8P-xl-S1SVLr_teU,10222
16
19
  hockey_blast_common_lib/h2h_models.py,sha256=DEmQnmuacBVRNWvpRvq2RlwmhQYrT7XPOSTDNVtchr0,8597
17
- hockey_blast_common_lib/hockey_blast_sample_backup.sql.gz,sha256=sh_-vvwEIiKDCtmIIzyiyaNs67gUHCTu6CGCCdBcT6Q,4648833
18
- hockey_blast_common_lib/models.py,sha256=RQGUq8C8eJqUB2o3QCSs14W-9B4lMTUNvwNDM-Lc6j4,21687
20
+ hockey_blast_common_lib/hockey_blast_sample_backup.sql.gz,sha256=dIAe4lsuCBQqE1WEkRyycEPYkAUlsA1scIU50SRalsI,4648840
21
+ hockey_blast_common_lib/models.py,sha256=HdSrKvNVvKNr2klvAinpj5aXiFxv9Vb0IrYkrIURj0o,22889
19
22
  hockey_blast_common_lib/options.py,sha256=wzfGWKK_dHBA_PfiOvbP_-HtdoJCR0E7DkA5_cYDb_k,1578
20
23
  hockey_blast_common_lib/progress_utils.py,sha256=7Txjpx5G4vHbnPTvNYuBA_WtrY0QFA4mDEYUDuZyY1E,3923
21
24
  hockey_blast_common_lib/restore_sample_db.sh,sha256=7W3lzRZeu9zXIu1Bvtnaw8EHc1ulHmFM4mMh86oUQJo,2205
22
25
  hockey_blast_common_lib/skills_in_divisions.py,sha256=9sGtU6SLj8BXb5R74ue1oPWa2nbk4JfJz5VmcuxetzA,8542
23
26
  hockey_blast_common_lib/skills_propagation.py,sha256=qBK84nzkn8ZQHum0bdxFQwLvdgVE7DtWoPP9cdbOmRo,20201
24
- hockey_blast_common_lib/stats_models.py,sha256=EU7Uw9ANPNNEx4vORgJkMFdgyTXQGmGLOF7YiY8jeBY,35488
27
+ hockey_blast_common_lib/stats_models.py,sha256=r-7cqStcIAO72gotRp1o8-HB4tXvJPsc4d3702gIQYM,39605
25
28
  hockey_blast_common_lib/stats_utils.py,sha256=PTZvykl1zfEcojnzDFa1J3V3F5gREmoFG1lQHLnYHgo,300
26
- hockey_blast_common_lib/utils.py,sha256=xHgA3Xh40i4CBVArvfW2j123XGdgrMTFqTudPQHwkho,8997
29
+ hockey_blast_common_lib/utils.py,sha256=911NlMLzwMX5uwmytcpxNPRP-Y8OjDxTGKKIcyED5ls,9099
27
30
  hockey_blast_common_lib/wsgi.py,sha256=oL9lPWccKLTAYIKPJkKZV5keVE-Dgosv74CBi770NNc,786
28
- hockey_blast_common_lib-0.1.66.dist-info/METADATA,sha256=ANT3HdUqzzFk-q_2ujzu9oPPmohLJFXXZdYAqj-P0Yg,318
29
- hockey_blast_common_lib-0.1.66.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
30
- hockey_blast_common_lib-0.1.66.dist-info/top_level.txt,sha256=wIR4LIkE40npoA2QlOdfCYlgFeGbsHR8Z6r0h46Vtgc,24
31
- hockey_blast_common_lib-0.1.66.dist-info/RECORD,,
31
+ hockey_blast_common_lib-0.1.67.dist-info/METADATA,sha256=U8V1E5eCzhwWfqAX7sAPimvb6WpAsnO44AgNNb3q9tI,318
32
+ hockey_blast_common_lib-0.1.67.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
33
+ hockey_blast_common_lib-0.1.67.dist-info/top_level.txt,sha256=wIR4LIkE40npoA2QlOdfCYlgFeGbsHR8Z6r0h46Vtgc,24
34
+ hockey_blast_common_lib-0.1.67.dist-info/RECORD,,