hockey-blast-common-lib 0.1.65__py3-none-any.whl → 0.1.67__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -5,6 +5,8 @@ import sys
 sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 
+from datetime import datetime
+
 import sqlalchemy
 from sqlalchemy.sql import func
 
@@ -22,7 +24,9 @@ from hockey_blast_common_lib.stats_models import (
 from hockey_blast_common_lib.stats_utils import ALL_ORGS_ID
 from hockey_blast_common_lib.utils import (
     assign_ranks,
+    calculate_percentile_value,
     get_non_human_ids,
+    get_percentile_human,
     get_start_datetime,
 )
 
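The hunk above pulls calculate_percentile_value and get_percentile_human into the scorekeeper aggregation module, but their bodies are not part of this diff. As a rough illustration only, a compatible calculate_percentile_value could look like the sketch below, assuming linear interpolation between closest ranks (the helper actually shipped in hockey_blast_common_lib.utils may behave differently):

def calculate_percentile_value(values, percentile):
    # Hypothetical sketch; the real helper lives in hockey_blast_common_lib.utils
    # and is not shown in this diff.
    ordered = sorted(values)
    if not ordered:
        return 0
    k = (len(ordered) - 1) * (percentile / 100.0)
    lower = int(k)
    upper = min(lower + 1, len(ordered) - 1)
    return ordered[lower] + (ordered[upper] - ordered[lower]) * (k - lower)

Called as calculate_percentile_value([1, 2, 10], 50), such a helper would return 2.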
@@ -33,6 +37,77 @@ FORFEIT_STATUS = "FORFEIT"
 NOEVENTS_STATUS = "NOEVENTS"
 
 
+def insert_percentile_markers_scorekeeper(
+    session, stats_dict, aggregation_id, total_in_rank, StatsModel
+):
+    """Insert percentile marker records for scorekeeper stats."""
+    if not stats_dict:
+        return
+
+    stat_fields = [
+        "games_recorded",
+        "games_participated",
+        "games_with_stats",
+        "sog_given",
+        "sog_per_game",
+        "total_saves_recorded",
+        "avg_saves_per_game",
+        "avg_max_saves_per_5sec",
+        "avg_max_saves_per_20sec",
+        "peak_max_saves_per_5sec",
+        "peak_max_saves_per_20sec",
+        "quality_score",
+    ]
+
+    percentiles = [25, 50, 75, 90, 95]
+
+    for percentile in percentiles:
+        percentile_human_id = get_percentile_human(session, "Scorekeeper", percentile)
+
+        percentile_values = {}
+        for field in stat_fields:
+            values = [stat[field] for stat in stats_dict.values() if field in stat]
+            if values:
+                percentile_values[field] = calculate_percentile_value(values, percentile)
+            else:
+                percentile_values[field] = 0
+
+        scorekeeper_stat = StatsModel(
+            aggregation_id=aggregation_id,
+            human_id=percentile_human_id,
+            games_recorded=int(percentile_values.get("games_recorded", 0)),
+            games_participated=int(percentile_values.get("games_participated", 0)),
+            games_participated_rank=0,
+            games_with_stats=int(percentile_values.get("games_with_stats", 0)),
+            games_with_stats_rank=0,
+            sog_given=int(percentile_values.get("sog_given", 0)),
+            sog_per_game=percentile_values.get("sog_per_game", 0.0),
+            total_saves_recorded=int(percentile_values.get("total_saves_recorded", 0)),
+            avg_saves_per_game=percentile_values.get("avg_saves_per_game", 0.0),
+            avg_max_saves_per_5sec=percentile_values.get("avg_max_saves_per_5sec", 0.0),
+            avg_max_saves_per_20sec=percentile_values.get("avg_max_saves_per_20sec", 0.0),
+            peak_max_saves_per_5sec=int(percentile_values.get("peak_max_saves_per_5sec", 0)),
+            peak_max_saves_per_20sec=int(percentile_values.get("peak_max_saves_per_20sec", 0)),
+            quality_score=percentile_values.get("quality_score", 0.0),
+            games_recorded_rank=0,
+            sog_given_rank=0,
+            sog_per_game_rank=0,
+            total_saves_recorded_rank=0,
+            avg_saves_per_game_rank=0,
+            avg_max_saves_per_5sec_rank=0,
+            avg_max_saves_per_20sec_rank=0,
+            peak_max_saves_per_5sec_rank=0,
+            peak_max_saves_per_20sec_rank=0,
+            quality_score_rank=0,
+            total_in_rank=total_in_rank,
+            first_game_id=None,
+            last_game_id=None,
+        )
+        session.add(scorekeeper_stat)
+
+    session.commit()
+
+
 def calculate_quality_score(
     avg_max_saves_5sec, avg_max_saves_20sec, peak_max_saves_5sec, peak_max_saves_20sec
 ):
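get_percentile_human(session, "Scorekeeper", percentile) is expected to return the id of a synthetic Human row that serves as the percentile marker. Its implementation is not included in this diff; a get-or-create sketch along these lines would satisfy the call sites above, but the name-based lookup and the Human columns used here are assumptions, not the library's actual convention:

def get_percentile_human(session, role_label, percentile):
    # Hypothetical sketch only: assumes Human has first_name/last_name columns
    # and that markers are identified by a reserved name pattern.
    from hockey_blast_common_lib.models import Human

    marker_name = f"{percentile}th Percentile {role_label}"
    human = session.query(Human).filter(Human.first_name == marker_name).first()
    if human is None:
        human = Human(first_name=marker_name, last_name="Marker")
        session.add(human)
        session.commit()
    return human.id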
@@ -78,6 +153,9 @@ def aggregate_scorekeeper_stats(
     if aggregation_type == "org" and aggregation_id != ALL_ORGS_ID:
         return  # Do nothing for individual organization IDs
 
+    # Capture start time for aggregation tracking
+    aggregation_start_time = datetime.utcnow()
+
     human_ids_to_filter = get_non_human_ids(session)
 
     if aggregation_type == "org":
@@ -252,6 +330,11 @@ def aggregate_scorekeeper_stats(
         stats_dict, "quality_score", reverse_rank=True
     )  # Lower is better (less problematic)
 
+    # Calculate and insert percentile marker records
+    insert_percentile_markers_scorekeeper(
+        session, stats_dict, aggregation_id, total_in_rank, StatsModel
+    )
+
     # Insert aggregated stats into the appropriate table with progress output
     batch_size = 1000
     for i, (key, stat) in enumerate(stats_dict.items(), 1):
@@ -292,6 +375,7 @@ def aggregate_scorekeeper_stats(
             total_in_rank=total_in_rank,
             first_game_id=stat["first_game_id"],
             last_game_id=stat["last_game_id"],
+            aggregation_started_at=aggregation_start_time,
         )
         session.add(scorekeeper_stat)
         # Commit in batches
@@ -299,6 +383,13 @@ def aggregate_scorekeeper_stats(
             session.commit()
     session.commit()
 
+    # Update all records with completion timestamp
+    aggregation_end_time = datetime.utcnow()
+    session.query(StatsModel).filter(
+        StatsModel.aggregation_id == aggregation_id
+    ).update({StatsModel.aggregation_completed_at: aggregation_end_time})
+    session.commit()
+
 
 def run_aggregate_scorekeeper_stats():
     session = create_session("boss")
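The two timestamps added above (aggregation_started_at written on each inserted row, aggregation_completed_at back-filled after the final commit) make it possible to tell whether an aggregation run finished. A hypothetical read-side check, written against any stats model that carries these columns (the helper name is illustrative, not part of the package):

from sqlalchemy import func

def last_aggregation_window(session, StatsModel, aggregation_id):
    # Returns (latest started_at, latest completed_at) for the aggregation;
    # a missing or older completed_at would indicate an unfinished run.
    return (
        session.query(
            func.max(StatsModel.aggregation_started_at),
            func.max(StatsModel.aggregation_completed_at),
        )
        .filter(StatsModel.aggregation_id == aggregation_id)
        .one()
    )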
@@ -6,6 +6,7 @@ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 
 import sqlalchemy
+from datetime import datetime
 from sqlalchemy import and_, case, func
 from sqlalchemy.sql import case, func
 
@@ -37,8 +38,10 @@ from hockey_blast_common_lib.stats_models import (
 )
 from hockey_blast_common_lib.stats_utils import ALL_ORGS_ID
 from hockey_blast_common_lib.utils import (
+    calculate_percentile_value,
     get_all_division_ids_for_org,
     get_non_human_ids,
+    get_percentile_human,
     get_start_datetime,
 )
 
@@ -115,6 +118,105 @@ def calculate_current_point_streak(session, human_id, filter_condition):
     return current_streak, avg_points_during_streak
 
 
+def insert_percentile_markers_skater(
+    session, stats_dict, aggregation_id, total_in_rank, StatsModel, aggregation_window
+):
+    """Insert percentile marker records for skater stats.
+
+    For each stat field, calculate the 25th, 50th, 75th, 90th, and 95th percentile values
+    and insert marker records with fake human IDs.
+    """
+    if not stats_dict:
+        return
+
+    # Define the stat fields we want to calculate percentiles for
+    # Each field has percentile calculated SEPARATELY
+    stat_fields = [
+        "games_played",
+        "games_participated",
+        "games_with_stats",
+        "goals",
+        "assists",
+        "points",
+        "penalties",
+        "gm_penalties",
+        "goals_per_game",
+        "assists_per_game",
+        "points_per_game",
+        "penalties_per_game",
+        "gm_penalties_per_game",
+    ]
+
+    # Add streak fields only for all-time stats
+    if aggregation_window is None:
+        stat_fields.extend(
+            ["current_point_streak", "current_point_streak_avg_points"]
+        )
+
+    # For each percentile (25, 50, 75, 90, 95)
+    percentiles = [25, 50, 75, 90, 95]
+
+    for percentile in percentiles:
+        # Get or create the percentile marker human
+        percentile_human_id = get_percentile_human(session, "Skater", percentile)
+
+        # Calculate percentile values for each stat field SEPARATELY
+        percentile_values = {}
+        for field in stat_fields:
+            # Extract all values for this field
+            values = [stat[field] for stat in stats_dict.values() if field in stat]
+            if values:
+                percentile_values[field] = calculate_percentile_value(values, percentile)
+            else:
+                percentile_values[field] = 0
+
+        # Create the stats record for this percentile marker
+        skater_stat = StatsModel(
+            aggregation_id=aggregation_id,
+            human_id=percentile_human_id,
+            games_played=int(percentile_values.get("games_played", 0)),
+            games_participated=int(percentile_values.get("games_participated", 0)),
+            games_participated_rank=0,  # Percentile markers don't have ranks
+            games_with_stats=int(percentile_values.get("games_with_stats", 0)),
+            games_with_stats_rank=0,
+            goals=int(percentile_values.get("goals", 0)),
+            assists=int(percentile_values.get("assists", 0)),
+            points=int(percentile_values.get("points", 0)),
+            penalties=int(percentile_values.get("penalties", 0)),
+            gm_penalties=int(percentile_values.get("gm_penalties", 0)),
+            goals_per_game=percentile_values.get("goals_per_game", 0.0),
+            points_per_game=percentile_values.get("points_per_game", 0.0),
+            assists_per_game=percentile_values.get("assists_per_game", 0.0),
+            penalties_per_game=percentile_values.get("penalties_per_game", 0.0),
+            gm_penalties_per_game=percentile_values.get("gm_penalties_per_game", 0.0),
+            games_played_rank=0,
+            goals_rank=0,
+            assists_rank=0,
+            points_rank=0,
+            penalties_rank=0,
+            gm_penalties_rank=0,
+            goals_per_game_rank=0,
+            points_per_game_rank=0,
+            assists_per_game_rank=0,
+            penalties_per_game_rank=0,
+            gm_penalties_per_game_rank=0,
+            total_in_rank=total_in_rank,
+            current_point_streak=int(
+                percentile_values.get("current_point_streak", 0)
+            ),
+            current_point_streak_rank=0,
+            current_point_streak_avg_points=percentile_values.get(
+                "current_point_streak_avg_points", 0.0
+            ),
+            current_point_streak_avg_points_rank=0,
+            first_game_id=None,  # Percentile markers don't have game references
+            last_game_id=None,
+        )
+        session.add(skater_stat)
+
+    session.commit()
+
+
 def aggregate_skater_stats(
     session,
     aggregation_type,
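As the comments in the new function stress, each field's percentile is computed independently, so a single marker row does not correspond to any real skater. A toy illustration of the inner loop with made-up numbers, assuming the calculate_percentile_value helper imported above:

toy_stats = {
    (1, 101): {"goals": 2, "assists": 9},
    (1, 102): {"goals": 7, "assists": 1},
    (1, 103): {"goals": 30, "assists": 4},
}
for field in ("goals", "assists"):
    values = [s[field] for s in toy_stats.values()]
    print(field, calculate_percentile_value(values, 50))
# The 50th-percentile marker pairs the median goals value with the median
# assists value, even though no single skater has that combination.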
@@ -122,6 +224,9 @@ def aggregate_skater_stats(
     debug_human_id=None,
     aggregation_window=None,
 ):
+    # Capture start time for aggregation tracking
+    aggregation_start_time = datetime.utcnow()
+
     human_ids_to_filter = get_non_human_ids(session)
 
     # Get the name of the aggregation, for debug purposes
@@ -561,6 +666,11 @@ def aggregate_skater_stats(
     assign_ranks(stats_dict, "current_point_streak")
     assign_ranks(stats_dict, "current_point_streak_avg_points")
 
+    # Calculate and insert percentile marker records
+    insert_percentile_markers_skater(
+        session, stats_dict, aggregation_id, total_in_rank, StatsModel, aggregation_window
+    )
+
     # Debug output for specific human
     if debug_human_id:
         if any(key[1] == debug_human_id for key in stats_dict):
@@ -648,6 +758,7 @@ def aggregate_skater_stats(
             ),
             first_game_id=stat["first_game_id"],
             last_game_id=stat["last_game_id"],
+            aggregation_started_at=aggregation_start_time,
         )
         session.add(skater_stat)
         # Commit in batches
@@ -655,6 +766,13 @@ def aggregate_skater_stats(
             session.commit()
     session.commit()
 
+    # Update all records with completion timestamp
+    aggregation_end_time = datetime.utcnow()
+    session.query(StatsModel).filter(
+        StatsModel.aggregation_id == aggregation_id
+    ).update({StatsModel.aggregation_completed_at: aggregation_end_time})
+    session.commit()
+
 
 def run_aggregate_skater_stats():
     session = create_session("boss")
@@ -0,0 +1,265 @@
+"""
+Aggregate goalie statistics by team.
+
+This module aggregates goalie statistics for each team, counting only games
+where the goalie was on that specific team (using GameRoster.team_id).
+
+Key difference from regular aggregation:
+- Aggregates by (aggregation_id, team_id, human_id) instead of just (aggregation_id, human_id)
+- Filters to only games where GameRoster.team_id matches the target team
+- Stores results in OrgStatsGoalieTeam / DivisionStatsGoalieTeam
+
+"""
+import os
+import sys
+
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+from datetime import datetime
+
+import sqlalchemy
+from sqlalchemy import func
+
+from hockey_blast_common_lib.db_connection import create_session
+from hockey_blast_common_lib.models import (
+    Division,
+    Game,
+    GameRoster,
+    GoalieSaves,
+    Human,
+    Organization,
+    Team,
+)
+from hockey_blast_common_lib.options import (
+    MIN_GAMES_FOR_DIVISION_STATS,
+    MIN_GAMES_FOR_ORG_STATS,
+)
+from hockey_blast_common_lib.progress_utils import create_progress_tracker
+from hockey_blast_common_lib.stats_models import (
+    DivisionStatsGoalieTeam,
+    OrgStatsGoalieTeam,
+)
+from hockey_blast_common_lib.utils import (
+    calculate_percentile_value,
+    get_non_human_ids,
+    get_percentile_human,
+)
+
+# Import status constants for game filtering
+FINAL_STATUS = "Final"
+FINAL_SO_STATUS = "Final(SO)"
+FORFEIT_STATUS = "FORFEIT"
+NOEVENTS_STATUS = "NOEVENTS"
+
+
+def aggregate_team_goalie_stats(session, aggregation_type, aggregation_id):
+    """
+    Aggregate goalie stats by team for an organization or division.
+
+    For each team in the aggregation scope, calculates stats for all goalies
+    who played for that team, counting only games where they were on that team.
+
+    Args:
+        session: Database session
+        aggregation_type: "org" or "division"
+        aggregation_id: ID of the organization or division
+    """
+    # Capture start time for aggregation tracking
+    aggregation_start_time = datetime.utcnow()
+
+    human_ids_to_filter = get_non_human_ids(session)
+
+    # Determine aggregation details
+    if aggregation_type == "org":
+        StatsModel = OrgStatsGoalieTeam
+        min_games = MIN_GAMES_FOR_ORG_STATS
+        aggregation_name = (
+            session.query(Organization)
+            .filter(Organization.id == aggregation_id)
+            .first()
+            .organization_name
+        )
+        filter_condition = Game.org_id == aggregation_id
+    elif aggregation_type == "division":
+        StatsModel = DivisionStatsGoalieTeam
+        min_games = MIN_GAMES_FOR_DIVISION_STATS
+        aggregation_name = (
+            session.query(Division).filter(Division.id == aggregation_id).first().level
+        )
+        filter_condition = Game.division_id == aggregation_id
+    else:
+        raise ValueError(f"Invalid aggregation type: {aggregation_type}")
+
+    print(f"Aggregating team goalie stats for {aggregation_name}...")
+
+    # Delete existing stats for this aggregation
+    session.query(StatsModel).filter(StatsModel.aggregation_id == aggregation_id).delete()
+    session.commit()
+
+    # Get all teams in this aggregation scope
+    if aggregation_type == "org":
+        teams_query = (
+            session.query(Team.id, Team.name)
+            .join(Game, (Game.home_team_id == Team.id) | (Game.visitor_team_id == Team.id))
+            .filter(Game.org_id == aggregation_id)
+            .distinct()
+        )
+    else:  # division
+        teams_query = (
+            session.query(Team.id, Team.name)
+            .join(Game, (Game.home_team_id == Team.id) | (Game.visitor_team_id == Team.id))
+            .filter(Game.division_id == aggregation_id)
+            .distinct()
+        )
+
+    teams = teams_query.all()
+    print(f"Found {len(teams)} teams in {aggregation_name}")
+
+    # Process each team
+    progress = create_progress_tracker(len(teams), description="Processing teams")
+    for team_id, team_name in teams:
+        progress.update(1)
+
+        # Aggregate stats for goalies on this team
+        # Filter to only games where goalies were on THIS team
+        games_played_query = (
+            session.query(
+                GameRoster.human_id,
+                func.count(Game.id).label("games_played"),
+                func.count(Game.id).label("games_participated"),
+                func.count(Game.id).label("games_with_stats"),
+                func.array_agg(Game.id).label("game_ids"),
+            )
+            .join(Game, Game.id == GameRoster.game_id)
+            .filter(
+                GameRoster.team_id == team_id,  # KEY: Filter by team
+                GameRoster.role.ilike("g"),  # Only goalies
+                GameRoster.human_id.notin_(human_ids_to_filter),
+                Game.status.in_([FINAL_STATUS, FINAL_SO_STATUS, FORFEIT_STATUS, NOEVENTS_STATUS]),
+                filter_condition,  # org_id or division_id filter
+            )
+            .group_by(GameRoster.human_id)
+            .having(func.count(Game.id) >= min_games)
+        )
+
+        games_played_data = games_played_query.all()
+        if not games_played_data:
+            continue  # No goalies met minimum games for this team
+
+        # Create stats dictionary
+        stats_dict = {}
+        for row in games_played_data:
+            stats_dict[row.human_id] = {
+                "games_played": row.games_played,
+                "games_participated": row.games_participated,
+                "games_with_stats": row.games_with_stats,
+                "game_ids": row.game_ids,
+                "first_game_id": row.game_ids[0] if row.game_ids else None,
+                "last_game_id": row.game_ids[-1] if row.game_ids else None,
+            }
+
+        # Aggregate goals allowed and shots faced from GoalieSaves table
+        goalie_saves_query = (
+            session.query(
+                GameRoster.human_id,
+                func.sum(GoalieSaves.goals_allowed).label("goals_allowed"),
+                func.sum(GoalieSaves.shots_against).label("shots_faced"),
+            )
+            .join(Game, Game.id == GameRoster.game_id)
+            .join(
+                GoalieSaves,
+                (GoalieSaves.game_id == Game.id) & (GoalieSaves.goalie_id == GameRoster.human_id),
+            )
+            .filter(
+                GameRoster.team_id == team_id,  # KEY: Filter by team
+                GameRoster.role.ilike("g"),
+                GameRoster.human_id.in_(stats_dict.keys()),
+                Game.status.in_([FINAL_STATUS, FINAL_SO_STATUS]),
+                filter_condition,
+            )
+            .group_by(GameRoster.human_id)
+        )
+
+        for row in goalie_saves_query.all():
+            if row.human_id in stats_dict:
+                stats_dict[row.human_id]["goals_allowed"] = row.goals_allowed or 0
+                stats_dict[row.human_id]["shots_faced"] = row.shots_faced or 0
+
+        # Calculate per-game averages and save percentage
+        for human_id, stats in stats_dict.items():
+            games_with_stats = stats.get("games_with_stats", 0)
+            goals_allowed = stats.get("goals_allowed", 0)
+            shots_faced = stats.get("shots_faced", 0)
+
+            if games_with_stats > 0:
+                stats["goals_allowed_per_game"] = goals_allowed / games_with_stats
+            else:
+                stats["goals_allowed_per_game"] = 0.0
+
+            if shots_faced > 0:
+                saves = shots_faced - goals_allowed
+                stats["save_percentage"] = saves / shots_faced
+            else:
+                stats["save_percentage"] = 0.0
+
+        # Insert stats for each goalie on this team
+        for human_id, stats in stats_dict.items():
+            goalie_stat = StatsModel(
+                aggregation_id=aggregation_id,
+                team_id=team_id,
+                human_id=human_id,
+                games_played=stats.get("games_played", 0),
+                games_participated=stats.get("games_participated", 0),
+                games_with_stats=stats.get("games_with_stats", 0),
+                goals_allowed=stats.get("goals_allowed", 0),
+                shots_faced=stats.get("shots_faced", 0),
+                goals_allowed_per_game=stats.get("goals_allowed_per_game", 0.0),
+                save_percentage=stats.get("save_percentage", 0.0),
+                total_in_rank=len(stats_dict),
+                first_game_id=stats.get("first_game_id"),
+                last_game_id=stats.get("last_game_id"),
+                # Initialize rank fields to 0 (not calculated for team stats)
+                games_played_rank=0,
+                games_participated_rank=0,
+                games_with_stats_rank=0,
+                goals_allowed_rank=0,
+                shots_faced_rank=0,
+                goals_allowed_per_game_rank=0,
+                save_percentage_rank=0,
+                aggregation_started_at=aggregation_start_time,
+            )
+            session.add(goalie_stat)
+
+        session.commit()
+
+    # Update all records with completion timestamp
+    aggregation_end_time = datetime.utcnow()
+    session.query(StatsModel).filter(
+        StatsModel.aggregation_id == aggregation_id
+    ).update({StatsModel.aggregation_completed_at: aggregation_end_time})
+    session.commit()
+
+    progress.finish()
+    print(f"✓ Team goalie stats aggregation complete for {aggregation_name}")
+
+
+def run_aggregate_team_goalie_stats():
+    """
+    Run team goalie stats aggregation for all organizations and divisions.
+    """
+    from hockey_blast_common_lib.utils import get_all_division_ids_for_org
+
+    session = create_session("boss")
+
+    # Get all org_id present in the Organization table
+    org_ids = session.query(Organization.id).all()
+    org_ids = [org_id[0] for org_id in org_ids]
+
+    for org_id in org_ids:
+        # Aggregate for organization level
+        aggregate_team_goalie_stats(session, "org", org_id)
+
+        # Aggregate for all divisions in this organization
+        division_ids = get_all_division_ids_for_org(session, org_id)
+        for division_id in division_ids:
+            aggregate_team_goalie_stats(session, "division", division_id)
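A hypothetical read-back of the per-team goalie stats produced by this new module, using only models and columns that appear above (the division id is an arbitrary example):

from hockey_blast_common_lib.db_connection import create_session
from hockey_blast_common_lib.stats_models import DivisionStatsGoalieTeam

session = create_session("boss")
top_goalies = (
    session.query(DivisionStatsGoalieTeam)
    .filter(DivisionStatsGoalieTeam.aggregation_id == 42)  # example division id
    .order_by(DivisionStatsGoalieTeam.save_percentage.desc())
    .limit(5)
    .all()
)
for row in top_goalies:
    # team_id distinguishes stints on different teams for the same goalie
    print(row.team_id, row.human_id, row.games_played, round(row.save_percentage, 3))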