hockey-blast-common-lib 0.1.51__py3-none-any.whl → 0.1.54__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -16,6 +16,7 @@ from hockey_blast_common_lib.utils import assign_ranks
16
16
  from sqlalchemy import func, case, and_
17
17
  from collections import defaultdict
18
18
  from hockey_blast_common_lib.stats_utils import ALL_ORGS_ID
19
+ from hockey_blast_common_lib.progress_utils import create_progress_tracker
19
20
 
20
21
  def aggregate_goalie_stats(session, aggregation_type, aggregation_id, names_to_filter_out, debug_human_id=None, aggregation_window=None):
21
22
  human_ids_to_filter = get_human_ids_by_names(session, names_to_filter_out)
@@ -190,33 +191,51 @@ def run_aggregate_goalie_stats():
190
191
 
191
192
  for org_id in org_ids:
192
193
  division_ids = get_all_division_ids_for_org(session, org_id)
193
- print(f"Aggregating goalie stats for {len(division_ids)} divisions in org_id {org_id}...")
194
- total_divisions = len(division_ids)
195
- processed_divisions = 0
196
- for division_id in division_ids:
197
- aggregate_goalie_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
198
- aggregate_goalie_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Weekly')
199
- aggregate_goalie_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Daily')
200
- processed_divisions += 1
201
- if human_id_to_debug is None:
202
- print(f"\rProcessed {processed_divisions}/{total_divisions} divisions ({(processed_divisions/total_divisions)*100:.2f}%)", end="")
203
-
204
- aggregate_goalie_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
205
- aggregate_goalie_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Weekly')
206
- aggregate_goalie_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Daily')
194
+ org_name = session.query(Organization.organization_name).filter(Organization.id == org_id).scalar() or f"org_id {org_id}"
207
195
 
208
- # Aggregate by level
209
- level_ids = session.query(Division.level_id).distinct().all()
210
- level_ids = [level_id[0] for level_id in level_ids]
211
- total_levels = len(level_ids)
212
- processed_levels = 0
213
- for level_id in level_ids:
214
- if level_id is None:
215
- continue
196
+ if human_id_to_debug is None and division_ids:
197
+ # Process divisions with progress tracking
198
+ progress = create_progress_tracker(len(division_ids), f"Processing {len(division_ids)} divisions for {org_name}")
199
+ for i, division_id in enumerate(division_ids):
200
+ aggregate_goalie_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
201
+ aggregate_goalie_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Weekly')
202
+ aggregate_goalie_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Daily')
203
+ progress.update(i + 1)
204
+ else:
205
+ # Debug mode or no divisions - process without progress tracking
206
+ for division_id in division_ids:
207
+ aggregate_goalie_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
208
+ aggregate_goalie_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Weekly')
209
+ aggregate_goalie_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Daily')
210
+
211
+ # Process org-level stats with progress tracking
216
212
  if human_id_to_debug is None:
217
- print(f"\rProcessed {processed_levels}/{total_levels} levels ({(processed_levels/total_levels)*100:.2f}%)", end="")
218
- processed_levels += 1
219
- aggregate_goalie_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
213
+ org_progress = create_progress_tracker(3, f"Processing org-level stats for {org_name}")
214
+ aggregate_goalie_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
215
+ org_progress.update(1)
216
+ aggregate_goalie_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Weekly')
217
+ org_progress.update(2)
218
+ aggregate_goalie_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Daily')
219
+ org_progress.update(3)
220
+ else:
221
+ aggregate_goalie_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
222
+ aggregate_goalie_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Weekly')
223
+ aggregate_goalie_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Daily')
224
+
225
+ # Aggregate by level
226
+ level_ids = session.query(Division.level_id).distinct().all()
227
+ level_ids = [level_id[0] for level_id in level_ids if level_id[0] is not None]
228
+
229
+ if human_id_to_debug is None and level_ids:
230
+ # Process levels with progress tracking
231
+ level_progress = create_progress_tracker(len(level_ids), f"Processing {len(level_ids)} skill levels")
232
+ for i, level_id in enumerate(level_ids):
233
+ aggregate_goalie_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
234
+ level_progress.update(i + 1)
235
+ else:
236
+ # Debug mode or no levels - process without progress tracking
237
+ for level_id in level_ids:
238
+ aggregate_goalie_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
220
239
 
221
240
  if __name__ == "__main__":
222
241
  run_aggregate_goalie_stats()
@@ -7,6 +7,7 @@ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
7
7
  from hockey_blast_common_lib.models import Game, GameRoster, Goal, Penalty
8
8
  from hockey_blast_common_lib.h2h_models import H2HStats, H2HStatsMeta
9
9
  from hockey_blast_common_lib.db_connection import create_session
10
+ from hockey_blast_common_lib.progress_utils import create_progress_tracker
10
11
  from sqlalchemy.sql import func
11
12
  from sqlalchemy import types
12
13
 
@@ -41,6 +42,9 @@ def aggregate_h2h_stats():
41
42
 
42
43
  total_games = games_query.count()
43
44
  print(f"Total games to process: {total_games}")
45
+
46
+ # Create progress tracker
47
+ progress = create_progress_tracker(total_games, "Processing H2H stats")
44
48
  processed = 0
45
49
  latest_game_id = None
46
50
  for game in games_query:
@@ -191,14 +195,11 @@ def aggregate_h2h_stats():
191
195
  # --- TODO: Add more detailed logic for goalie/skater, referee/player, shootouts, etc. ---
192
196
  latest_game_id = game.id
193
197
  processed += 1
194
- if processed % 10 == 0 or processed == total_games:
195
- print(f"\rProcessed {processed}/{total_games} games ({(processed/total_games)*100:.2f}%)", end="")
196
- sys.stdout.flush()
198
+ progress.update(processed)
197
199
  # Commit all stats at once
198
200
  session.query(H2HStats).delete()
199
201
  session.add_all(list(h2h_stats_dict.values()))
200
202
  session.commit()
201
- print(f"\rProcessed {processed}/{total_games} games (100.00%)")
202
203
  # Save/update meta
203
204
  meta = H2HStatsMeta(
204
205
  last_run_timestamp=datetime.utcnow(),
@@ -15,6 +15,7 @@ from hockey_blast_common_lib.utils import get_fake_human_for_stats, get_org_id_f
15
15
  from hockey_blast_common_lib.utils import assign_ranks
16
16
  from hockey_blast_common_lib.utils import get_start_datetime
17
17
  from hockey_blast_common_lib.stats_utils import ALL_ORGS_ID
18
+ from hockey_blast_common_lib.progress_utils import create_progress_tracker
18
19
 
19
20
  def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_filter_out, human_id_filter=None, aggregation_window=None):
20
21
  human_ids_to_filter = get_human_ids_by_names(session, names_to_filter_out)
@@ -425,35 +426,53 @@ def run_aggregate_human_stats():
425
426
  org_ids = session.query(Organization.id).all()
426
427
  org_ids = [org_id[0] for org_id in org_ids]
427
428
 
428
- for org_id in [-1]:#org_ids:
429
+ for org_id in org_ids:
429
430
  division_ids = get_all_division_ids_for_org(session, org_id)
430
- print(f"Aggregating human stats for {len(division_ids)} divisions in org_id {org_id}...")
431
- total_divisions = len(division_ids)
432
- processed_divisions = 0
433
- for division_id in division_ids:
434
- aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
435
- aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Weekly')
436
- aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Daily')
437
- processed_divisions += 1
438
- if human_id_to_debug is None:
439
- print(f"\rProcessed {processed_divisions}/{total_divisions} divisions ({(processed_divisions/total_divisions)*100:.2f}%)", end="")
440
- print("")
441
- aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
442
- aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Weekly')
443
- aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Daily')
431
+ org_name = session.query(Organization.organization_name).filter(Organization.id == org_id).scalar() or f"org_id {org_id}"
444
432
 
445
- # Aggregate by level
446
- level_ids = session.query(Division.level_id).distinct().all()
447
- level_ids = [level_id[0] for level_id in level_ids]
448
- total_levels = len(level_ids)
449
- processed_levels = 0
450
- for level_id in level_ids:
451
- if level_id is None:
452
- continue
433
+ if human_id_to_debug is None and division_ids:
434
+ # Process divisions with progress tracking
435
+ progress = create_progress_tracker(len(division_ids), f"Processing {len(division_ids)} divisions for {org_name}")
436
+ for i, division_id in enumerate(division_ids):
437
+ aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
438
+ aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Weekly')
439
+ aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Daily')
440
+ progress.update(i + 1)
441
+ else:
442
+ # Debug mode or no divisions - process without progress tracking
443
+ for division_id in division_ids:
444
+ aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
445
+ aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Weekly')
446
+ aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Daily')
447
+
448
+ # Process org-level stats with progress tracking
453
449
  if human_id_to_debug is None:
454
- print(f"\rProcessed {processed_levels}/{total_levels} levels ({(processed_levels/total_levels)*100:.2f}%)", end="")
455
- processed_levels += 1
456
- aggregate_human_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
450
+ org_progress = create_progress_tracker(3, f"Processing org-level stats for {org_name}")
451
+ aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
452
+ org_progress.update(1)
453
+ aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Weekly')
454
+ org_progress.update(2)
455
+ aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Daily')
456
+ org_progress.update(3)
457
+ else:
458
+ aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
459
+ aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Weekly')
460
+ aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Daily')
461
+
462
+ # Aggregate by level
463
+ level_ids = session.query(Division.level_id).distinct().all()
464
+ level_ids = [level_id[0] for level_id in level_ids if level_id[0] is not None]
465
+
466
+ if human_id_to_debug is None and level_ids:
467
+ # Process levels with progress tracking
468
+ level_progress = create_progress_tracker(len(level_ids), f"Processing {len(level_ids)} skill levels")
469
+ for i, level_id in enumerate(level_ids):
470
+ aggregate_human_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
471
+ level_progress.update(i + 1)
472
+ else:
473
+ # Debug mode or no levels - process without progress tracking
474
+ for level_id in level_ids:
475
+ aggregate_human_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
457
476
 
458
477
  if __name__ == "__main__":
459
478
  run_aggregate_human_stats()
@@ -15,6 +15,7 @@ from hockey_blast_common_lib.utils import get_org_id_from_alias, get_human_ids_b
15
15
  from hockey_blast_common_lib.utils import assign_ranks
16
16
  from hockey_blast_common_lib.utils import get_start_datetime
17
17
  from hockey_blast_common_lib.stats_utils import ALL_ORGS_ID
18
+ from hockey_blast_common_lib.progress_utils import create_progress_tracker
18
19
 
19
20
  def aggregate_referee_stats(session, aggregation_type, aggregation_id, names_to_filter_out, aggregation_window=None):
20
21
  human_ids_to_filter = get_human_ids_by_names(session, names_to_filter_out)
@@ -214,33 +215,51 @@ def run_aggregate_referee_stats():
214
215
 
215
216
  for org_id in org_ids:
216
217
  division_ids = get_all_division_ids_for_org(session, org_id)
217
- print(f"Aggregating referee stats for {len(division_ids)} divisions in org_id {org_id}...")
218
- total_divisions = len(division_ids)
219
- processed_divisions = 0
220
- for division_id in division_ids:
221
- aggregate_referee_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names)
222
- aggregate_referee_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, aggregation_window='Weekly')
223
- aggregate_referee_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, aggregation_window='Daily')
224
- processed_divisions += 1
225
- if human_id_to_debug is None:
226
- print(f"\rProcessed {processed_divisions}/{total_divisions} divisions ({(processed_divisions/total_divisions)*100:.2f}%)", end="")
227
-
228
- aggregate_referee_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names)
229
- aggregate_referee_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, aggregation_window='Weekly')
230
- aggregate_referee_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, aggregation_window='Daily')
218
+ org_name = session.query(Organization.organization_name).filter(Organization.id == org_id).scalar() or f"org_id {org_id}"
231
219
 
232
- # Aggregate by level
233
- level_ids = session.query(Division.level_id).distinct().all()
234
- level_ids = [level_id[0] for level_id in level_ids]
235
- total_levels = len(level_ids)
236
- processed_levels = 0
237
- for level_id in level_ids:
238
- if level_id is None:
239
- continue
220
+ if human_id_to_debug is None and division_ids:
221
+ # Process divisions with progress tracking
222
+ progress = create_progress_tracker(len(division_ids), f"Processing {len(division_ids)} divisions for {org_name}")
223
+ for i, division_id in enumerate(division_ids):
224
+ aggregate_referee_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names)
225
+ aggregate_referee_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, aggregation_window='Weekly')
226
+ aggregate_referee_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, aggregation_window='Daily')
227
+ progress.update(i + 1)
228
+ else:
229
+ # Debug mode or no divisions - process without progress tracking
230
+ for division_id in division_ids:
231
+ aggregate_referee_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names)
232
+ aggregate_referee_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, aggregation_window='Weekly')
233
+ aggregate_referee_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, aggregation_window='Daily')
234
+
235
+ # Process org-level stats with progress tracking
240
236
  if human_id_to_debug is None:
241
- print(f"\rProcessed {processed_levels}/{total_levels} levels ({(processed_levels/total_levels)*100:.2f}%)", end="")
242
- processed_levels += 1
243
- aggregate_referee_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names)
237
+ org_progress = create_progress_tracker(3, f"Processing org-level stats for {org_name}")
238
+ aggregate_referee_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names)
239
+ org_progress.update(1)
240
+ aggregate_referee_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, aggregation_window='Weekly')
241
+ org_progress.update(2)
242
+ aggregate_referee_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, aggregation_window='Daily')
243
+ org_progress.update(3)
244
+ else:
245
+ aggregate_referee_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names)
246
+ aggregate_referee_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, aggregation_window='Weekly')
247
+ aggregate_referee_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, aggregation_window='Daily')
248
+
249
+ # Aggregate by level
250
+ level_ids = session.query(Division.level_id).distinct().all()
251
+ level_ids = [level_id[0] for level_id in level_ids if level_id[0] is not None]
252
+
253
+ if human_id_to_debug is None and level_ids:
254
+ # Process levels with progress tracking
255
+ level_progress = create_progress_tracker(len(level_ids), f"Processing {len(level_ids)} skill levels")
256
+ for i, level_id in enumerate(level_ids):
257
+ aggregate_referee_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names)
258
+ level_progress.update(i + 1)
259
+ else:
260
+ # Debug mode or no levels - process without progress tracking
261
+ for level_id in level_ids:
262
+ aggregate_referee_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names)
244
263
 
245
264
  if __name__ == "__main__":
246
265
  run_aggregate_referee_stats()
@@ -0,0 +1,143 @@
1
+ import sys, os
2
+ from datetime import datetime
3
+
4
+ # Add the package directory to the Python path
5
+ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
6
+
7
+ from hockey_blast_common_lib.models import Game, Goal, Penalty, GameRoster
8
+ from hockey_blast_common_lib.h2h_models import SkaterToSkaterStats, SkaterToSkaterStatsMeta
9
+ from hockey_blast_common_lib.db_connection import create_session
10
+ from sqlalchemy.sql import func
11
+ from sqlalchemy import types
12
+
13
+ # Optional: Limit processing to a specific human_id
14
+ LIMIT_HUMAN_ID = None
15
+
16
+ def aggregate_s2s_stats():
17
+ session = create_session("boss")
18
+ meta = session.query(SkaterToSkaterStatsMeta).order_by(SkaterToSkaterStatsMeta.id.desc()).first()
19
+ s2s_stats_dict = {} # (skater1_id, skater2_id) -> SkaterToSkaterStats instance
20
+
21
+ if meta is None or meta.last_run_timestamp is None or meta.last_processed_game_id is None:
22
+ # Full run: delete all existing stats and process all games
23
+ session.query(SkaterToSkaterStats).delete()
24
+ session.commit()
25
+ games_query = session.query(Game).order_by(Game.date, Game.time, Game.id)
26
+ print("No previous run found, deleted all existing Skater-to-Skater stats, processing all games...")
27
+ else:
28
+ # Incremental: only process games after last processed
29
+ for stat in session.query(SkaterToSkaterStats).all():
30
+ s2s_stats_dict[(stat.skater1_id, stat.skater2_id)] = stat
31
+ last_game = session.query(Game).filter(Game.id == meta.last_processed_game_id).first()
32
+ if last_game:
33
+ last_dt = datetime.combine(last_game.date, last_game.time)
34
+ games_query = session.query(Game).filter(
35
+ func.cast(func.concat(Game.date, ' ', Game.time), types.TIMESTAMP()) > last_dt
36
+ ).order_by(Game.date, Game.time, Game.id)
37
+ print(f"Resuming from game after id {meta.last_processed_game_id} ({last_dt})...")
38
+ else:
39
+ games_query = session.query(Game).order_by(Game.date, Game.time, Game.id)
40
+ print("Previous game id not found, processing all games...")
41
+
42
+ total_games = games_query.count()
43
+ print(f"Total games to process: {total_games}")
44
+ processed = 0
45
+ latest_game_id = None
46
+
47
+ for game in games_query:
48
+ # Separate skaters into home and away rosters (exclude goalies)
49
+ home_skaters = [entry.human_id for entry in session.query(GameRoster).filter(GameRoster.game_id == game.id, GameRoster.team_id == game.home_team_id, ~GameRoster.role.ilike('g')).all()]
50
+ away_skaters = [entry.human_id for entry in session.query(GameRoster).filter(GameRoster.game_id == game.id, GameRoster.team_id == game.visitor_team_id, ~GameRoster.role.ilike('g')).all()]
51
+
52
+ if LIMIT_HUMAN_ID is not None and LIMIT_HUMAN_ID not in home_skaters + away_skaters:
53
+ continue
54
+
55
+ # Create pairs of skaters from different rosters
56
+ for h_skater in home_skaters:
57
+ for a_skater in away_skaters:
58
+ if LIMIT_HUMAN_ID is not None and LIMIT_HUMAN_ID not in [h_skater, a_skater]:
59
+ continue
60
+
61
+ s1, s2 = sorted([h_skater, a_skater])
62
+ key = (s1, s2)
63
+ s2s = s2s_stats_dict.get(key)
64
+ if not s2s:
65
+ s2s = SkaterToSkaterStats(
66
+ skater1_id=s1,
67
+ skater2_id=s2,
68
+ games_against=0,
69
+ games_tied_against=0,
70
+ skater1_wins_vs_skater2=0,
71
+ skater2_wins_vs_skater1=0,
72
+ skater1_goals_against_skater2=0,
73
+ skater2_goals_against_skater1=0,
74
+ skater1_assists_against_skater2=0,
75
+ skater2_assists_against_skater1=0,
76
+ skater1_penalties_against_skater2=0,
77
+ skater2_penalties_against_skater1=0
78
+ )
79
+ s2s_stats_dict[key] = s2s
80
+
81
+ # Update stats
82
+ s2s.games_against += 1
83
+ if _is_tie(game):
84
+ s2s.games_tied_against += 1
85
+ elif _is_win(game, s1, game.home_team_id):
86
+ s2s.skater1_wins_vs_skater2 += 1
87
+ elif _is_win(game, s2, game.visitor_team_id):
88
+ s2s.skater2_wins_vs_skater1 += 1
89
+
90
+ # Goals and assists
91
+ goals_stats = session.query(Goal).filter(Goal.game_id == game.id).all()
92
+ for goal in goals_stats:
93
+ if goal.goal_scorer_id == s1:
94
+ s2s.skater1_goals_against_skater2 += 1
95
+ if goal.goal_scorer_id == s2:
96
+ s2s.skater2_goals_against_skater1 += 1
97
+ if goal.assist_1_id == s1 or goal.assist_2_id == s1:
98
+ s2s.skater1_assists_against_skater2 += 1
99
+ if goal.assist_1_id == s2 or goal.assist_2_id == s2:
100
+ s2s.skater2_assists_against_skater1 += 1
101
+
102
+ # Penalties
103
+ penalties_stats = session.query(Penalty).filter(Penalty.game_id == game.id).all()
104
+ for penalty in penalties_stats:
105
+ if penalty.penalized_player_id == s1:
106
+ s2s.skater1_penalties_against_skater2 += 1
107
+ if penalty.penalized_player_id == s2:
108
+ s2s.skater2_penalties_against_skater1 += 1
109
+
110
+ latest_game_id = game.id
111
+ processed += 1
112
+ if processed % 10 == 0 or processed == total_games:
113
+ print(f"\rProcessed {processed}/{total_games} games ({(processed/total_games)*100:.2f}%)", end="")
114
+ sys.stdout.flush()
115
+
116
+ # Commit all stats at once
117
+ session.query(SkaterToSkaterStats).delete()
118
+ session.add_all(list(s2s_stats_dict.values()))
119
+ session.commit()
120
+ print(f"\rProcessed {processed}/{total_games} games (100.00%)")
121
+
122
+ # Save/update meta
123
+ meta = SkaterToSkaterStatsMeta(
124
+ last_run_timestamp=datetime.utcnow(),
125
+ last_processed_game_id=latest_game_id
126
+ )
127
+ session.add(meta)
128
+ session.commit()
129
+ print("Skater-to-Skater aggregation complete.")
130
+
131
+ # --- Helper functions for win/loss/tie ---
132
+ def _is_win(game, skater_id, team_id):
133
+ if team_id == game.home_team_id:
134
+ return (game.home_final_score or 0) > (game.visitor_final_score or 0)
135
+ if team_id == game.visitor_team_id:
136
+ return (game.visitor_final_score or 0) > (game.home_final_score or 0)
137
+ return False
138
+
139
+ def _is_tie(game):
140
+ return (game.home_final_score is not None and game.visitor_final_score is not None and game.home_final_score == game.visitor_final_score)
141
+
142
+ if __name__ == "__main__":
143
+ aggregate_s2s_stats()
@@ -16,6 +16,53 @@ from hockey_blast_common_lib.utils import get_start_datetime
16
16
  from sqlalchemy import func, case, and_
17
17
  from collections import defaultdict
18
18
  from hockey_blast_common_lib.stats_utils import ALL_ORGS_ID
19
+ from hockey_blast_common_lib.progress_utils import create_progress_tracker
20
+
21
+ def calculate_current_point_streak(session, human_id, filter_condition):
22
+ """
23
+ Calculate the current point streak for a player.
24
+ A point streak is consecutive games (from the most recent game backward) where the player had at least one point.
25
+ Returns a tuple: (streak_length, average_points_during_streak)
26
+ """
27
+ # Get all games for this player ordered by date/time descending (most recent first)
28
+ games = session.query(Game).join(GameRoster, Game.id == GameRoster.game_id).filter(
29
+ GameRoster.human_id == human_id,
30
+ ~GameRoster.role.ilike('g'), # Exclude goalie games
31
+ filter_condition,
32
+ Game.status.like('Final%') # Only final games
33
+ ).order_by(Game.date.desc(), Game.time.desc()).all()
34
+
35
+ if not games:
36
+ return 0, 0.0
37
+
38
+ current_streak = 0
39
+ total_points_in_streak = 0
40
+
41
+ for game in games:
42
+ # Check if the player had any points in this game
43
+ goals = session.query(Goal).filter(
44
+ Goal.game_id == game.id,
45
+ Goal.goal_scorer_id == human_id
46
+ ).count()
47
+
48
+ assists = session.query(Goal).filter(
49
+ Goal.game_id == game.id,
50
+ ((Goal.assist_1_id == human_id) | (Goal.assist_2_id == human_id))
51
+ ).count()
52
+
53
+ total_points = goals + assists
54
+
55
+ if total_points > 0:
56
+ current_streak += 1
57
+ total_points_in_streak += total_points
58
+ else:
59
+ # Streak is broken, stop counting
60
+ break
61
+
62
+ # Calculate average points during streak
63
+ avg_points_during_streak = total_points_in_streak / current_streak if current_streak > 0 else 0.0
64
+
65
+ return current_streak, avg_points_during_streak
19
66
 
20
67
  def aggregate_skater_stats(session, aggregation_type, aggregation_id, names_to_filter_out, debug_human_id=None, aggregation_window=None):
21
68
  human_ids_to_filter = get_human_ids_by_names(session, names_to_filter_out)
@@ -57,7 +104,12 @@ def aggregate_skater_stats(session, aggregation_type, aggregation_id, names_to_f
57
104
  elif aggregation_type == 'level':
58
105
  StatsModel = LevelStatsSkater
59
106
  min_games = MIN_GAMES_FOR_LEVEL_STATS
60
- filter_condition = Division.level_id == aggregation_id
107
+ # Get division IDs for this level to avoid cartesian product
108
+ division_ids = session.query(Division.id).filter(Division.level_id == aggregation_id).all()
109
+ division_ids = [div_id[0] for div_id in division_ids]
110
+ if not division_ids:
111
+ return # No divisions for this level
112
+ filter_condition = Game.division_id.in_(division_ids)
61
113
  # Add filter to only include games for the last 5 years
62
114
  # five_years_ago = datetime.now() - timedelta(days=5*365)
63
115
  # level_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP) >= five_years_ago
@@ -86,39 +138,65 @@ def aggregate_skater_stats(session, aggregation_type, aggregation_id, names_to_f
86
138
  # human_filter = [GameRoster.human_id == debug_human_id]
87
139
 
88
140
  # Aggregate games played for each human in each division, excluding goalies
89
- games_played_stats = session.query(
141
+ games_played_query = session.query(
90
142
  GameRoster.human_id,
91
143
  func.count(Game.id).label('games_played'),
92
144
  func.array_agg(Game.id).label('game_ids')
93
- ).join(Game, Game.id == GameRoster.game_id).join(Division, Game.division_id == Division.id).filter(filter_condition, ~GameRoster.role.ilike('g'), *human_filter).group_by(GameRoster.human_id).all()
145
+ ).join(Game, Game.id == GameRoster.game_id)
146
+
147
+ # Only join Division if not level aggregation (since we filter on Game.division_id directly for levels)
148
+ if aggregation_type != 'level':
149
+ games_played_query = games_played_query.join(Division, Game.division_id == Division.id)
150
+
151
+ games_played_stats = games_played_query.filter(filter_condition, ~GameRoster.role.ilike('g'), *human_filter).group_by(GameRoster.human_id).all()
94
152
 
95
153
  # Aggregate goals for each human in each division, excluding goalies
96
- goals_stats = session.query(
154
+ goals_query = session.query(
97
155
  Goal.goal_scorer_id.label('human_id'),
98
156
  func.count(Goal.id).label('goals'),
99
157
  func.array_agg(Goal.game_id).label('goal_game_ids')
100
- ).join(Game, Game.id == Goal.game_id).join(GameRoster, and_(Game.id == GameRoster.game_id, Goal.goal_scorer_id == GameRoster.human_id)).join(Division, Game.division_id == Division.id).filter(filter_condition, ~GameRoster.role.ilike('g'), *human_filter).group_by(Goal.goal_scorer_id).all()
158
+ ).join(Game, Game.id == Goal.game_id).join(GameRoster, and_(Game.id == GameRoster.game_id, Goal.goal_scorer_id == GameRoster.human_id))
159
+
160
+ if aggregation_type != 'level':
161
+ goals_query = goals_query.join(Division, Game.division_id == Division.id)
162
+
163
+ goals_stats = goals_query.filter(filter_condition, ~GameRoster.role.ilike('g'), *human_filter).group_by(Goal.goal_scorer_id).all()
101
164
 
102
165
  # Aggregate assists for each human in each division, excluding goalies
103
- assists_stats = session.query(
166
+ assists_query = session.query(
104
167
  Goal.assist_1_id.label('human_id'),
105
168
  func.count(Goal.id).label('assists'),
106
169
  func.array_agg(Goal.game_id).label('assist_game_ids')
107
- ).join(Game, Game.id == Goal.game_id).join(GameRoster, and_(Game.id == GameRoster.game_id, Goal.assist_1_id == GameRoster.human_id)).join(Division, Game.division_id == Division.id).filter(filter_condition, ~GameRoster.role.ilike('g'), *human_filter).group_by(Goal.assist_1_id).all()
108
-
109
- assists_stats_2 = session.query(
170
+ ).join(Game, Game.id == Goal.game_id).join(GameRoster, and_(Game.id == GameRoster.game_id, Goal.assist_1_id == GameRoster.human_id))
171
+
172
+ if aggregation_type != 'level':
173
+ assists_query = assists_query.join(Division, Game.division_id == Division.id)
174
+
175
+ assists_stats = assists_query.filter(filter_condition, ~GameRoster.role.ilike('g'), *human_filter).group_by(Goal.assist_1_id).all()
176
+
177
+ assists_query_2 = session.query(
110
178
  Goal.assist_2_id.label('human_id'),
111
179
  func.count(Goal.id).label('assists'),
112
180
  func.array_agg(Goal.game_id).label('assist_2_game_ids')
113
- ).join(Game, Game.id == Goal.game_id).join(GameRoster, and_(Game.id == GameRoster.game_id, Goal.assist_2_id == GameRoster.human_id)).join(Division, Game.division_id == Division.id).filter(filter_condition, ~GameRoster.role.ilike('g'), *human_filter).group_by(Goal.assist_2_id).all()
181
+ ).join(Game, Game.id == Goal.game_id).join(GameRoster, and_(Game.id == GameRoster.game_id, Goal.assist_2_id == GameRoster.human_id))
182
+
183
+ if aggregation_type != 'level':
184
+ assists_query_2 = assists_query_2.join(Division, Game.division_id == Division.id)
185
+
186
+ assists_stats_2 = assists_query_2.filter(filter_condition, ~GameRoster.role.ilike('g'), *human_filter).group_by(Goal.assist_2_id).all()
114
187
 
115
188
  # Aggregate penalties for each human in each division, excluding goalies
116
- penalties_stats = session.query(
189
+ penalties_query = session.query(
117
190
  Penalty.penalized_player_id.label('human_id'),
118
191
  func.count(Penalty.id).label('penalties'),
119
192
  func.sum(case((Penalty.penalty_minutes == 'GM', 1), else_=0)).label('gm_penalties'), # New aggregation for GM penalties
120
193
  func.array_agg(Penalty.game_id).label('penalty_game_ids')
121
- ).join(Game, Game.id == Penalty.game_id).join(GameRoster, and_(Game.id == GameRoster.game_id, Penalty.penalized_player_id == GameRoster.human_id)).join(Division, Game.division_id == Division.id).filter(filter_condition, ~GameRoster.role.ilike('g'), *human_filter).group_by(Penalty.penalized_player_id).all()
194
+ ).join(Game, Game.id == Penalty.game_id).join(GameRoster, and_(Game.id == GameRoster.game_id, Penalty.penalized_player_id == GameRoster.human_id))
195
+
196
+ if aggregation_type != 'level':
197
+ penalties_query = penalties_query.join(Division, Game.division_id == Division.id)
198
+
199
+ penalties_stats = penalties_query.filter(filter_condition, ~GameRoster.role.ilike('g'), *human_filter).group_by(Penalty.penalized_player_id).all()
122
200
 
123
201
  # Combine the results
124
202
  stats_dict = {}
@@ -139,6 +217,8 @@ def aggregate_skater_stats(session, aggregation_type, aggregation_id, names_to_f
139
217
  'assists_per_game': 0.0,
140
218
  'penalties_per_game': 0.0,
141
219
  'gm_penalties_per_game': 0.0, # Initialize GM penalties per game
220
+ 'current_point_streak': 0, # Initialize current point streak
221
+ 'current_point_streak_avg_points': 0.0, # Initialize current point streak average points
142
222
  'game_ids': [],
143
223
  'first_game_id': None,
144
224
  'last_game_id': None
@@ -194,6 +274,14 @@ def aggregate_skater_stats(session, aggregation_type, aggregation_id, names_to_f
194
274
  stat['first_game_id'] = first_game.id if first_game else None
195
275
  stat['last_game_id'] = last_game.id if last_game else None
196
276
 
277
+ # Calculate current point streak (only for all-time stats)
278
+ if aggregation_window is None:
279
+ for key, stat in stats_dict.items():
280
+ aggregation_id, human_id = key
281
+ streak_length, avg_points = calculate_current_point_streak(session, human_id, filter_condition)
282
+ stat['current_point_streak'] = streak_length
283
+ stat['current_point_streak_avg_points'] = avg_points
284
+
197
285
  # Calculate total_in_rank
198
286
  total_in_rank = len(stats_dict)
199
287
 
@@ -214,6 +302,9 @@ def aggregate_skater_stats(session, aggregation_type, aggregation_id, names_to_f
214
302
  assign_ranks(stats_dict, 'assists_per_game')
215
303
  assign_ranks(stats_dict, 'penalties_per_game')
216
304
  assign_ranks(stats_dict, 'gm_penalties_per_game') # Assign ranks for GM penalties per game
305
+ if aggregation_window is None: # Only assign current_point_streak ranks for all-time stats
306
+ assign_ranks(stats_dict, 'current_point_streak')
307
+ assign_ranks(stats_dict, 'current_point_streak_avg_points')
217
308
 
218
309
  # Debug output for specific human
219
310
  if debug_human_id:
@@ -262,6 +353,10 @@ def aggregate_skater_stats(session, aggregation_type, aggregation_id, names_to_f
262
353
  penalties_per_game_rank=stat['penalties_per_game_rank'],
263
354
  gm_penalties_per_game_rank=stat['gm_penalties_per_game_rank'], # Include GM penalties per game rank
264
355
  total_in_rank=total_in_rank,
356
+ current_point_streak=stat.get('current_point_streak', 0),
357
+ current_point_streak_rank=stat.get('current_point_streak_rank', 0),
358
+ current_point_streak_avg_points=stat.get('current_point_streak_avg_points', 0.0),
359
+ current_point_streak_avg_points_rank=stat.get('current_point_streak_avg_points_rank', 0),
265
360
  first_game_id=stat['first_game_id'],
266
361
  last_game_id=stat['last_game_id']
267
362
  )
@@ -281,33 +376,51 @@ def run_aggregate_skater_stats():
281
376
 
282
377
  for org_id in org_ids:
283
378
  division_ids = get_all_division_ids_for_org(session, org_id)
284
- print(f"Aggregating skater stats for {len(division_ids)} divisions in org_id {org_id}...")
285
- total_divisions = len(division_ids)
286
- processed_divisions = 0
287
- for division_id in division_ids:
288
- aggregate_skater_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
289
- aggregate_skater_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Weekly')
290
- aggregate_skater_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Daily')
291
- processed_divisions += 1
292
- if human_id_to_debug is None:
293
- print(f"\rProcessed {processed_divisions}/{total_divisions} divisions ({(processed_divisions/total_divisions)*100:.2f}%)", end="")
294
-
295
- aggregate_skater_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
296
- aggregate_skater_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Weekly')
297
- aggregate_skater_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Daily')
379
+ org_name = session.query(Organization.organization_name).filter(Organization.id == org_id).scalar() or f"org_id {org_id}"
380
+
381
+ if human_id_to_debug is None and division_ids:
382
+ # Process divisions with progress tracking
383
+ progress = create_progress_tracker(len(division_ids), f"Processing {len(division_ids)} divisions for {org_name}")
384
+ for i, division_id in enumerate(division_ids):
385
+ aggregate_skater_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
386
+ aggregate_skater_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Weekly')
387
+ aggregate_skater_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Daily')
388
+ progress.update(i + 1)
389
+ else:
390
+ # Debug mode or no divisions - process without progress tracking
391
+ for division_id in division_ids:
392
+ aggregate_skater_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
393
+ aggregate_skater_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Weekly')
394
+ aggregate_skater_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Daily')
395
+
396
+ # Process org-level stats with progress tracking
397
+ if human_id_to_debug is None:
398
+ org_progress = create_progress_tracker(3, f"Processing org-level stats for {org_name}")
399
+ aggregate_skater_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
400
+ org_progress.update(1)
401
+ aggregate_skater_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Weekly')
402
+ org_progress.update(2)
403
+ aggregate_skater_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Daily')
404
+ org_progress.update(3)
405
+ else:
406
+ aggregate_skater_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
407
+ aggregate_skater_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Weekly')
408
+ aggregate_skater_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Daily')
298
409
 
299
410
  # Aggregate by level
300
411
  level_ids = session.query(Division.level_id).distinct().all()
301
- level_ids = [level_id[0] for level_id in level_ids]
302
- total_levels = len(level_ids)
303
- processed_levels = 0
304
- for level_id in level_ids:
305
- if level_id is None:
306
- continue
307
- if human_id_to_debug is None:
308
- print(f"\rProcessed {processed_levels}/{total_levels} levels ({(processed_levels/total_levels)*100:.2f}%)", end="")
309
- processed_levels += 1
310
- aggregate_skater_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
412
+ level_ids = [level_id[0] for level_id in level_ids if level_id[0] is not None]
413
+
414
+ if human_id_to_debug is None and level_ids:
415
+ # Process levels with progress tracking
416
+ level_progress = create_progress_tracker(len(level_ids), f"Processing {len(level_ids)} skill levels")
417
+ for i, level_id in enumerate(level_ids):
418
+ aggregate_skater_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
419
+ level_progress.update(i + 1)
420
+ else:
421
+ # Debug mode or no levels - process without progress tracking
422
+ for level_id in level_ids:
423
+ aggregate_skater_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
311
424
 
312
425
  if __name__ == "__main__":
313
426
  run_aggregate_skater_stats()
@@ -6,43 +6,53 @@ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
6
6
  from hockey_blast_common_lib.models import Human, Level
7
7
  from hockey_blast_common_lib.stats_models import LevelStatsSkater
8
8
  from hockey_blast_common_lib.db_connection import create_session
9
- from sqlalchemy.sql import func
9
+ from hockey_blast_common_lib.progress_utils import create_progress_tracker
10
10
 
11
- def calculate_skater_skill_value(session, human_id, level_stats):
12
- max_skill_value = 0
11
def calculate_skater_skill_value(session, level_stats):
    """
    Derive a single skill value for a skater from their per-level stats.

    For every level the skater has played in, the level's base skill value
    is scaled by where the skater ranks in points-per-game at that level
    (rank 1 = best), then the lowest (= best, since lower skill_value means
    a stronger level) scaled value is returned. Returns 0 when no level
    yields a usable value.
    """
    candidate_values = []
    for stat in level_stats:
        level = session.query(Level).filter(Level.id == stat.level_id).first()
        # Levels without a valid (non-negative) skill value are ignored.
        if not level or level.skill_value < 0:
            continue

        # Invert the rank into a factor: rank 1 (best) -> 1.0,
        # worst rank -> 0.0. A lone player at a level gets 1.0.
        if stat.total_in_rank > 1:
            rank_factor = 1 - (stat.points_per_game_rank - 1) / (stat.total_in_rank - 1)
        else:
            rank_factor = 1.0

        # Scale the level's base skill by 0.8x (best player) .. 1.2x
        # (worst player); lower skill_value is better.
        candidate_values.append(level.skill_value * (1.2 - 0.4 * rank_factor))

    # Lower is better across levels; 0 when the skater has no rated levels.
    return min(candidate_values, default=0)
27
36
 
28
37
def assign_skater_skill_values():
    """
    Compute and persist a skill value for every human with skater stats.

    Walks all Human rows, derives each one's skill value from their
    per-level skater stats, stores it on the row, and reports progress
    on the console as it goes.
    """
    session = create_session("boss")

    all_humans = session.query(Human).all()
    progress = create_progress_tracker(len(all_humans), "Assigning skater skill values")

    for done_count, human in enumerate(all_humans, start=1):
        stats_for_human = session.query(LevelStatsSkater).filter(
            LevelStatsSkater.human_id == human.id
        ).all()
        if stats_for_human:
            human.skater_skill_value = calculate_skater_skill_value(session, stats_for_human)
            session.commit()

        progress.update(done_count)

    print("Skater skill values have been assigned to all humans.")

if __name__ == "__main__":
    assign_skater_skill_values()
@@ -75,3 +75,33 @@ class H2HStatsMeta(db.Model):
75
75
  id = db.Column(db.Integer, primary_key=True)
76
76
  last_run_timestamp = db.Column(db.DateTime, nullable=True) # When the h2h stats were last updated
77
77
  last_processed_game_id = db.Column(db.Integer, nullable=True) # Game.id of the latest processed game
78
+
79
class SkaterToSkaterStats(db.Model):
    """
    Head-to-head aggregate stats for a pair of skaters who played against
    each other. Each pair of humans appears in at most one row, enforced
    by the unique constraint on (skater1_id, skater2_id).
    """
    __tablename__ = 'skater_to_skater_stats'
    id = db.Column(db.Integer, primary_key=True)
    # Both sides of the pair reference Human rows.
    # NOTE(review): which human becomes skater1 vs skater2 (e.g. lower id
    # first) is decided by the aggregation code — confirm there.
    skater1_id = db.Column(db.Integer, db.ForeignKey('humans.id'), nullable=False)
    skater2_id = db.Column(db.Integer, db.ForeignKey('humans.id'), nullable=False)
    __table_args__ = (
        db.UniqueConstraint('skater1_id', 'skater2_id', name='_s2s_skater_pair_uc'),
        db.Index('ix_s2s_skater_pair', 'skater1_id', 'skater2_id'),
    )

    # General stats
    games_against = db.Column(db.Integer, default=0, nullable=False)
    games_tied_against = db.Column(db.Integer, default=0, nullable=False)
    skater1_wins_vs_skater2 = db.Column(db.Integer, default=0, nullable=False)
    skater2_wins_vs_skater1 = db.Column(db.Integer, default=0, nullable=False)

    # Cumulative stats
    skater1_goals_against_skater2 = db.Column(db.Integer, default=0, nullable=False)
    skater2_goals_against_skater1 = db.Column(db.Integer, default=0, nullable=False)
    skater1_assists_against_skater2 = db.Column(db.Integer, default=0, nullable=False)
    skater2_assists_against_skater1 = db.Column(db.Integer, default=0, nullable=False)
    skater1_penalties_against_skater2 = db.Column(db.Integer, default=0, nullable=False)
    skater2_penalties_against_skater1 = db.Column(db.Integer, default=0, nullable=False)
102
+
103
class SkaterToSkaterStatsMeta(db.Model):
    """Bookkeeping row for incremental skater-vs-skater aggregation runs."""
    __tablename__ = 'skater_to_skater_stats_meta'
    id = db.Column(db.Integer, primary_key=True)
    last_run_timestamp = db.Column(db.DateTime, nullable=True)  # When the s2s stats were last updated
    last_processed_game_id = db.Column(db.Integer, nullable=True)  # Game.id of the latest processed game
@@ -322,7 +322,11 @@ class Shootout(db.Model):
322
322
  class Team(db.Model):
323
323
  __tablename__ = 'teams'
324
324
  id = db.Column(db.Integer, primary_key=True)
325
- name = db.Column(db.String(100), unique=True, nullable=False)
325
+ name = db.Column(db.String(100), nullable=False)
326
+ org_id = db.Column(db.Integer, db.ForeignKey('organizations.id'), nullable=False)
327
+ __table_args__ = (
328
+ db.UniqueConstraint('org_id', 'name', name='_org_team_name_uc'),
329
+ )
326
330
 
327
331
  class TeamDivision(db.Model):
328
332
  __tablename__ = 'teams_divisions'
@@ -351,6 +355,7 @@ class RequestLog(db.Model):
351
355
  path = db.Column(db.String, nullable=False)
352
356
  timestamp = db.Column(db.DateTime, nullable=False)
353
357
  cgi_params = db.Column(db.String, nullable=True)
358
+ response_time_ms = db.Column(db.Float, nullable=True) # Response time in milliseconds
354
359
 
355
360
  # # MANUAL AMENDS HAPPEN HERE :)
356
361
  # from db_connection import create_session
@@ -0,0 +1,91 @@
1
+ import time
2
+ from datetime import datetime, timedelta
3
+
4
class ProgressTracker:
    """
    Reusable progress tracker with ETA calculation for stats aggregation processes.

    Prints a single self-overwriting console line (via '\r') with the
    processed count, percentage, elapsed time and estimated time remaining.
    """

    def __init__(self, total_items, description="Processing"):
        # total_items may legitimately be 0 for empty workloads; all
        # percentage/ETA math below guards against dividing by it.
        self.total_items = total_items
        self.description = description
        self.start_time = time.time()
        self.processed_items = 0
        # Timestamp of the last console refresh, used to throttle output.
        self.last_update_time = self.start_time

    def update(self, processed_count=None):
        """
        Update progress. If processed_count is None, increment by 1;
        otherwise set the absolute processed count.
        """
        if processed_count is not None:
            self.processed_items = processed_count
        else:
            self.processed_items += 1

        current_time = time.time()

        # Only update display if at least 0.1 seconds have passed (to avoid
        # spamming the console), or when the work is complete.
        if current_time - self.last_update_time >= 0.1 or self.processed_items == self.total_items:
            self._display_progress()
            self.last_update_time = current_time

    def _display_progress(self):
        """
        Display progress with percentage, ETA, and elapsed time.
        """
        current_time = time.time()
        elapsed_time = current_time - self.start_time

        if self.processed_items == 0:
            percentage = 0.0
            eta_str = "calculating..."
        elif self.total_items <= 0:
            # Guard: an empty workload would otherwise divide by zero below.
            percentage = 100.0
            eta_str = "completed!"
        else:
            percentage = (self.processed_items / self.total_items) * 100

            # Calculate ETA ('>=' so an overshooting count never yields a
            # negative remaining-time estimate).
            if self.processed_items >= self.total_items:
                eta_str = "completed!"
            else:
                avg_time_per_item = elapsed_time / self.processed_items
                remaining_items = self.total_items - self.processed_items
                eta_seconds = avg_time_per_item * remaining_items
                eta_str = self._format_time(eta_seconds)

        elapsed_str = self._format_time(elapsed_time)

        progress_msg = f"\r{self.description}: {self.processed_items}/{self.total_items} ({percentage:.1f}%) | "
        progress_msg += f"Elapsed: {elapsed_str} | ETA: {eta_str}"

        print(progress_msg, end="", flush=True)

        # Add newline when complete so later output starts on a fresh line.
        if self.processed_items == self.total_items:
            print()

    def _format_time(self, seconds):
        """
        Format seconds into a human-readable string (s / m s / h m).
        """
        if seconds < 60:
            return f"{seconds:.1f}s"
        elif seconds < 3600:
            minutes = int(seconds // 60)
            secs = int(seconds % 60)
            return f"{minutes}m {secs}s"
        else:
            hours = int(seconds // 3600)
            minutes = int((seconds % 3600) // 60)
            return f"{hours}h {minutes}m"

    def finish(self):
        """
        Mark progress as complete and add final newline.
        """
        self.processed_items = self.total_items
        self._display_progress()
86
+
87
def create_progress_tracker(total_items, description="Processing"):
    """
    Factory function to create a progress tracker.

    Args:
        total_items: total number of items the tracker will count up to.
        description: label shown at the start of the progress line.

    Returns:
        A new ProgressTracker instance.
    """
    return ProgressTracker(total_items, description)
@@ -9,6 +9,7 @@ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
9
9
  from hockey_blast_common_lib.models import Level, Division
10
10
  from hockey_blast_common_lib.stats_models import LevelsGraphEdge, LevelStatsSkater, SkillValuePPGRatio
11
11
  from hockey_blast_common_lib.db_connection import create_session
12
+ from hockey_blast_common_lib.progress_utils import create_progress_tracker
12
13
  from sqlalchemy import func
13
14
 
14
15
  import numpy as np
@@ -56,10 +57,11 @@ def reset_skill_values_in_divisions():
56
57
  # If no match found, check each alternative name individually
57
58
  skills = session.query(Level).filter(Level.org_id == division.org_id).all()
58
59
  for s in skills:
59
- alternative_names = s.level_alternative_name.split(',')
60
- if div_level in alternative_names:
61
- level = s
62
- break
60
+ if s.level_alternative_name: # Check if not None
61
+ alternative_names = s.level_alternative_name.split(',')
62
+ if div_level in alternative_names:
63
+ level = s
64
+ break
63
65
 
64
66
  if level:
65
67
  # Assign the skill_value and set skill_propagation_sequence to 0
@@ -70,19 +72,30 @@ def reset_skill_values_in_divisions():
70
72
  level.skill_propagation_sequence = -1
71
73
  level.skill_value = -1
72
74
  else:
73
- # Add new Skill with values previously used for division
74
- new_level = Level(
75
- org_id=division.org_id,
76
- skill_value=-1,
77
- level_name=division.level,
78
- level_alternative_name='',
79
- is_seed=False,
80
- skill_propagation_sequence=-1
81
- )
82
- session.add(new_level)
83
- session.commit()
84
- division.skill_id = new_level.id
85
- print(f"Created new Level for Division {division.level}")
75
+ # Check if level already exists with this org_id/level_name combination
76
+ existing_level = session.query(Level).filter(
77
+ Level.org_id == division.org_id,
78
+ Level.level_name == division.level
79
+ ).first()
80
+
81
+ if existing_level:
82
+ # Use existing level
83
+ division.level_id = existing_level.id
84
+ print(f"Using existing Level for Division {division.level}")
85
+ else:
86
+ # Add new Skill with values previously used for division
87
+ new_level = Level(
88
+ org_id=division.org_id,
89
+ skill_value=-1,
90
+ level_name=division.level,
91
+ level_alternative_name='',
92
+ is_seed=False,
93
+ skill_propagation_sequence=-1
94
+ )
95
+ session.add(new_level)
96
+ session.commit()
97
+ division.level_id = new_level.id
98
+ print(f"Created new Level for Division {division.level}")
86
99
 
87
100
  # Commit the changes to the Division
88
101
  session.commit()
@@ -113,14 +126,20 @@ def build_levels_graph_edges():
113
126
  # Dictionary to store edges
114
127
  edges = {}
115
128
 
116
- # Build edges
129
+ # Build edges - batch load all levels first for performance
130
+ all_level_ids = list(level_human_stats.keys())
131
+ levels_dict = {level.id: level for level in session.query(Level).filter(Level.id.in_(all_level_ids)).all()}
132
+
117
133
  total_levels = len(level_human_stats)
134
+ progress = create_progress_tracker(total_levels, "Building level graph edges")
118
135
  processed_levels = 0
119
136
  for from_level_id, from_humans in level_human_stats.items():
120
- from_level = session.query(Level).filter_by(id=from_level_id).first()
137
+ from_level = levels_dict.get(from_level_id)
138
+ if not from_level:
139
+ continue
121
140
  for to_level_id, to_humans in level_human_stats.items():
122
- to_level = session.query(Level).filter_by(id=to_level_id).first()
123
- if from_level.id >= to_level.id:
141
+ to_level = levels_dict.get(to_level_id)
142
+ if not to_level or from_level.id >= to_level.id:
124
143
  continue
125
144
 
126
145
  common_humans = set(from_humans.keys()) & set(to_humans.keys())
@@ -171,7 +190,7 @@ def build_levels_graph_edges():
171
190
  edges[(from_level_id, to_level_id)] = edge
172
191
 
173
192
  processed_levels += 1
174
- print(f"\rProcessed {processed_levels}/{total_levels} levels ({(processed_levels/total_levels)*100:.2f}%)", end="")
193
+ progress.update(processed_levels)
175
194
 
176
195
  # Insert edges into the database
177
196
  for edge in edges.values():
@@ -313,12 +332,12 @@ def propagate_skill_levels(propagation_sequence):
313
332
  # First confirm which way are we going here
314
333
  if (ppg_ratio_edge < 1 and correlation.ppg_ratio > 1) or (ppg_ratio_edge > 1 and correlation.ppg_ratio < 1):
315
334
  # Reverse the correlation
316
- from_skill_value=correlation.to_skill_value
317
- to_skill_value=correlation.from_skill_value
335
+ from_skill_value = correlation.to_skill_value
336
+ to_skill_value = correlation.from_skill_value
318
337
  ppg_ratio_range = 1 / correlation.ppg_ratio
319
338
  else:
320
- from_skill_value=correlation.from_skill_value
321
- to_skill_value=correlation.to_skill_value
339
+ from_skill_value = correlation.from_skill_value
340
+ to_skill_value = correlation.to_skill_value
322
341
  ppg_ratio_range = correlation.ppg_ratio
323
342
 
324
343
  # Now both ratios are either < 1 or > 1
@@ -345,6 +364,7 @@ def propagate_skill_levels(propagation_sequence):
345
364
  suggested_skill_values[target_level_id].append(weighted_avg_skill_value)
346
365
 
347
366
  # Update skill values for target levels
367
+ session.flush() # Ensure all previous changes are flushed before updates
348
368
  for target_level_id, skill_values in suggested_skill_values.items():
349
369
  skill_values = Config.discard_outliers(np.array(skill_values))
350
370
  if len(skill_values) > 0:
@@ -352,10 +372,16 @@ def propagate_skill_levels(propagation_sequence):
352
372
  avg_skill_value = max(avg_skill_value, 9.6)
353
373
  if avg_skill_value < min_skill_value:
354
374
  avg_skill_value = min_skill_value - 0.01
355
- session.query(Level).filter_by(id=target_level_id).update({
356
- 'skill_value': avg_skill_value,
357
- 'skill_propagation_sequence': propagation_sequence + 1
358
- })
375
+ try:
376
+ session.query(Level).filter_by(id=target_level_id).update({
377
+ 'skill_value': avg_skill_value,
378
+ 'skill_propagation_sequence': propagation_sequence + 1
379
+ })
380
+ session.flush() # Flush each update individually
381
+ except Exception as e:
382
+ print(f"Error updating level {target_level_id}: {e}")
383
+ session.rollback()
384
+ continue
359
385
  session.commit()
360
386
 
361
387
  print(f"Skill levels have been propagated for sequence {propagation_sequence}.")
@@ -75,6 +75,10 @@ class BaseStatsSkater(db.Model):
75
75
  gm_penalties_per_game = db.Column(db.Float, default=0.0)
76
76
  gm_penalties_per_game_rank = db.Column(db.Integer, default=0)
77
77
  total_in_rank = db.Column(db.Integer, default=0)
78
+ current_point_streak = db.Column(db.Integer, default=0)
79
+ current_point_streak_rank = db.Column(db.Integer, default=0)
80
+ current_point_streak_avg_points = db.Column(db.Float, default=0.0)
81
+ current_point_streak_avg_points_rank = db.Column(db.Integer, default=0)
78
82
  first_game_id = db.Column(db.Integer, db.ForeignKey('games.id'))
79
83
  last_game_id = db.Column(db.Integer, db.ForeignKey('games.id'))
80
84
 
@@ -87,6 +91,8 @@ class BaseStatsSkater(db.Model):
87
91
  db.Index(f'idx_{cls.aggregation_type}_assists_per_game3', cls.get_aggregation_column(), 'assists_per_game'),
88
92
  db.Index(f'idx_{cls.aggregation_type}_penalties_per_game3', cls.get_aggregation_column(), 'penalties_per_game'),
89
93
  db.Index(f'idx_{cls.aggregation_type}_gm_penalties_per_game3', cls.get_aggregation_column(), 'gm_penalties_per_game'),
94
+ db.Index(f'idx_{cls.aggregation_type}_current_point_streak3', cls.get_aggregation_column(), 'current_point_streak'),
95
+ db.Index(f'idx_{cls.aggregation_type}_current_point_streak_avg_points3', cls.get_aggregation_column(), 'current_point_streak_avg_points'),
90
96
  db.Index(f'idx_{cls.aggregation_type}_games_played3', cls.get_aggregation_column(), 'games_played')
91
97
  )
92
98
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: hockey-blast-common-lib
3
- Version: 0.1.51
3
+ Version: 0.1.54
4
4
  Summary: Common library for shared functionality and DB models
5
5
  Author: Pavel Kletskov
6
6
  Author-email: kletskov@gmail.com
@@ -0,0 +1,27 @@
1
+ hockey_blast_common_lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
+ hockey_blast_common_lib/aggregate_all_stats.py,sha256=2bOj2BW0k3ZPQR1NH04upnkIfO9SastzTz7XwO3ujYo,1104
3
+ hockey_blast_common_lib/aggregate_goalie_stats.py,sha256=0pJOT6nKgFYmSTWA3HY-BKARgc7l6czqvzWfwNQMad0,14346
4
+ hockey_blast_common_lib/aggregate_h2h_stats.py,sha256=dC5TcJZGkpIQTiq3z40kOX6EjEhFbGv5EL0P1EClBQ0,11117
5
+ hockey_blast_common_lib/aggregate_human_stats.py,sha256=ku42TAjUIj49822noM8fEeB8GS4vFeCCNrLupTWmqzg,26043
6
+ hockey_blast_common_lib/aggregate_referee_stats.py,sha256=mUcTVQH9K4kwmIfgfGsnh_3AqX6Ia3RjfukkYuQas3I,13938
7
+ hockey_blast_common_lib/aggregate_s2s_stats.py,sha256=urYN0Q06twwLO-XWGlSMVAVOTVR_D2AWdmoGsxIYHXE,6737
8
+ hockey_blast_common_lib/aggregate_skater_stats.py,sha256=pA_2pDOGcJyEywISg2ySG8gFCuoLWwqw6a3Gm2wHLyo,23302
9
+ hockey_blast_common_lib/assign_skater_skill.py,sha256=8gAiqQm14QMFJNmdKb2jjaGyQlhzvVhXrqVvaZ84KDM,2499
10
+ hockey_blast_common_lib/db_connection.py,sha256=HvPxDvOj7j5H85RfslGvHVNevfg7mKCd0syJ6NX21mU,1890
11
+ hockey_blast_common_lib/dump_sample_db.sh,sha256=MY3lnzTXBoWd76-ZlZr9nWsKMEVgyRsUn-LZ2d1JWZs,810
12
+ hockey_blast_common_lib/h2h_models.py,sha256=0st4xoJO0U6ONfx3BV03BQvHjZE31e_PqZfphAJMoSU,7968
13
+ hockey_blast_common_lib/hockey_blast_sample_backup.sql.gz,sha256=fzSnsPZVLF7RVGymJcXZPIragcHXnMwX-Levh9LY_ao,4648903
14
+ hockey_blast_common_lib/models.py,sha256=aa5M1hF-IQ3XbDBgq_GK2pk-GcHtatH94ADEg7GAR7M,16734
15
+ hockey_blast_common_lib/options.py,sha256=2L4J9rKCKr58om34259D3_s7kbPdknMSwoo6IwTNnx0,849
16
+ hockey_blast_common_lib/progress_utils.py,sha256=H_zRFOsb2qQQpGw56wJghZ1nUe_m6zqGeR9hZ33Y1Uo,3229
17
+ hockey_blast_common_lib/restore_sample_db.sh,sha256=7W3lzRZeu9zXIu1Bvtnaw8EHc1ulHmFM4mMh86oUQJo,2205
18
+ hockey_blast_common_lib/skills_in_divisions.py,sha256=m-UEwMwn1KM7wOYvDstgsOEeH57M9V6yrkBoghzGYKE,7005
19
+ hockey_blast_common_lib/skills_propagation.py,sha256=CYpnjcJit01-QxkvVstNx1DhUo5ljZB_-o31vGzPT-A,17668
20
+ hockey_blast_common_lib/stats_models.py,sha256=uBNQSqCMXurzS-tD13OoV5WqurYYGHMZMHk1CeA5jgI,26104
21
+ hockey_blast_common_lib/stats_utils.py,sha256=DXsPO4jw8XsdRUN46TGF_IiBAfz3GCIVBswCGp5ELDk,284
22
+ hockey_blast_common_lib/utils.py,sha256=PduHp6HoI4sfr5HvJfQAaz7170dy5kTFVdIfWvBR-Jg,5874
23
+ hockey_blast_common_lib/wsgi.py,sha256=y3NxoJfWjdzX3iP7RGvDEer6zcnPyCanpqSgW1BlXgg,779
24
+ hockey_blast_common_lib-0.1.54.dist-info/METADATA,sha256=ApLUd6xFjG9PecXGI42K-4eyVYLrCbgGNtbccp3EOXY,318
25
+ hockey_blast_common_lib-0.1.54.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
26
+ hockey_blast_common_lib-0.1.54.dist-info/top_level.txt,sha256=wIR4LIkE40npoA2QlOdfCYlgFeGbsHR8Z6r0h46Vtgc,24
27
+ hockey_blast_common_lib-0.1.54.dist-info/RECORD,,
@@ -1,25 +0,0 @@
1
- hockey_blast_common_lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- hockey_blast_common_lib/aggregate_all_stats.py,sha256=2bOj2BW0k3ZPQR1NH04upnkIfO9SastzTz7XwO3ujYo,1104
3
- hockey_blast_common_lib/aggregate_goalie_stats.py,sha256=_9lP5xNX_QcIge-sHbqIlt7mKcFJBGVBbr33Rtf3LAY,12365
4
- hockey_blast_common_lib/aggregate_h2h_stats.py,sha256=Hqhca_OyA9BwLOLU488RcPuZZgpAW0gx4G8EhD7Noy8,11166
5
- hockey_blast_common_lib/aggregate_human_stats.py,sha256=uBNzXbFsK5PhPg-5wjH2iuJ7VPpA7MWKFHx90rMbhjQ,24084
6
- hockey_blast_common_lib/aggregate_referee_stats.py,sha256=dSl0LEpCzyzpD_tv8yw07NLBImo2RP_xuUQZK_p5kFs,12189
7
- hockey_blast_common_lib/aggregate_skater_stats.py,sha256=MU4ICUUtDDKhdeb4sc0UPzjREwSB0rFZuJqRtIDedDU,17393
8
- hockey_blast_common_lib/assign_skater_skill.py,sha256=p-0fbodGpM8BCjKHDpxNb7BH2FcIlBsJwON844KNrUY,1817
9
- hockey_blast_common_lib/db_connection.py,sha256=HvPxDvOj7j5H85RfslGvHVNevfg7mKCd0syJ6NX21mU,1890
10
- hockey_blast_common_lib/dump_sample_db.sh,sha256=MY3lnzTXBoWd76-ZlZr9nWsKMEVgyRsUn-LZ2d1JWZs,810
11
- hockey_blast_common_lib/h2h_models.py,sha256=inB_QAm8Unkc0QRVibiw-Wf8yebNk8zhwxF9EZGMNKM,6350
12
- hockey_blast_common_lib/hockey_blast_sample_backup.sql.gz,sha256=6_atrXLDO31Qyc9CnangK-xywBHIAI3Ztvng-PZXe6U,4648905
13
- hockey_blast_common_lib/models.py,sha256=nbJjypa2OBO5_fwjAbWVgBp4WDCuE-RtaELzJ9cvqB4,16468
14
- hockey_blast_common_lib/options.py,sha256=2L4J9rKCKr58om34259D3_s7kbPdknMSwoo6IwTNnx0,849
15
- hockey_blast_common_lib/restore_sample_db.sh,sha256=7W3lzRZeu9zXIu1Bvtnaw8EHc1ulHmFM4mMh86oUQJo,2205
16
- hockey_blast_common_lib/skills_in_divisions.py,sha256=m-UEwMwn1KM7wOYvDstgsOEeH57M9V6yrkBoghzGYKE,7005
17
- hockey_blast_common_lib/skills_propagation.py,sha256=x6yy7fJ6IX3YiHqiP_v7-p_S2Expb8JJ-mWuajEFBdY,16388
18
- hockey_blast_common_lib/stats_models.py,sha256=NWigeIowIJU6o1Sk1cP08kEy4t594LZpecKUnl-O6as,25552
19
- hockey_blast_common_lib/stats_utils.py,sha256=DXsPO4jw8XsdRUN46TGF_IiBAfz3GCIVBswCGp5ELDk,284
20
- hockey_blast_common_lib/utils.py,sha256=PduHp6HoI4sfr5HvJfQAaz7170dy5kTFVdIfWvBR-Jg,5874
21
- hockey_blast_common_lib/wsgi.py,sha256=y3NxoJfWjdzX3iP7RGvDEer6zcnPyCanpqSgW1BlXgg,779
22
- hockey_blast_common_lib-0.1.51.dist-info/METADATA,sha256=wr1jHyXwbpWDlzqSguE3UWILsSlrcu3xgjwZ_IdoQ3A,318
23
- hockey_blast_common_lib-0.1.51.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
24
- hockey_blast_common_lib-0.1.51.dist-info/top_level.txt,sha256=wIR4LIkE40npoA2QlOdfCYlgFeGbsHR8Z6r0h46Vtgc,24
25
- hockey_blast_common_lib-0.1.51.dist-info/RECORD,,