hockey-blast-common-lib 0.1.33__tar.gz → 0.1.34__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/PKG-INFO +1 -1
  2. hockey_blast_common_lib-0.1.34/hockey_blast_common_lib/aggregate_all_stats.py +26 -0
  3. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/aggregate_goalie_stats.py +12 -16
  4. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/aggregate_human_stats.py +32 -35
  5. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/aggregate_referee_stats.py +12 -18
  6. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/aggregate_skater_stats.py +14 -19
  7. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/hockey_blast_sample_backup.sql.gz +0 -0
  8. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/models.py +1 -0
  9. hockey_blast_common_lib-0.1.34/hockey_blast_common_lib/stats_utils.py +0 -0
  10. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/utils.py +20 -4
  11. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib.egg-info/PKG-INFO +1 -1
  12. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib.egg-info/SOURCES.txt +1 -0
  13. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/setup.py +1 -1
  14. hockey_blast_common_lib-0.1.33/hockey_blast_common_lib/stats_utils.py +0 -4
  15. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/MANIFEST.in +0 -0
  16. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/README.md +0 -0
  17. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/__init__.py +0 -0
  18. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/assign_skater_skill.py +0 -0
  19. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/db_connection.py +0 -0
  20. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/dump_sample_db.sh +0 -0
  21. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/options.py +0 -0
  22. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/restore_sample_db.sh +0 -0
  23. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/skills_in_divisions.py +0 -0
  24. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/skills_propagation.py +0 -0
  25. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/stats_models.py +0 -0
  26. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib/wsgi.py +0 -0
  27. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib.egg-info/dependency_links.txt +0 -0
  28. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib.egg-info/requires.txt +0 -0
  29. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/hockey_blast_common_lib.egg-info/top_level.txt +0 -0
  30. {hockey_blast_common_lib-0.1.33 → hockey_blast_common_lib-0.1.34}/setup.cfg +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: hockey-blast-common-lib
-Version: 0.1.33
+Version: 0.1.34
 Summary: Common library for shared functionality and DB models
 Author: Pavel Kletskov
 Author-email: kletskov@gmail.com

hockey_blast_common_lib/aggregate_all_stats.py (new file)
@@ -0,0 +1,26 @@
+import sys, os
+
+# Add the package directory to the Python path
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+from hockey_blast_common_lib.aggregate_human_stats import run_aggregate_human_stats
+from hockey_blast_common_lib.aggregate_skater_stats import run_aggregate_skater_stats
+from hockey_blast_common_lib.aggregate_goalie_stats import run_aggregate_goalie_stats
+from hockey_blast_common_lib.aggregate_referee_stats import run_aggregate_referee_stats
+
+if __name__ == "__main__":
+    print("Running aggregate_human_stats...")
+    run_aggregate_human_stats()
+    print("Finished running aggregate_human_stats\n")
+
+    print("Running aggregate_skater_stats...")
+    run_aggregate_skater_stats()
+    print("Finished running aggregate_skater_stats\n")
+
+    print("Running aggregate_goalie_stats...")
+    run_aggregate_goalie_stats()
+    print("Finished running aggregate_goalie_stats\n")
+
+    print("Running aggregate_referee_stats...")
+    run_aggregate_referee_stats()
+    print("Finished running aggregate_referee_stats\n")

hockey_blast_common_lib/aggregate_goalie_stats.py
@@ -11,8 +11,8 @@ from hockey_blast_common_lib.stats_models import OrgStatsGoalie, DivisionStatsGo
 from hockey_blast_common_lib.db_connection import create_session
 from sqlalchemy.sql import func, case
 from hockey_blast_common_lib.options import not_human_names, parse_args, MIN_GAMES_FOR_ORG_STATS, MIN_GAMES_FOR_DIVISION_STATS, MIN_GAMES_FOR_LEVEL_STATS
-from hockey_blast_common_lib.utils import get_org_id_from_alias, get_human_ids_by_names, get_division_ids_for_last_season_in_all_leagues, get_all_division_ids_for_org
-from hockey_blast_common_lib.stats_utils import assign_ranks
+from hockey_blast_common_lib.utils import get_org_id_from_alias, get_human_ids_by_names, get_division_ids_for_last_season_in_all_leagues, get_all_division_ids_for_org, get_start_datetime
+from hockey_blast_common_lib.utils import assign_ranks
 from sqlalchemy import func, case, and_
 from collections import defaultdict
 
@@ -61,18 +61,11 @@ def aggregate_goalie_stats(session, aggregation_type, aggregation_id, names_to_f
 
     # Apply aggregation window filter
     if aggregation_window:
-        last_game_datetime = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
-        if last_game_datetime:
-            last_game_datetime = datetime.strptime(last_game_datetime, '%Y-%m-%d %H:%M:%S')
-            if aggregation_window == 'Daily':
-                start_datetime = last_game_datetime - timedelta(days=1)
-            elif aggregation_window == 'Weekly':
-                start_datetime = last_game_datetime - timedelta(weeks=1)
-            else:
-                start_datetime = None
-            if start_datetime:
-                game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime)
-                filter_condition = filter_condition & game_window_filter
+        last_game_datetime_str = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
+        start_datetime = get_start_datetime(last_game_datetime_str, aggregation_window)
+        if start_datetime:
+            game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime_str)
+            filter_condition = filter_condition & game_window_filter
 
     # Delete existing items from the stats table
     session.query(StatsModel).filter(StatsModel.aggregation_id == aggregation_id).delete()
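
The inline Daily/Weekly window math each aggregator previously carried is now a single call to the new get_start_datetime helper in utils.py; the same substitution repeats in the human, referee, and skater hunks below. A rough sketch of what the helper returns, using an invented last-game timestamp (its definition appears in the utils.py hunk further down):

    # Illustration only - the timestamp is made up; the format matches the one
    # get_start_datetime parses.
    from hockey_blast_common_lib.utils import get_start_datetime

    last_game = '2024-11-03 21:30:00'
    get_start_datetime(last_game, 'Weekly')   # 2024-10-27 21:30:00 (one week back)
    get_start_datetime(last_game, 'Daily')    # 2024-11-03 07:30:00 (14 hours back)
    get_start_datetime(last_game, 'Monthly')  # None - unrecognized windows disable the filter
    get_start_datetime(None, 'Weekly')        # None - no final games matched the filter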

@@ -180,7 +173,7 @@ def aggregate_goalie_stats(session, aggregation_type, aggregation_id, names_to_f
             session.commit()
     session.commit()
 
-if __name__ == "__main__":
+def run_aggregate_goalie_stats():
     session = create_session("boss")
     human_id_to_debug = None
 
@@ -216,4 +209,7 @@ if __name__ == "__main__":
         if human_id_to_debug is None:
             print(f"\rProcessed {processed_levels}/{total_levels} levels ({(processed_levels/total_levels)*100:.2f}%)", end="")
         processed_levels += 1
-        aggregate_goalie_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
+        aggregate_goalie_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
+
+if __name__ == "__main__":
+    run_aggregate_goalie_stats()

hockey_blast_common_lib/aggregate_human_stats.py
@@ -12,7 +12,8 @@ from hockey_blast_common_lib.db_connection import create_session
 from sqlalchemy.sql import func, case
 from hockey_blast_common_lib.options import parse_args, MIN_GAMES_FOR_ORG_STATS, MIN_GAMES_FOR_DIVISION_STATS, MIN_GAMES_FOR_LEVEL_STATS, not_human_names
 from hockey_blast_common_lib.utils import get_fake_human_for_stats, get_org_id_from_alias, get_human_ids_by_names, get_division_ids_for_last_season_in_all_leagues, get_all_division_ids_for_org
-from hockey_blast_common_lib.stats_utils import assign_ranks
+from hockey_blast_common_lib.utils import assign_ranks
+from hockey_blast_common_lib.utils import get_start_datetime
 
 def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_filter_out, human_id_filter=None, aggregation_window=None):
     human_ids_to_filter = get_human_ids_by_names(session, names_to_filter_out)
@@ -50,18 +51,11 @@ def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_fi
 
     # Apply aggregation window filter
     if aggregation_window:
-        last_game_datetime = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
-        if last_game_datetime:
-            last_game_datetime = datetime.strptime(last_game_datetime, '%Y-%m-%d %H:%M:%S')
-            if aggregation_window == 'Daily':
-                start_datetime = last_game_datetime - timedelta(days=1)
-            elif aggregation_window == 'Weekly':
-                start_datetime = last_game_datetime - timedelta(weeks=1)
-            else:
-                start_datetime = None
-            if start_datetime:
-                game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime)
-                filter_condition = filter_condition & game_window_filter
+        last_game_datetime_str = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
+        start_datetime = get_start_datetime(last_game_datetime_str, aggregation_window)
+        if start_datetime:
+            game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime_str)
+            filter_condition = filter_condition & game_window_filter
 
     # Delete existing items from the stats table
     session.query(StatsModel).filter(StatsModel.aggregation_id == aggregation_id).delete()
@@ -415,30 +409,30 @@ def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_fi
             session.add(overall_human_stat)
     session.commit()
 
-if __name__ == "__main__":
+def run_aggregate_human_stats():
     session = create_session("boss")
     human_id_to_debug = None
 
-    # Get all org_id present in the Organization table
-    # org_ids = session.query(Organization.id).all()
-    # org_ids = [org_id[0] for org_id in org_ids]
-
-    # for org_id in org_ids:
-    #     division_ids = get_all_division_ids_for_org(session, org_id)
-    #     print(f"Aggregating human stats for {len(division_ids)} divisions in org_id {org_id}...")
-    #     total_divisions = len(division_ids)
-    #     processed_divisions = 0
-    #     for division_id in division_ids:
-    #         aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
-    #         aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Weekly')
-    #         aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Daily')
-    #         processed_divisions += 1
-    #         if human_id_to_debug is None:
-    #             print(f"\rProcessed {processed_divisions}/{total_divisions} divisions ({(processed_divisions/total_divisions)*100:.2f}%)", end="")
-    #     print("")
-    #     aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
-    #     aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Weekly')
-    #     aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Daily')
+    # Aggregate by Org and Division inside Org
+    org_ids = session.query(Organization.id).all()
+    org_ids = [org_id[0] for org_id in org_ids]
+
+    for org_id in org_ids:
+        division_ids = get_all_division_ids_for_org(session, org_id)
+        print(f"Aggregating human stats for {len(division_ids)} divisions in org_id {org_id}...")
+        total_divisions = len(division_ids)
+        processed_divisions = 0
+        for division_id in division_ids:
+            aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
+            aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Weekly')
+            aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Daily')
+            processed_divisions += 1
+            if human_id_to_debug is None:
+                print(f"\rProcessed {processed_divisions}/{total_divisions} divisions ({(processed_divisions/total_divisions)*100:.2f}%)", end="")
+        print("")
+        aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
+        aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Weekly')
+        aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Daily')
 
     # Aggregate by level
     level_ids = session.query(Division.level_id).distinct().all()
@@ -451,4 +445,7 @@ if __name__ == "__main__":
         if human_id_to_debug is None:
             print(f"\rProcessed {processed_levels}/{total_levels} levels ({(processed_levels/total_levels)*100:.2f}%)", end="")
         processed_levels += 1
-        aggregate_human_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
+        aggregate_human_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
+
+if __name__ == "__main__":
+    run_aggregate_human_stats()

hockey_blast_common_lib/aggregate_referee_stats.py
@@ -12,7 +12,8 @@ from hockey_blast_common_lib.db_connection import create_session
 from sqlalchemy.sql import func, case
 from hockey_blast_common_lib.options import parse_args, MIN_GAMES_FOR_ORG_STATS, MIN_GAMES_FOR_DIVISION_STATS, MIN_GAMES_FOR_LEVEL_STATS, not_human_names
 from hockey_blast_common_lib.utils import get_org_id_from_alias, get_human_ids_by_names, get_division_ids_for_last_season_in_all_leagues, get_all_division_ids_for_org
-from hockey_blast_common_lib.stats_utils import assign_ranks
+from hockey_blast_common_lib.utils import assign_ranks
+from hockey_blast_common_lib.utils import get_start_datetime
 
 def aggregate_referee_stats(session, aggregation_type, aggregation_id, names_to_filter_out, aggregation_window=None):
     human_ids_to_filter = get_human_ids_by_names(session, names_to_filter_out)
@@ -50,18 +51,11 @@ def aggregate_referee_stats(session, aggregation_type, aggregation_id, names_to_
 
     # Apply aggregation window filter
    if aggregation_window:
-        last_game_datetime = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
-        if last_game_datetime:
-            last_game_datetime = datetime.strptime(last_game_datetime, '%Y-%m-%d %H:%M:%S')
-            if aggregation_window == 'Daily':
-                start_datetime = last_game_datetime - timedelta(days=1)
-            elif aggregation_window == 'Weekly':
-                start_datetime = last_game_datetime - timedelta(weeks=1)
-            else:
-                start_datetime = None
-            if start_datetime:
-                game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime)
-                filter_condition = filter_condition & game_window_filter
+        last_game_datetime_str = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
+        start_datetime = get_start_datetime(last_game_datetime_str, aggregation_window)
+        if start_datetime:
+            game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime_str)
+            filter_condition = filter_condition & game_window_filter
 
     # Delete existing items from the stats table
     session.query(StatsModel).filter(StatsModel.aggregation_id == aggregation_id).delete()
@@ -197,12 +191,9 @@ def aggregate_referee_stats(session, aggregation_type, aggregation_id, names_to_
         # Commit in batches
         if i % batch_size == 0:
             session.commit()
-            print(f"\r{i}/{total_items} ({(i/total_items)*100:.2f}%)", end="")
     session.commit()
-    print(f"\r{total_items}/{total_items} (100.00%)")
-    print("\nDone.")
 
-if __name__ == "__main__":
+def run_aggregate_referee_stats():
     session = create_session("boss")
     human_id_to_debug = None
 
@@ -238,4 +229,7 @@ if __name__ == "__main__":
         if human_id_to_debug is None:
             print(f"\rProcessed {processed_levels}/{total_levels} levels ({(processed_levels/total_levels)*100:.2f}%)", end="")
         processed_levels += 1
-        aggregate_referee_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names)
+        aggregate_referee_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names)
+
+if __name__ == "__main__":
+    run_aggregate_referee_stats()

hockey_blast_common_lib/aggregate_skater_stats.py
@@ -12,6 +12,7 @@ from hockey_blast_common_lib.db_connection import create_session
 from sqlalchemy.sql import func, case
 from hockey_blast_common_lib.options import not_human_names, parse_args, MIN_GAMES_FOR_ORG_STATS, MIN_GAMES_FOR_DIVISION_STATS, MIN_GAMES_FOR_LEVEL_STATS
 from hockey_blast_common_lib.utils import get_org_id_from_alias, get_human_ids_by_names, get_division_ids_for_last_season_in_all_leagues, get_all_division_ids_for_org
+from hockey_blast_common_lib.utils import get_start_datetime
 from sqlalchemy import func, case, and_
 from collections import defaultdict
 
@@ -21,6 +22,9 @@ def aggregate_skater_stats(session, aggregation_type, aggregation_id, names_to_f
     # Get the name of the aggregation, for debug purposes
     if aggregation_type == 'org':
         aggregation_name = session.query(Organization).filter(Organization.id == aggregation_id).first().organization_name
+        print(f"Aggregating skater stats for {aggregation_name} with window {aggregation_window}...")
+
+        aggregation_name = session.query(Organization).filter(Organization.id == aggregation_id).first().organization_name
     elif aggregation_type == 'division':
         aggregation_name = session.query(Division).filter(Division.id == aggregation_id).first().level
     elif aggregation_type == 'level':
@@ -59,18 +63,11 @@ def aggregate_skater_stats(session, aggregation_type, aggregation_id, names_to_f
 
     # Apply aggregation window filter
     if aggregation_window:
-        last_game_datetime = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
-        if last_game_datetime:
-            last_game_datetime = datetime.strptime(last_game_datetime, '%Y-%m-%d %H:%M:%S')
-            if aggregation_window == 'Daily':
-                start_datetime = last_game_datetime - timedelta(days=1)
-            elif aggregation_window == 'Weekly':
-                start_datetime = last_game_datetime - timedelta(weeks=1)
-            else:
-                start_datetime = None
-            if start_datetime:
-                game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime)
-                filter_condition = filter_condition & game_window_filter
+        last_game_datetime_str = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
+        start_datetime = get_start_datetime(last_game_datetime_str, aggregation_window)
+        if start_datetime:
+            game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime_str)
+            filter_condition = filter_condition & game_window_filter
 
     # Delete existing items from the stats table
     session.query(StatsModel).filter(StatsModel.aggregation_id == aggregation_id).delete()
@@ -254,16 +251,11 @@ def aggregate_skater_stats(session, aggregation_type, aggregation_id, names_to_f
         # Commit in batches
         if i % batch_size == 0:
             session.commit()
-            if debug_human_id is None:
-                print(f"\r{i}/{total_items} ({(i/total_items)*100:.2f}%)", end="")
-
     session.commit()
-    if debug_human_id is None:
-        print(f"\r{total_items}/{total_items} (100.00%)")
 
-if __name__ == "__main__":
+def run_aggregate_skater_stats():
     session = create_session("boss")
-    human_id_to_debug = 117076
+    human_id_to_debug = None
 
     # Get all org_id present in the Organization table
     org_ids = session.query(Organization.id).all()
@@ -298,3 +290,6 @@ if __name__ == "__main__":
             print(f"\rProcessed {processed_levels}/{total_levels} levels ({(processed_levels/total_levels)*100:.2f}%)", end="")
         processed_levels += 1
         aggregate_skater_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
+
+if __name__ == "__main__":
+    run_aggregate_skater_stats()

hockey_blast_common_lib/models.py
@@ -224,6 +224,7 @@ class Organization(db.Model):
     id = db.Column(db.Integer, primary_key=True)
     alias = db.Column(db.String(100), unique=True)
     organization_name = db.Column(db.String(100), unique=True)
+    website = db.Column(db.String(100), nullable=True)  # New field for website
 
 class Penalty(db.Model):
     __tablename__ = 'penalties'
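
Organization gains a nullable website column, and get_org_id_from_alias (changed below in utils.py) now fills it in when it seeds the predefined organizations. A hedged sketch of reading and backfilling the field on an existing row; the alias lookup and URL are illustrative:

    # Assumes the organizations table already has the new column
    # (no migration appears in this diff).
    from hockey_blast_common_lib.db_connection import create_session
    from hockey_blast_common_lib.models import Organization

    session = create_session("boss")
    org = session.query(Organization).filter_by(alias="sharksice").first()
    if org and org.website is None:
        org.website = "https://www.sharksice.com"  # same value utils.py seeds for this alias
        session.commit()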

hockey_blast_common_lib/utils.py
@@ -1,5 +1,6 @@
 import sys
 import os
+from datetime import datetime, timedelta
 
 # Add the package directory to the Python path
 sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -11,9 +12,9 @@ from sqlalchemy.sql import func
 def get_org_id_from_alias(session, org_alias):
     # Predefined organizations
     predefined_organizations = [
-        {"id": 1, "organization_name": "Sharks Ice", "alias": "sharksice"},
-        {"id": 2, "organization_name": "TriValley Ice", "alias": "tvice"},
-        {"id": 3, "organization_name": "CAHA", "alias": "caha"}
+        {"id": 1, "organization_name": "Sharks Ice", "alias": "sharksice", "website": "https://www.sharksice.com"},
+        {"id": 2, "organization_name": "TriValley Ice", "alias": "tvice", "website": "https://www.trivalleyice.com"},
+        {"id": 3, "organization_name": "CAHA", "alias": "caha", "website": "https://www.caha.com"}
     ]
 
     # Check if the organization exists
@@ -25,7 +26,7 @@ def get_org_id_from_alias(session, org_alias):
     for org in predefined_organizations:
         existing_org = session.query(Organization).filter_by(id=org["id"]).first()
         if not existing_org:
-            new_org = Organization(id=org["id"], organization_name=org["organization_name"], alias=org["alias"])
+            new_org = Organization(id=org["id"], organization_name=org["organization_name"], alias=org["alias"], website=org["website"])
             session.add(new_org)
             session.commit()
 
@@ -82,6 +83,21 @@ def get_fake_human_for_stats(session):
 
     return human.id
 
+def get_start_datetime(last_game_datetime_str, aggregation_window):
+    if last_game_datetime_str:
+        last_game_datetime = datetime.strptime(last_game_datetime_str, '%Y-%m-%d %H:%M:%S')
+        if aggregation_window == 'Daily':
+            # From 10AM till midnight, 14 hours to avoid last day games
+            return last_game_datetime - timedelta(hours=14)
+        elif aggregation_window == 'Weekly':
+            return last_game_datetime - timedelta(weeks=1)
+    return None
+
+def assign_ranks(stats_dict, field, reverse_rank=False):
+    sorted_stats = sorted(stats_dict.items(), key=lambda x: x[1][field], reverse=not reverse_rank)
+    for rank, (key, stat) in enumerate(sorted_stats, start=1):
+        stats_dict[key][f'{field}_rank'] = rank
+
 #TEST DB CONNECTION, PERMISSIONS...
 # from hockey_blast_common_lib.db_connection import create_session
 # session = create_session("frontend")

hockey_blast_common_lib.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: hockey-blast-common-lib
-Version: 0.1.33
+Version: 0.1.34
 Summary: Common library for shared functionality and DB models
 Author: Pavel Kletskov
 Author-email: kletskov@gmail.com

hockey_blast_common_lib.egg-info/SOURCES.txt
@@ -2,6 +2,7 @@ MANIFEST.in
 README.md
 setup.py
 hockey_blast_common_lib/__init__.py
+hockey_blast_common_lib/aggregate_all_stats.py
 hockey_blast_common_lib/aggregate_goalie_stats.py
 hockey_blast_common_lib/aggregate_human_stats.py
 hockey_blast_common_lib/aggregate_referee_stats.py

setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
 setup(
     name='hockey-blast-common-lib',  # The name of your package
-    version='0.1.33',
+    version='0.1.34',
     description='Common library for shared functionality and DB models',
     author='Pavel Kletskov',
     author_email='kletskov@gmail.com',

hockey_blast_common_lib/stats_utils.py (contents removed; the 0.1.34 copy is an empty file)
@@ -1,4 +0,0 @@
-def assign_ranks(stats_dict, field, reverse_rank=False):
-    sorted_stats = sorted(stats_dict.items(), key=lambda x: x[1][field], reverse=not reverse_rank)
-    for rank, (key, stat) in enumerate(sorted_stats, start=1):
-        stats_dict[key][f'{field}_rank'] = rank