hockey-blast-common-lib 0.1.32__py3-none-any.whl → 0.1.34__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hockey_blast_common_lib/aggregate_all_stats.py +26 -0
- hockey_blast_common_lib/aggregate_goalie_stats.py +99 -48
- hockey_blast_common_lib/aggregate_human_stats.py +228 -72
- hockey_blast_common_lib/aggregate_referee_stats.py +65 -64
- hockey_blast_common_lib/aggregate_skater_stats.py +14 -19
- hockey_blast_common_lib/hockey_blast_sample_backup.sql.gz +0 -0
- hockey_blast_common_lib/models.py +1 -0
- hockey_blast_common_lib/skills_in_divisions.py +1 -1
- hockey_blast_common_lib/stats_models.py +53 -2
- hockey_blast_common_lib/stats_utils.py +0 -0
- hockey_blast_common_lib/utils.py +20 -4
- {hockey_blast_common_lib-0.1.32.dist-info → hockey_blast_common_lib-0.1.34.dist-info}/METADATA +1 -1
- hockey_blast_common_lib-0.1.34.dist-info/RECORD +23 -0
- hockey_blast_common_lib-0.1.32.dist-info/RECORD +0 -21
- {hockey_blast_common_lib-0.1.32.dist-info → hockey_blast_common_lib-0.1.34.dist-info}/WHEEL +0 -0
- {hockey_blast_common_lib-0.1.32.dist-info → hockey_blast_common_lib-0.1.34.dist-info}/top_level.txt +0 -0

hockey_blast_common_lib/aggregate_all_stats.py (new file)
@@ -0,0 +1,26 @@
+import sys, os
+
+# Add the package directory to the Python path
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+from hockey_blast_common_lib.aggregate_human_stats import run_aggregate_human_stats
+from hockey_blast_common_lib.aggregate_skater_stats import run_aggregate_skater_stats
+from hockey_blast_common_lib.aggregate_goalie_stats import run_aggregate_goalie_stats
+from hockey_blast_common_lib.aggregate_referee_stats import run_aggregate_referee_stats
+
+if __name__ == "__main__":
+    print("Running aggregate_human_stats...")
+    run_aggregate_human_stats()
+    print("Finished running aggregate_human_stats\n")
+
+    print("Running aggregate_skater_stats...")
+    run_aggregate_skater_stats()
+    print("Finished running aggregate_skater_stats\n")
+
+    print("Running aggregate_goalie_stats...")
+    run_aggregate_goalie_stats()
+    print("Finished running aggregate_goalie_stats\n")
+
+    print("Running aggregate_referee_stats...")
+    run_aggregate_referee_stats()
+    print("Finished running aggregate_referee_stats\n")
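
The new aggregate_all_stats.py is a plain driver script: executed directly, it calls the four run_aggregate_*_stats() entrypoints in order. The same entrypoints can also be called from Python; a minimal sketch, assuming the package is installed and the database used by create_session("boss") is reachable:

```python
# Minimal sketch: run the full aggregation pipeline programmatically.
# Entrypoint names come from the diff above; each one opens its own session
# via create_session("boss"), so no arguments are needed here.
from hockey_blast_common_lib.aggregate_human_stats import run_aggregate_human_stats
from hockey_blast_common_lib.aggregate_skater_stats import run_aggregate_skater_stats
from hockey_blast_common_lib.aggregate_goalie_stats import run_aggregate_goalie_stats
from hockey_blast_common_lib.aggregate_referee_stats import run_aggregate_referee_stats

for run_stats in (run_aggregate_human_stats, run_aggregate_skater_stats,
                  run_aggregate_goalie_stats, run_aggregate_referee_stats):
    run_stats()
```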

hockey_blast_common_lib/aggregate_goalie_stats.py
@@ -5,16 +5,31 @@ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 from datetime import datetime, timedelta
 import sqlalchemy
-
-from hockey_blast_common_lib.
+
+from hockey_blast_common_lib.models import Game, Goal, Penalty, GameRoster, Organization, Division, Human, Level
+from hockey_blast_common_lib.stats_models import OrgStatsGoalie, DivisionStatsGoalie, OrgStatsWeeklyGoalie, OrgStatsDailyGoalie, DivisionStatsWeeklyGoalie, DivisionStatsDailyGoalie, LevelStatsGoalie
 from hockey_blast_common_lib.db_connection import create_session
 from sqlalchemy.sql import func, case
-from hockey_blast_common_lib.options import not_human_names, parse_args, MIN_GAMES_FOR_ORG_STATS, MIN_GAMES_FOR_DIVISION_STATS
-from hockey_blast_common_lib.utils import get_org_id_from_alias, get_human_ids_by_names, get_division_ids_for_last_season_in_all_leagues, get_all_division_ids_for_org
+from hockey_blast_common_lib.options import not_human_names, parse_args, MIN_GAMES_FOR_ORG_STATS, MIN_GAMES_FOR_DIVISION_STATS, MIN_GAMES_FOR_LEVEL_STATS
+from hockey_blast_common_lib.utils import get_org_id_from_alias, get_human_ids_by_names, get_division_ids_for_last_season_in_all_leagues, get_all_division_ids_for_org, get_start_datetime
+from hockey_blast_common_lib.utils import assign_ranks
+from sqlalchemy import func, case, and_
+from collections import defaultdict
 
-def aggregate_goalie_stats(session, aggregation_type, aggregation_id, names_to_filter_out, aggregation_window=None):
+def aggregate_goalie_stats(session, aggregation_type, aggregation_id, names_to_filter_out, debug_human_id=None, aggregation_window=None):
     human_ids_to_filter = get_human_ids_by_names(session, names_to_filter_out)
 
+    # Get the name of the aggregation, for debug purposes
+    if aggregation_type == 'org':
+        aggregation_name = session.query(Organization).filter(Organization.id == aggregation_id).first().organization_name
+        print(f"Aggregating goalie stats for {aggregation_name} with window {aggregation_window}...")
+    elif aggregation_type == 'division':
+        aggregation_name = session.query(Division).filter(Division.id == aggregation_id).first().level
+    elif aggregation_type == 'level':
+        aggregation_name = session.query(Level).filter(Level.id == aggregation_id).first().level_name
+    else:
+        aggregation_name = "Unknown"
+
     if aggregation_type == 'org':
         if aggregation_window == 'Daily':
             StatsModel = OrgStatsDailyGoalie

@@ -33,28 +48,34 @@ def aggregate_goalie_stats(session, aggregation_type, aggregation_id, names_to_f
         StatsModel = DivisionStatsGoalie
         min_games = MIN_GAMES_FOR_DIVISION_STATS
         filter_condition = Game.division_id == aggregation_id
+    elif aggregation_type == 'level':
+        StatsModel = LevelStatsGoalie
+        min_games = MIN_GAMES_FOR_LEVEL_STATS
+        filter_condition = Division.level_id == aggregation_id
+        # Add filter to only include games for the last 5 years
+        five_years_ago = datetime.now() - timedelta(days=5*365)
+        level_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP) >= five_years_ago
+        filter_condition = filter_condition & level_window_filter
     else:
         raise ValueError("Invalid aggregation type")
 
     # Apply aggregation window filter
     if aggregation_window:
-
-
-
-
-
-        elif aggregation_window == 'Weekly':
-            start_datetime = last_game_datetime - timedelta(weeks=1)
-        else:
-            start_datetime = None
-        if start_datetime:
-            game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime)
-            filter_condition = filter_condition & game_window_filter
+        last_game_datetime_str = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
+        start_datetime = get_start_datetime(last_game_datetime_str, aggregation_window)
+        if start_datetime:
+            game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime_str)
+            filter_condition = filter_condition & game_window_filter
 
     # Delete existing items from the stats table
     session.query(StatsModel).filter(StatsModel.aggregation_id == aggregation_id).delete()
     session.commit()
 
+    # Filter for specific human_id if provided
+    human_filter = []
+    # if debug_human_id:
+    #     human_filter = [GameRoster.human_id == debug_human_id]
+
     # Aggregate games played, goals allowed, and shots faced for each goalie
     goalie_stats = session.query(
         GameRoster.human_id,

@@ -62,7 +83,7 @@ def aggregate_goalie_stats(session, aggregation_type, aggregation_id, names_to_f
         func.sum(case((GameRoster.team_id == Game.home_team_id, Game.visitor_final_score), else_=Game.home_final_score)).label('goals_allowed'),
         func.sum(case((GameRoster.team_id == Game.home_team_id, Game.visitor_period_1_shots + Game.visitor_period_2_shots + Game.visitor_period_3_shots + Game.visitor_ot_shots + Game.visitor_so_shots), else_=Game.home_period_1_shots + Game.home_period_2_shots + Game.home_period_3_shots + Game.home_ot_shots + Game.home_so_shots)).label('shots_faced'),
         func.array_agg(Game.id).label('game_ids')
-    ).join(Game, GameRoster.game_id == Game.id).filter(filter_condition, GameRoster.role
+    ).join(Game, GameRoster.game_id == Game.id).join(Division, Game.division_id == Division.id).filter(filter_condition, GameRoster.role.ilike('g')).group_by(GameRoster.human_id).all()
 
     # Combine the results
     stats_dict = {}

@@ -70,6 +91,8 @@ def aggregate_goalie_stats(session, aggregation_type, aggregation_id, names_to_f
         if stat.human_id in human_ids_to_filter:
             continue
         key = (aggregation_id, stat.human_id)
+        if stat.games_played < min_games:
+            continue
         stats_dict[key] = {
             'games_played': stat.games_played,
             'goals_allowed': stat.goals_allowed if stat.goals_allowed is not None else 0,

@@ -102,37 +125,43 @@ def aggregate_goalie_stats(session, aggregation_type, aggregation_id, names_to_f
     # Calculate total_in_rank
     total_in_rank = len(stats_dict)
 
-    # Assign ranks
-    def assign_ranks(stats_dict, field):
-        sorted_stats = sorted(stats_dict.items(), key=lambda x: x[1][field], reverse=True)
-        for rank, (key, stat) in enumerate(sorted_stats, start=1):
-            stats_dict[key][f'{field}_rank'] = rank
-
+    # Assign ranks within each level
     assign_ranks(stats_dict, 'games_played')
-    assign_ranks(stats_dict, 'goals_allowed')
-    assign_ranks(stats_dict, 'goals_allowed_per_game')
+    assign_ranks(stats_dict, 'goals_allowed', reverse_rank=True)
     assign_ranks(stats_dict, 'shots_faced')
+    assign_ranks(stats_dict, 'goals_allowed_per_game', reverse_rank=True)
     assign_ranks(stats_dict, 'save_percentage')
 
+    # Debug output for specific human
+    if debug_human_id:
+        if any(key[1] == debug_human_id for key in stats_dict):
+            human = session.query(Human).filter(Human.id == debug_human_id).first()
+            human_name = f"{human.first_name} {human.last_name}" if human else "Unknown"
+            print(f"For Human {debug_human_id} ({human_name}) for {aggregation_type} {aggregation_id} ({aggregation_name}) , total_in_rank {total_in_rank} and window {aggregation_window}:")
+            for key, stat in stats_dict.items():
+                if key[1] == debug_human_id:
+                    for k, v in stat.items():
+                        print(f"{k}: {v}")
+
     # Insert aggregated stats into the appropriate table with progress output
     total_items = len(stats_dict)
     batch_size = 1000
     for i, (key, stat) in enumerate(stats_dict.items(), 1):
         aggregation_id, human_id = key
-        if stat['games_played']
-
+        goals_allowed_per_game = stat['goals_allowed'] / stat['games_played'] if stat['games_played'] > 0 else 0.0
+        save_percentage = (stat['shots_faced'] - stat['goals_allowed']) / stat['shots_faced'] if stat['shots_faced'] > 0 else 0.0
         goalie_stat = StatsModel(
             aggregation_id=aggregation_id,
             human_id=human_id,
             games_played=stat['games_played'],
             goals_allowed=stat['goals_allowed'],
-            goals_allowed_per_game=stat['goals_allowed_per_game'],
             shots_faced=stat['shots_faced'],
-
+            goals_allowed_per_game=goals_allowed_per_game,
+            save_percentage=save_percentage,
             games_played_rank=stat['games_played_rank'],
             goals_allowed_rank=stat['goals_allowed_rank'],
-            goals_allowed_per_game_rank=stat['goals_allowed_per_game_rank'],
             shots_faced_rank=stat['shots_faced_rank'],
+            goals_allowed_per_game_rank=stat['goals_allowed_per_game_rank'],
             save_percentage_rank=stat['save_percentage_rank'],
             total_in_rank=total_in_rank,
             first_game_id=stat['first_game_id'],

@@ -142,23 +171,45 @@ def aggregate_goalie_stats(session, aggregation_type, aggregation_id, names_to_f
         # Commit in batches
         if i % batch_size == 0:
             session.commit()
-        print(f"\r{i}/{total_items} ({(i/total_items)*100:.2f}%)", end="")
     session.commit()
-    print(f"\r{total_items}/{total_items} (100.00%)")
-    print("\nDone.")
 
-
-if __name__ == "__main__":
-    args = parse_args()
-    org_alias = args.org
+def run_aggregate_goalie_stats():
     session = create_session("boss")
-
-
-
-
-
-
-
-
-
-
+    human_id_to_debug = None
+
+    # Get all org_id present in the Organization table
+    org_ids = session.query(Organization.id).all()
+    org_ids = [org_id[0] for org_id in org_ids]
+
+    for org_id in org_ids:
+        division_ids = get_all_division_ids_for_org(session, org_id)
+        print(f"Aggregating goalie stats for {len(division_ids)} divisions in org_id {org_id}...")
+        total_divisions = len(division_ids)
+        processed_divisions = 0
+        for division_id in division_ids:
+            aggregate_goalie_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
+            aggregate_goalie_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Weekly')
+            aggregate_goalie_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Daily')
+            processed_divisions += 1
+            if human_id_to_debug is None:
+                print(f"\rProcessed {processed_divisions}/{total_divisions} divisions ({(processed_divisions/total_divisions)*100:.2f}%)", end="")
+
+        aggregate_goalie_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
+        aggregate_goalie_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Weekly')
+        aggregate_goalie_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug, aggregation_window='Daily')
+
+    # Aggregate by level
+    level_ids = session.query(Division.level_id).distinct().all()
+    level_ids = [level_id[0] for level_id in level_ids]
+    total_levels = len(level_ids)
+    processed_levels = 0
+    for level_id in level_ids:
+        if level_id is None:
+            continue
+        if human_id_to_debug is None:
+            print(f"\rProcessed {processed_levels}/{total_levels} levels ({(processed_levels/total_levels)*100:.2f}%)", end="")
+        processed_levels += 1
+        aggregate_goalie_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
+
+if __name__ == "__main__":
+    run_aggregate_goalie_stats()
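
aggregate_goalie_stats.py (and the other aggregators below) now import assign_ranks from hockey_blast_common_lib.utils instead of defining it inline, and the goalie calls pass a new reverse_rank flag for goals_allowed and goals_allowed_per_game. The utils.py hunk is not shown in this diff; a minimal sketch of the helper, extrapolated from the inline version removed above, could look like:

```python
# Sketch only: modeled on the inline assign_ranks removed in this diff, plus the
# reverse_rank keyword used by the new goalie calls. The shipped utils.py may differ.
def assign_ranks(stats_dict, field, reverse_rank=False):
    # Rank 1 goes to the largest value by default; with reverse_rank=True the
    # smallest value ranks first (useful for goals_allowed / goals_allowed_per_game).
    sorted_stats = sorted(stats_dict.items(), key=lambda x: x[1][field], reverse=not reverse_rank)
    for rank, (key, stat) in enumerate(sorted_stats, start=1):
        stats_dict[key][f'{field}_rank'] = rank
```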

hockey_blast_common_lib/aggregate_human_stats.py
@@ -6,17 +6,21 @@ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 from datetime import datetime, timedelta
 import sqlalchemy
-from hockey_blast_common_lib.models import Game, GameRoster
-from hockey_blast_common_lib.stats_models import OrgStatsHuman, DivisionStatsHuman, OrgStatsDailyHuman, OrgStatsWeeklyHuman, DivisionStatsDailyHuman, DivisionStatsWeeklyHuman
+from hockey_blast_common_lib.models import Game, GameRoster, Organization, Division
+from hockey_blast_common_lib.stats_models import OrgStatsHuman, DivisionStatsHuman, OrgStatsDailyHuman, OrgStatsWeeklyHuman, DivisionStatsDailyHuman, DivisionStatsWeeklyHuman, LevelStatsHuman
 from hockey_blast_common_lib.db_connection import create_session
 from sqlalchemy.sql import func, case
-from hockey_blast_common_lib.options import parse_args, MIN_GAMES_FOR_ORG_STATS, MIN_GAMES_FOR_DIVISION_STATS, not_human_names
-from hockey_blast_common_lib.utils import get_fake_human_for_stats, get_org_id_from_alias, get_human_ids_by_names, get_division_ids_for_last_season_in_all_leagues
+from hockey_blast_common_lib.options import parse_args, MIN_GAMES_FOR_ORG_STATS, MIN_GAMES_FOR_DIVISION_STATS, MIN_GAMES_FOR_LEVEL_STATS, not_human_names
+from hockey_blast_common_lib.utils import get_fake_human_for_stats, get_org_id_from_alias, get_human_ids_by_names, get_division_ids_for_last_season_in_all_leagues, get_all_division_ids_for_org
+from hockey_blast_common_lib.utils import assign_ranks
+from hockey_blast_common_lib.utils import get_start_datetime
 
 def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_filter_out, human_id_filter=None, aggregation_window=None):
     human_ids_to_filter = get_human_ids_by_names(session, names_to_filter_out)
 
     if aggregation_type == 'org':
+        aggregation_name = session.query(Organization).filter(Organization.id == aggregation_id).first().organization_name
+        print(f"Aggregating goalie stats for {aggregation_name} with window {aggregation_window}...")
         if aggregation_window == 'Daily':
             StatsModel = OrgStatsDailyHuman
         elif aggregation_window == 'Weekly':

@@ -34,9 +38,25 @@ def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_fi
         StatsModel = DivisionStatsHuman
         min_games = MIN_GAMES_FOR_DIVISION_STATS
         filter_condition = Game.division_id == aggregation_id
+    elif aggregation_type == 'level':
+        StatsModel = LevelStatsHuman
+        min_games = MIN_GAMES_FOR_LEVEL_STATS
+        filter_condition = Division.level_id == aggregation_id
+        # Add filter to only include games for the last 5 years
+        five_years_ago = datetime.now() - timedelta(days=5*365)
+        level_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP) >= five_years_ago
+        filter_condition = filter_condition & level_window_filter
     else:
         raise ValueError("Invalid aggregation type")
 
+    # Apply aggregation window filter
+    if aggregation_window:
+        last_game_datetime_str = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
+        start_datetime = get_start_datetime(last_game_datetime_str, aggregation_window)
+        if start_datetime:
+            game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime_str)
+            filter_condition = filter_condition & game_window_filter
+
     # Delete existing items from the stats table
     session.query(StatsModel).filter(StatsModel.aggregation_id == aggregation_id).delete()
     session.commit()

@@ -49,65 +69,94 @@ def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_fi
     # Filter games by status
     game_status_filter = Game.status.like('Final%')
 
-    #
-
-
-
-
-
-
-
-
-        else:
-            start_datetime = None
-        if start_datetime:
-            game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime)
-            filter_condition = filter_condition & game_window_filter
-
-    # Aggregate games played for each human in each role
-    human_stats = session.query(
+    # Aggregate skater games played
+    skater_stats = session.query(
+        GameRoster.human_id,
+        func.count(func.distinct(Game.id)).label('games_skater'),
+        func.array_agg(func.distinct(Game.id)).label('skater_game_ids')
+    ).join(Game, GameRoster.game_id == Game.id).join(Division, Game.division_id == Division.id).filter(filter_condition, game_status_filter, ~GameRoster.role.ilike('G'), *human_filter).group_by(GameRoster.human_id).all()
+
+    # Aggregate goalie games played
+    goalie_stats = session.query(
         GameRoster.human_id,
-        func.count(func.distinct(
-        func.
-
-    ).join(Game, GameRoster.game_id == Game.id).filter(filter_condition, game_status_filter, *human_filter).group_by(GameRoster.human_id).all()
+        func.count(func.distinct(Game.id)).label('games_goalie'),
+        func.array_agg(func.distinct(Game.id)).label('goalie_game_ids')
+    ).join(Game, GameRoster.game_id == Game.id).join(Division, Game.division_id == Division.id).filter(filter_condition, game_status_filter, GameRoster.role.ilike('G'), *human_filter).group_by(GameRoster.human_id).all()
 
     # Aggregate referee and scorekeeper games from Game table
     referee_stats = session.query(
         Game.referee_1_id.label('human_id'),
         func.count(func.distinct(Game.id)).label('games_referee'),
         func.array_agg(func.distinct(Game.id)).label('referee_game_ids')
-    ).filter(filter_condition, game_status_filter, *human_filter).group_by(Game.referee_1_id).all()
+    ).join(Division, Game.division_id == Division.id).filter(filter_condition, game_status_filter, *human_filter).group_by(Game.referee_1_id).all()
 
     referee_stats_2 = session.query(
         Game.referee_2_id.label('human_id'),
         func.count(func.distinct(Game.id)).label('games_referee'),
         func.array_agg(func.distinct(Game.id)).label('referee_game_ids')
-    ).filter(filter_condition, game_status_filter, *human_filter).group_by(Game.referee_2_id).all()
+    ).join(Division, Game.division_id == Division.id).filter(filter_condition, game_status_filter, *human_filter).group_by(Game.referee_2_id).all()
 
     scorekeeper_stats = session.query(
         Game.scorekeeper_id.label('human_id'),
         func.count(func.distinct(Game.id)).label('games_scorekeeper'),
         func.array_agg(func.distinct(Game.id)).label('scorekeeper_game_ids')
-    ).filter(filter_condition, game_status_filter, *human_filter).group_by(Game.scorekeeper_id).all()
+    ).join(Division, Game.division_id == Division.id).filter(filter_condition, game_status_filter, *human_filter).group_by(Game.scorekeeper_id).all()
 
     # Combine the results
     stats_dict = {}
-    for stat in
+    for stat in skater_stats:
         if stat.human_id in human_ids_to_filter:
             continue
         key = (aggregation_id, stat.human_id)
         stats_dict[key] = {
-            'games_total': stat.games_skater
+            'games_total': stat.games_skater,
             'games_skater': stat.games_skater,
-            'games_goalie':
+            'games_goalie': 0,
             'games_referee': 0,
             'games_scorekeeper': 0,
-            '
+            'skater_game_ids': stat.skater_game_ids,
+            'goalie_game_ids': [],
             'referee_game_ids': [],
-            'scorekeeper_game_ids': []
+            'scorekeeper_game_ids': [],
+            'first_game_id_skater': None,
+            'last_game_id_skater': None,
+            'first_game_id_goalie': None,
+            'last_game_id_goalie': None,
+            'first_game_id_referee': None,
+            'last_game_id_referee': None,
+            'first_game_id_scorekeeper': None,
+            'last_game_id_scorekeeper': None
         }
 
+    for stat in goalie_stats:
+        if stat.human_id in human_ids_to_filter:
+            continue
+        key = (aggregation_id, stat.human_id)
+        if key not in stats_dict:
+            stats_dict[key] = {
+                'games_total': stat.games_goalie,
+                'games_skater': 0,
+                'games_goalie': stat.games_goalie,
+                'games_referee': 0,
+                'games_scorekeeper': 0,
+                'skater_game_ids': [],
+                'goalie_game_ids': stat.goalie_game_ids,
+                'referee_game_ids': [],
+                'scorekeeper_game_ids': [],
+                'first_game_id_skater': None,
+                'last_game_id_skater': None,
+                'first_game_id_goalie': None,
+                'last_game_id_goalie': None,
+                'first_game_id_referee': None,
+                'last_game_id_referee': None,
+                'first_game_id_scorekeeper': None,
+                'last_game_id_scorekeeper': None
+            }
+        else:
+            stats_dict[key]['games_goalie'] += stat.games_goalie
+            stats_dict[key]['games_total'] += stat.games_goalie
+            stats_dict[key]['goalie_game_ids'] += stat.goalie_game_ids
+
     for stat in referee_stats:
         if stat.human_id in human_ids_to_filter:
             continue

@@ -119,9 +168,18 @@ def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_fi
                 'games_goalie': 0,
                 'games_referee': stat.games_referee,
                 'games_scorekeeper': 0,
-                '
+                'skater_game_ids': [],
+                'goalie_game_ids': [],
                 'referee_game_ids': stat.referee_game_ids,
-                'scorekeeper_game_ids': []
+                'scorekeeper_game_ids': [],
+                'first_game_id_skater': None,
+                'last_game_id_skater': None,
+                'first_game_id_goalie': None,
+                'last_game_id_goalie': None,
+                'first_game_id_referee': None,
+                'last_game_id_referee': None,
+                'first_game_id_scorekeeper': None,
+                'last_game_id_scorekeeper': None
             }
         else:
             stats_dict[key]['games_referee'] += stat.games_referee

@@ -139,9 +197,18 @@ def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_fi
                 'games_goalie': 0,
                 'games_referee': stat.games_referee,
                 'games_scorekeeper': 0,
-                '
+                'skater_game_ids': [],
+                'goalie_game_ids': [],
                 'referee_game_ids': stat.referee_game_ids,
-                'scorekeeper_game_ids': []
+                'scorekeeper_game_ids': [],
+                'first_game_id_skater': None,
+                'last_game_id_skater': None,
+                'first_game_id_goalie': None,
+                'last_game_id_goalie': None,
+                'first_game_id_referee': None,
+                'last_game_id_referee': None,
+                'first_game_id_scorekeeper': None,
+                'last_game_id_scorekeeper': None
             }
         else:
             stats_dict[key]['games_referee'] += stat.games_referee

@@ -159,9 +226,18 @@ def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_fi
                 'games_goalie': 0,
                 'games_referee': 0,
                 'games_scorekeeper': stat.games_scorekeeper,
-                '
+                'skater_game_ids': [],
+                'goalie_game_ids': [],
                 'referee_game_ids': [],
-                'scorekeeper_game_ids': stat.scorekeeper_game_ids
+                'scorekeeper_game_ids': stat.scorekeeper_game_ids,
+                'first_game_id_skater': None,
+                'last_game_id_skater': None,
+                'first_game_id_goalie': None,
+                'last_game_id_goalie': None,
+                'first_game_id_referee': None,
+                'last_game_id_referee': None,
+                'first_game_id_scorekeeper': None,
+                'last_game_id_scorekeeper': None
             }
         else:
            stats_dict[key]['games_scorekeeper'] += stat.games_scorekeeper

@@ -174,36 +250,61 @@ def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_fi
     # Calculate total_in_rank
     total_in_rank = len(stats_dict)
 
-    #
-
-
-
-
+    # Calculate number of items in rank per role
+    skaters_in_rank = len([stat for stat in stats_dict.values() if stat['games_skater'] > 0])
+    goalies_in_rank = len([stat for stat in stats_dict.values() if stat['games_goalie'] > 0])
+    referees_in_rank = len([stat for stat in stats_dict.values() if stat['games_referee'] > 0])
+    scorekeepers_in_rank = len([stat for stat in stats_dict.values() if stat['games_scorekeeper'] > 0])
+
+    # Filter out humans with less than min_games
+    stats_dict = {key: value for key, value in stats_dict.items() if value['games_total'] >= min_games}
 
+    # Assign ranks
     assign_ranks(stats_dict, 'games_total')
     assign_ranks(stats_dict, 'games_skater')
     assign_ranks(stats_dict, 'games_goalie')
     assign_ranks(stats_dict, 'games_referee')
     assign_ranks(stats_dict, 'games_scorekeeper')
 
-    # Populate first_game_id and last_game_id
+    # Populate first_game_id and last_game_id for each role
     for key, stat in stats_dict.items():
-        all_game_ids = stat['
+        all_game_ids = stat['skater_game_ids'] + stat['goalie_game_ids'] + stat['referee_game_ids'] + stat['scorekeeper_game_ids']
         if all_game_ids:
             first_game = session.query(Game).filter(Game.id.in_(all_game_ids)).order_by(Game.date, Game.time).first()
             last_game = session.query(Game).filter(Game.id.in_(all_game_ids)).order_by(Game.date.desc(), Game.time.desc()).first()
             stat['first_game_id'] = first_game.id if first_game else None
             stat['last_game_id'] = last_game.id if last_game else None
 
+        if stat['skater_game_ids']:
+            first_game_skater = session.query(Game).filter(Game.id.in_(stat['skater_game_ids'])).order_by(Game.date, Game.time).first()
+            last_game_skater = session.query(Game).filter(Game.id.in_(stat['skater_game_ids'])).order_by(Game.date.desc(), Game.time.desc()).first()
+            stat['first_game_id_skater'] = first_game_skater.id if first_game_skater else None
+            stat['last_game_id_skater'] = last_game_skater.id if last_game_skater else None
+
+        if stat['goalie_game_ids']:
+            first_game_goalie = session.query(Game).filter(Game.id.in_(stat['goalie_game_ids'])).order_by(Game.date, Game.time).first()
+            last_game_goalie = session.query(Game).filter(Game.id.in_(stat['goalie_game_ids'])).order_by(Game.date.desc(), Game.time.desc()).first()
+            stat['first_game_id_goalie'] = first_game_goalie.id if first_game_goalie else None
+            stat['last_game_id_goalie'] = last_game_goalie.id if last_game_goalie else None
+
+        if stat['referee_game_ids']:
+            first_game_referee = session.query(Game).filter(Game.id.in_(stat['referee_game_ids'])).order_by(Game.date, Game.time).first()
+            last_game_referee = session.query(Game).filter(Game.id.in_(stat['referee_game_ids'])).order_by(Game.date.desc(), Game.time.desc()).first()
+            stat['first_game_id_referee'] = first_game_referee.id if first_game_referee else None
+            stat['last_game_id_referee'] = last_game_referee.id if last_game_referee else None
+
+        if stat['scorekeeper_game_ids']:
+            first_game_scorekeeper = session.query(Game).filter(Game.id.in_(stat['scorekeeper_game_ids'])).order_by(Game.date, Game.time).first()
+            last_game_scorekeeper = session.query(Game).filter(Game.id.in_(stat['scorekeeper_game_ids'])).order_by(Game.date.desc(), Game.time.desc()).first()
+            stat['first_game_id_scorekeeper'] = first_game_scorekeeper.id if first_game_scorekeeper else None
+            stat['last_game_id_scorekeeper'] = last_game_scorekeeper.id if last_game_scorekeeper else None
+
     # Insert aggregated stats into the appropriate table with progress output
-    total_items = len(stats_dict)
     batch_size = 1000
     for i, (key, stat) in enumerate(stats_dict.items(), 1):
         aggregation_id, human_id = key
         if human_id_filter and human_id != human_id_filter:
             continue
-        if stat['games_total'] < min_games:
-            continue
 
         human_stat = StatsModel(
             aggregation_id=aggregation_id,

@@ -219,14 +320,25 @@ def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_fi
             games_scorekeeper=stat['games_scorekeeper'],
             games_scorekeeper_rank=stat['games_scorekeeper_rank'],
             total_in_rank=total_in_rank,
+            skaters_in_rank=skaters_in_rank,
+            goalies_in_rank=goalies_in_rank,
+            referees_in_rank=referees_in_rank,
+            scorekeepers_in_rank=scorekeepers_in_rank,
             first_game_id=stat['first_game_id'],
-            last_game_id=stat['last_game_id']
+            last_game_id=stat['last_game_id'],
+            first_game_id_skater=stat['first_game_id_skater'],
+            last_game_id_skater=stat['last_game_id_skater'],
+            first_game_id_goalie=stat['first_game_id_goalie'],
+            last_game_id_goalie=stat['last_game_id_goalie'],
+            first_game_id_referee=stat['first_game_id_referee'],
+            last_game_id_referee=stat['last_game_id_referee'],
+            first_game_id_scorekeeper=stat['first_game_id_scorekeeper'],
+            last_game_id_scorekeeper=stat['last_game_id_scorekeeper']
         )
         session.add(human_stat)
         # Commit in batches
         if i % batch_size == 0:
             session.commit()
-        print(f"\r{i}/{total_items} ({(i/total_items)*100:.2f}%)", end="")
     session.commit()
 
     # Fetch fake human ID for overall stats

@@ -240,12 +352,24 @@ def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_fi
         'games_referee': sum(stat['games_referee'] for stat in stats_dict.values()),
         'games_scorekeeper': sum(stat['games_scorekeeper'] for stat in stats_dict.values()),
         'total_in_rank': total_in_rank,
+        'skaters_in_rank': skaters_in_rank,
+        'goalies_in_rank': goalies_in_rank,
+        'referees_in_rank': referees_in_rank,
+        'scorekeepers_in_rank': scorekeepers_in_rank,
         'first_game_id': None,
-        'last_game_id': None
+        'last_game_id': None,
+        'first_game_id_skater': None,
+        'last_game_id_skater': None,
+        'first_game_id_goalie': None,
+        'last_game_id_goalie': None,
+        'first_game_id_referee': None,
+        'last_game_id_referee': None,
+        'first_game_id_scorekeeper': None,
+        'last_game_id_scorekeeper': None
     }
 
     # Populate first_game_id and last_game_id for overall stats
-    all_game_ids = [game_id for stat in stats_dict.values() for game_id in stat['
+    all_game_ids = [game_id for stat in stats_dict.values() for game_id in stat['skater_game_ids'] + stat['goalie_game_ids'] + stat['referee_game_ids'] + stat['scorekeeper_game_ids']]
     if all_game_ids:
         first_game = session.query(Game).filter(Game.id.in_(all_game_ids)).order_by(Game.date, Game.time).first()
         last_game = session.query(Game).filter(Game.id.in_(all_game_ids)).order_by(Game.date.desc(), Game.time.desc()).first()

@@ -267,29 +391,61 @@ def aggregate_human_stats(session, aggregation_type, aggregation_id, names_to_fi
         games_scorekeeper=overall_stats['games_scorekeeper'],
         games_scorekeeper_rank=0,  # Overall stats do not need a rank
         total_in_rank=overall_stats['total_in_rank'],
+        skaters_in_rank=overall_stats['skaters_in_rank'],
+        goalies_in_rank=overall_stats['goalies_in_rank'],
+        referees_in_rank=overall_stats['referees_in_rank'],
+        scorekeepers_in_rank=overall_stats['scorekeepers_in_rank'],
        first_game_id=overall_stats['first_game_id'],
-        last_game_id=overall_stats['last_game_id']
+        last_game_id=overall_stats['last_game_id'],
+        first_game_id_skater=overall_stats['first_game_id_skater'],
+        last_game_id_skater=overall_stats['last_game_id_skater'],
+        first_game_id_goalie=overall_stats['first_game_id_goalie'],
+        last_game_id_goalie=overall_stats['last_game_id_goalie'],
+        first_game_id_referee=overall_stats['first_game_id_referee'],
+        last_game_id_referee=overall_stats['last_game_id_referee'],
+        first_game_id_scorekeeper=overall_stats['first_game_id_scorekeeper'],
+        last_game_id_scorekeeper=overall_stats['last_game_id_scorekeeper']
     )
     session.add(overall_human_stat)
     session.commit()
 
-
-
+def run_aggregate_human_stats():
+    session = create_session("boss")
+    human_id_to_debug = None
+
+    # Aggregate by Org and Division inside Org
+    org_ids = session.query(Organization.id).all()
+    org_ids = [org_id[0] for org_id in org_ids]
+
+    for org_id in org_ids:
+        division_ids = get_all_division_ids_for_org(session, org_id)
+        print(f"Aggregating human stats for {len(division_ids)} divisions in org_id {org_id}...")
+        total_divisions = len(division_ids)
+        processed_divisions = 0
+        for division_id in division_ids:
+            aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
+            aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Weekly')
+            aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Daily')
+            processed_divisions += 1
+            if human_id_to_debug is None:
+                print(f"\rProcessed {processed_divisions}/{total_divisions} divisions ({(processed_divisions/total_divisions)*100:.2f}%)", end="")
+        print("")
+        aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
+        aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Weekly')
+        aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug, aggregation_window='Daily')
+
+    # Aggregate by level
+    level_ids = session.query(Division.level_id).distinct().all()
+    level_ids = [level_id[0] for level_id in level_ids]
+    total_levels = len(level_ids)
+    processed_levels = 0
+    for level_id in level_ids:
+        if level_id is None:
+            continue
+        if human_id_to_debug is None:
+            print(f"\rProcessed {processed_levels}/{total_levels} levels ({(processed_levels/total_levels)*100:.2f}%)", end="")
+        processed_levels += 1
+        aggregate_human_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, human_id_filter=human_id_to_debug)
 
-# Example usage
 if __name__ == "__main__":
-
-    org_alias=args.org
-    session = create_session("boss")
-    org_id = get_org_id_from_alias(session, org_alias)
-
-    division_ids = get_division_ids_for_last_season_in_all_leagues(session, org_id)
-    print(f"Aggregating human stats for {len(division_ids)} divisions in {org_alias}...")
-    for division_id in division_ids:
-        aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=None)
-        aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=None, aggregation_window='Daily')
-        aggregate_human_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, human_id_filter=None, aggregation_window='Weekly')
-
-    aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=None)
-    aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=None, aggregation_window='Daily')
-    aggregate_human_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, human_id_filter=None, aggregation_window='Weekly')
+    run_aggregate_human_stats()
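
All four aggregators replace their hand-rolled window logic with get_start_datetime(last_game_datetime_str, aggregation_window) from utils (utils.py changed by +20/−4 lines in this release, but its hunk is not shown). Based on the removed code, which subtracted timedelta(weeks=1) for the 'Weekly' window, the helper plausibly looks like the sketch below; the timestamp format and the 'Daily' offset are assumptions.

```python
from datetime import datetime, timedelta

# Hedged sketch of get_start_datetime; not taken from the package source.
def get_start_datetime(last_game_datetime_str, aggregation_window):
    if not last_game_datetime_str:
        return None
    # The aggregators build this string with func.concat(Game.date, ' ', Game.time);
    # the exact format below is an assumption.
    last_game_datetime = datetime.strptime(last_game_datetime_str, "%Y-%m-%d %H:%M:%S")
    if aggregation_window == 'Daily':
        return last_game_datetime - timedelta(days=1)   # assumed one-day window
    if aggregation_window == 'Weekly':
        return last_game_datetime - timedelta(weeks=1)  # matches the removed inline code
    return None
```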

hockey_blast_common_lib/aggregate_referee_stats.py
@@ -6,17 +6,21 @@ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 from datetime import datetime, timedelta
 import sqlalchemy
-from hockey_blast_common_lib.models import Game, Penalty
-from hockey_blast_common_lib.stats_models import OrgStatsReferee, DivisionStatsReferee,OrgStatsWeeklyReferee, OrgStatsDailyReferee, DivisionStatsWeeklyReferee, DivisionStatsDailyReferee
+from hockey_blast_common_lib.models import Game, Penalty, Organization, Division
+from hockey_blast_common_lib.stats_models import OrgStatsReferee, DivisionStatsReferee, OrgStatsWeeklyReferee, OrgStatsDailyReferee, DivisionStatsWeeklyReferee, DivisionStatsDailyReferee, LevelStatsReferee
 from hockey_blast_common_lib.db_connection import create_session
 from sqlalchemy.sql import func, case
-from hockey_blast_common_lib.options import parse_args, MIN_GAMES_FOR_ORG_STATS, MIN_GAMES_FOR_DIVISION_STATS, not_human_names
-from hockey_blast_common_lib.utils import get_org_id_from_alias, get_human_ids_by_names, get_division_ids_for_last_season_in_all_leagues
+from hockey_blast_common_lib.options import parse_args, MIN_GAMES_FOR_ORG_STATS, MIN_GAMES_FOR_DIVISION_STATS, MIN_GAMES_FOR_LEVEL_STATS, not_human_names
+from hockey_blast_common_lib.utils import get_org_id_from_alias, get_human_ids_by_names, get_division_ids_for_last_season_in_all_leagues, get_all_division_ids_for_org
+from hockey_blast_common_lib.utils import assign_ranks
+from hockey_blast_common_lib.utils import get_start_datetime
 
 def aggregate_referee_stats(session, aggregation_type, aggregation_id, names_to_filter_out, aggregation_window=None):
     human_ids_to_filter = get_human_ids_by_names(session, names_to_filter_out)
 
     if aggregation_type == 'org':
+        aggregation_name = session.query(Organization).filter(Organization.id == aggregation_id).first().organization_name
+        print(f"Aggregating referee stats for {aggregation_name} with window {aggregation_window}...")
         if aggregation_window == 'Daily':
             StatsModel = OrgStatsDailyReferee
         elif aggregation_window == 'Weekly':

@@ -34,23 +38,24 @@ def aggregate_referee_stats(session, aggregation_type, aggregation_id, names_to_
         StatsModel = DivisionStatsReferee
         min_games = MIN_GAMES_FOR_DIVISION_STATS
         filter_condition = Game.division_id == aggregation_id
+    elif aggregation_type == 'level':
+        StatsModel = LevelStatsReferee
+        min_games = MIN_GAMES_FOR_LEVEL_STATS
+        filter_condition = Division.level_id == aggregation_id
+        # Add filter to only include games for the last 5 years
+        five_years_ago = datetime.now() - timedelta(days=5*365)
+        level_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP) >= five_years_ago
+        filter_condition = filter_condition & level_window_filter
     else:
         raise ValueError("Invalid aggregation type")
 
     # Apply aggregation window filter
     if aggregation_window:
-
-
-
-
-
-        elif aggregation_window == 'Weekly':
-            start_datetime = last_game_datetime - timedelta(weeks=1)
-        else:
-            start_datetime = None
-        if start_datetime:
-            game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime)
-            filter_condition = filter_condition & game_window_filter
+        last_game_datetime_str = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
+        start_datetime = get_start_datetime(last_game_datetime_str, aggregation_window)
+        if start_datetime:
+            game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime_str)
+            filter_condition = filter_condition & game_window_filter
 
     # Delete existing items from the stats table
     session.query(StatsModel).filter(StatsModel.aggregation_id == aggregation_id).delete()

@@ -117,36 +122,14 @@ def aggregate_referee_stats(session, aggregation_type, aggregation_id, names_to_
     for stat in penalties_given_stats:
         if stat.referee_1_id and stat.referee_1_id not in human_ids_to_filter:
             key = (aggregation_id, stat.referee_1_id)
-            if key
-                stats_dict[key] = {
-                    'games_reffed': 0,
-                    'penalties_given': stat.penalties_given / 2,
-                    'gm_given': stat.gm_given / 2,
-                    'penalties_per_game': 0.0,
-                    'gm_per_game': 0.0,
-                    'game_ids': [stat.game_id],
-                    'first_game_id': None,
-                    'last_game_id': None
-                }
-            else:
+            if key in stats_dict:
                 stats_dict[key]['penalties_given'] += stat.penalties_given / 2
                 stats_dict[key]['gm_given'] += stat.gm_given / 2
                 stats_dict[key]['game_ids'].append(stat.game_id)
 
         if stat.referee_2_id and stat.referee_2_id not in human_ids_to_filter:
             key = (aggregation_id, stat.referee_2_id)
-            if key
-                stats_dict[key] = {
-                    'games_reffed': 0,
-                    'penalties_given': stat.penalties_given / 2,
-                    'gm_given': stat.gm_given / 2,
-                    'penalties_per_game': 0.0,
-                    'gm_per_game': 0.0,
-                    'game_ids': [stat.game_id],
-                    'first_game_id': None,
-                    'last_game_id': None
-                }
-            else:
+            if key in stats_dict:
                 stats_dict[key]['penalties_given'] += stat.penalties_given / 2
                 stats_dict[key]['gm_given'] += stat.gm_given / 2
                 stats_dict[key]['game_ids'].append(stat.game_id)

@@ -160,6 +143,9 @@ def aggregate_referee_stats(session, aggregation_type, aggregation_id, names_to_
     # Ensure all keys have valid human_id values
     stats_dict = {key: value for key, value in stats_dict.items() if key[1] is not None}
 
+    # Filter out referees with less than min_games
+    stats_dict = {key: value for key, value in stats_dict.items() if value['games_reffed'] >= min_games}
+
     # Populate first_game_id and last_game_id
     for key, stat in stats_dict.items():
         all_game_ids = stat['game_ids']

@@ -173,11 +159,6 @@ def aggregate_referee_stats(session, aggregation_type, aggregation_id, names_to_
     total_in_rank = len(stats_dict)
 
     # Assign ranks
-    def assign_ranks(stats_dict, field):
-        sorted_stats = sorted(stats_dict.items(), key=lambda x: x[1][field], reverse=True)
-        for rank, (key, stat) in enumerate(sorted_stats, start=1):
-            stats_dict[key][f'{field}_rank'] = rank
-
     assign_ranks(stats_dict, 'games_reffed')
     assign_ranks(stats_dict, 'penalties_given')
     assign_ranks(stats_dict, 'penalties_per_game')

@@ -189,8 +170,6 @@ def aggregate_referee_stats(session, aggregation_type, aggregation_id, names_to_
     batch_size = 1000
     for i, (key, stat) in enumerate(stats_dict.items(), 1):
         aggregation_id, human_id = key
-        if stat['games_reffed'] < min_games:
-            continue
         referee_stat = StatsModel(
             aggregation_id=aggregation_id,
             human_id=human_id,

@@ -212,23 +191,45 @@ def aggregate_referee_stats(session, aggregation_type, aggregation_id, names_to_
         # Commit in batches
         if i % batch_size == 0:
             session.commit()
-        print(f"\r{i}/{total_items} ({(i/total_items)*100:.2f}%)", end="")
     session.commit()
-    print(f"\r{total_items}/{total_items} (100.00%)")
-    print("\nDone.")
 
-
-if __name__ == "__main__":
-    args = parse_args()
-    org_alias = args.org
+def run_aggregate_referee_stats():
     session = create_session("boss")
-
-
-
-
-
-
-
-
-
-
+    human_id_to_debug = None
+
+    # Get all org_id present in the Organization table
+    org_ids = session.query(Organization.id).all()
+    org_ids = [org_id[0] for org_id in org_ids]
+
+    for org_id in org_ids:
+        division_ids = get_all_division_ids_for_org(session, org_id)
+        print(f"Aggregating referee stats for {len(division_ids)} divisions in org_id {org_id}...")
+        total_divisions = len(division_ids)
+        processed_divisions = 0
+        for division_id in division_ids:
+            aggregate_referee_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names)
+            aggregate_referee_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, aggregation_window='Weekly')
+            aggregate_referee_stats(session, aggregation_type='division', aggregation_id=division_id, names_to_filter_out=not_human_names, aggregation_window='Daily')
+            processed_divisions += 1
+            if human_id_to_debug is None:
+                print(f"\rProcessed {processed_divisions}/{total_divisions} divisions ({(processed_divisions/total_divisions)*100:.2f}%)", end="")
+
+        aggregate_referee_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names)
+        aggregate_referee_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, aggregation_window='Weekly')
+        aggregate_referee_stats(session, aggregation_type='org', aggregation_id=org_id, names_to_filter_out=not_human_names, aggregation_window='Daily')
+
+    # Aggregate by level
+    level_ids = session.query(Division.level_id).distinct().all()
+    level_ids = [level_id[0] for level_id in level_ids]
+    total_levels = len(level_ids)
+    processed_levels = 0
+    for level_id in level_ids:
+        if level_id is None:
+            continue
+        if human_id_to_debug is None:
+            print(f"\rProcessed {processed_levels}/{total_levels} levels ({(processed_levels/total_levels)*100:.2f}%)", end="")
+        processed_levels += 1
+        aggregate_referee_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names)
+
+if __name__ == "__main__":
+    run_aggregate_referee_stats()

hockey_blast_common_lib/aggregate_skater_stats.py
@@ -12,6 +12,7 @@ from hockey_blast_common_lib.db_connection import create_session
 from sqlalchemy.sql import func, case
 from hockey_blast_common_lib.options import not_human_names, parse_args, MIN_GAMES_FOR_ORG_STATS, MIN_GAMES_FOR_DIVISION_STATS, MIN_GAMES_FOR_LEVEL_STATS
 from hockey_blast_common_lib.utils import get_org_id_from_alias, get_human_ids_by_names, get_division_ids_for_last_season_in_all_leagues, get_all_division_ids_for_org
+from hockey_blast_common_lib.utils import get_start_datetime
 from sqlalchemy import func, case, and_
 from collections import defaultdict
 

@@ -21,6 +22,9 @@ def aggregate_skater_stats(session, aggregation_type, aggregation_id, names_to_f
     # Get the name of the aggregation, for debug purposes
     if aggregation_type == 'org':
         aggregation_name = session.query(Organization).filter(Organization.id == aggregation_id).first().organization_name
+        print(f"Aggregating skater stats for {aggregation_name} with window {aggregation_window}...")
+
+        aggregation_name = session.query(Organization).filter(Organization.id == aggregation_id).first().organization_name
     elif aggregation_type == 'division':
         aggregation_name = session.query(Division).filter(Division.id == aggregation_id).first().level
     elif aggregation_type == 'level':

@@ -59,18 +63,11 @@ def aggregate_skater_stats(session, aggregation_type, aggregation_id, names_to_f
 
     # Apply aggregation window filter
     if aggregation_window:
-
-
-
-
-
-        elif aggregation_window == 'Weekly':
-            start_datetime = last_game_datetime - timedelta(weeks=1)
-        else:
-            start_datetime = None
-        if start_datetime:
-            game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime)
-            filter_condition = filter_condition & game_window_filter
+        last_game_datetime_str = session.query(func.max(func.concat(Game.date, ' ', Game.time))).filter(filter_condition, Game.status.like('Final%')).scalar()
+        start_datetime = get_start_datetime(last_game_datetime_str, aggregation_window)
+        if start_datetime:
+            game_window_filter = func.cast(func.concat(Game.date, ' ', Game.time), sqlalchemy.types.TIMESTAMP).between(start_datetime, last_game_datetime_str)
+            filter_condition = filter_condition & game_window_filter
 
     # Delete existing items from the stats table
     session.query(StatsModel).filter(StatsModel.aggregation_id == aggregation_id).delete()

@@ -254,16 +251,11 @@ def aggregate_skater_stats(session, aggregation_type, aggregation_id, names_to_f
         # Commit in batches
         if i % batch_size == 0:
             session.commit()
-            if debug_human_id is None:
-                print(f"\r{i}/{total_items} ({(i/total_items)*100:.2f}%)", end="")
-
     session.commit()
-    if debug_human_id is None:
-        print(f"\r{total_items}/{total_items} (100.00%)")
 
-
+def run_aggregate_skater_stats():
     session = create_session("boss")
-    human_id_to_debug =
+    human_id_to_debug = None
 
     # Get all org_id present in the Organization table
     org_ids = session.query(Organization.id).all()

@@ -298,3 +290,6 @@ if __name__ == "__main__":
             print(f"\rProcessed {processed_levels}/{total_levels} levels ({(processed_levels/total_levels)*100:.2f}%)", end="")
         processed_levels += 1
         aggregate_skater_stats(session, aggregation_type='level', aggregation_id=level_id, names_to_filter_out=not_human_names, debug_human_id=human_id_to_debug)
+
+if __name__ == "__main__":
+    run_aggregate_skater_stats()

hockey_blast_common_lib/hockey_blast_sample_backup.sql.gz: binary file differs (no text diff shown).

hockey_blast_common_lib/models.py
@@ -224,6 +224,7 @@ class Organization(db.Model):
     id = db.Column(db.Integer, primary_key=True)
     alias = db.Column(db.String(100), unique=True)
     organization_name = db.Column(db.String(100), unique=True)
+    website = db.Column(db.String(100), nullable=True)  # New field for website
 
 class Penalty(db.Model):
     __tablename__ = 'penalties'

hockey_blast_common_lib/skills_in_divisions.py
@@ -137,7 +137,7 @@ def populate_league_ids():
     session.commit()
     print("League IDs have been populated into the Season table.")
 
-if __name__ == "__main__":
+#if __name__ == "__main__":
     # delete_all_skills()
     #fill_seed_skills()
     #populate_season_ids() # Call the function to populate season_ids
hockey_blast_common_lib/stats_models.py
CHANGED
@@ -17,8 +17,20 @@ class BaseStatsHuman(db.Model):
     games_goalie = db.Column(db.Integer, default=0)
     games_goalie_rank = db.Column(db.Integer, default=0)
     total_in_rank = db.Column(db.Integer, default=0)
-
-
+    skaters_in_rank = db.Column(db.Integer, default=0)
+    goalies_in_rank = db.Column(db.Integer, default=0)
+    referees_in_rank = db.Column(db.Integer, default=0)
+    scorekeepers_in_rank = db.Column(db.Integer, default=0)
+    first_game_id = db.Column(db.Integer, db.ForeignKey('games.id'), nullable=True)
+    last_game_id = db.Column(db.Integer, db.ForeignKey('games.id'), nullable=True)
+    first_game_id_skater = db.Column(db.Integer, db.ForeignKey('games.id'), nullable=True)
+    last_game_id_skater = db.Column(db.Integer, db.ForeignKey('games.id'), nullable=True)
+    first_game_id_goalie = db.Column(db.Integer, db.ForeignKey('games.id'), nullable=True)
+    last_game_id_goalie = db.Column(db.Integer, db.ForeignKey('games.id'), nullable=True)
+    first_game_id_referee = db.Column(db.Integer, db.ForeignKey('games.id'), nullable=True)
+    last_game_id_referee = db.Column(db.Integer, db.ForeignKey('games.id'), nullable=True)
+    first_game_id_scorekeeper = db.Column(db.Integer, db.ForeignKey('games.id'), nullable=True)
+    last_game_id_scorekeeper = db.Column(db.Integer, db.ForeignKey('games.id'), nullable=True)
 
     @declared_attr
     def __table_args__(cls):
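The new *_in_rank counters record the population size behind each rank, which lets callers turn a stored rank into a percentile without another query. A small illustrative helper, not part of the package:

def rank_to_percentile(rank, population):
    # rank 1 is best; population is e.g. goalies_in_rank for a goalie stat
    if not rank or not population:
        return None
    return round(100.0 * (population - rank + 1) / population, 1)

print(rank_to_percentile(5, 50))   # 92.0 -> ranked 5th of 50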
@@ -196,6 +208,19 @@ class DivisionStatsHuman(BaseStatsHuman):
     def get_aggregation_column(cls):
         return 'division_id'
 
+class LevelStatsHuman(BaseStatsHuman):
+    __tablename__ = 'level_stats_human'
+    level_id = db.Column(db.Integer, db.ForeignKey('levels.id'), nullable=False)
+    aggregation_id = synonym('level_id')
+
+    @declared_attr
+    def aggregation_type(cls):
+        return 'level'
+
+    @classmethod
+    def get_aggregation_column(cls):
+        return 'level_id'
+
 class OrgStatsSkater(BaseStatsSkater):
     __tablename__ = 'org_stats_skater'
     org_id = db.Column(db.Integer, db.ForeignKey('organizations.id'), nullable=False)
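Each new Level* model follows the same pattern as the Org*/Division* ones: a scoping foreign key, an aggregation_id synonym, and a get_aggregation_column() classmethod. That shared interface allows generic code over any of the stats tables; a sketch follows (the helper and the literal id 42 are illustrative, not package code):

from hockey_blast_common_lib.db_connection import create_session
from hockey_blast_common_lib.stats_models import LevelStatsHuman

def rows_for_aggregation(session, stats_model, aggregation_id):
    # Resolve the model's own scoping column (org_id / division_id / level_id)
    # instead of hard-coding it at the call site.
    column = getattr(stats_model, stats_model.get_aggregation_column())
    return session.query(stats_model).filter(column == aggregation_id).all()

session = create_session("frontend")
level_rows = rows_for_aggregation(session, LevelStatsHuman, aggregation_id=42)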
@@ -262,6 +287,19 @@ class DivisionStatsGoalie(BaseStatsGoalie):
     def get_aggregation_column(cls):
         return 'division_id'
 
+class LevelStatsGoalie(BaseStatsGoalie):
+    __tablename__ = 'level_stats_goalie'
+    level_id = db.Column(db.Integer, db.ForeignKey('levels.id'), nullable=False)
+    aggregation_id = synonym('level_id')
+
+    @declared_attr
+    def aggregation_type(cls):
+        return 'level'
+
+    @classmethod
+    def get_aggregation_column(cls):
+        return 'level_id'
+
 
 class OrgStatsReferee(BaseStatsReferee):
     __tablename__ = 'org_stats_referee'
@@ -289,6 +327,19 @@ class DivisionStatsReferee(BaseStatsReferee):
     def get_aggregation_column(cls):
         return 'division_id'
 
+class LevelStatsReferee(BaseStatsReferee):
+    __tablename__ = 'level_stats_referee'
+    level_id = db.Column(db.Integer, db.ForeignKey('levels.id'), nullable=False)
+    aggregation_id = synonym('level_id')
+
+    @declared_attr
+    def aggregation_type(cls):
+        return 'level'
+
+    @classmethod
+    def get_aggregation_column(cls):
+        return 'level_id'
+
 
 class OrgStatsScorekeeper(BaseStatsScorekeeper):
     __tablename__ = 'org_stats_scorekeeper'
hockey_blast_common_lib/stats_utils.py
ADDED
File without changes (empty file)
hockey_blast_common_lib/utils.py
CHANGED
@@ -1,5 +1,6 @@
 import sys
 import os
+from datetime import datetime, timedelta
 
 # Add the package directory to the Python path
 sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -11,9 +12,9 @@ from sqlalchemy.sql import func
 def get_org_id_from_alias(session, org_alias):
     # Predefined organizations
     predefined_organizations = [
-        {"id": 1, "organization_name": "Sharks Ice", "alias": "sharksice"},
-        {"id": 2, "organization_name": "TriValley Ice", "alias": "tvice"},
-        {"id": 3, "organization_name": "CAHA", "alias": "caha"}
+        {"id": 1, "organization_name": "Sharks Ice", "alias": "sharksice", "website": "https://www.sharksice.com"},
+        {"id": 2, "organization_name": "TriValley Ice", "alias": "tvice", "website": "https://www.trivalleyice.com"},
+        {"id": 3, "organization_name": "CAHA", "alias": "caha", "website": "https://www.caha.com"}
     ]
 
     # Check if the organization exists
@@ -25,7 +26,7 @@ def get_org_id_from_alias(session, org_alias):
     for org in predefined_organizations:
         existing_org = session.query(Organization).filter_by(id=org["id"]).first()
         if not existing_org:
-            new_org = Organization(id=org["id"], organization_name=org["organization_name"], alias=org["alias"])
+            new_org = Organization(id=org["id"], organization_name=org["organization_name"], alias=org["alias"], website=org["website"])
             session.add(new_org)
             session.commit()
 
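Usage stays the same; only the seeded rows gain a website. A minimal sketch, assuming the function returns the organization's id as its name suggests:

from hockey_blast_common_lib.db_connection import create_session
from hockey_blast_common_lib.utils import get_org_id_from_alias

session = create_session("frontend")
org_id = get_org_id_from_alias(session, "tvice")   # seeds the predefined row on first use if missing
print(f"TriValley Ice -> org_id {org_id}")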
@@ -82,6 +83,21 @@ def get_fake_human_for_stats(session):
 
     return human.id
 
+def get_start_datetime(last_game_datetime_str, aggregation_window):
+    if last_game_datetime_str:
+        last_game_datetime = datetime.strptime(last_game_datetime_str, '%Y-%m-%d %H:%M:%S')
+        if aggregation_window == 'Daily':
+            # From 10AM till midnight, 14 hours to avoid last day games
+            return last_game_datetime - timedelta(hours=14)
+        elif aggregation_window == 'Weekly':
+            return last_game_datetime - timedelta(weeks=1)
+    return None
+
+def assign_ranks(stats_dict, field, reverse_rank=False):
+    sorted_stats = sorted(stats_dict.items(), key=lambda x: x[1][field], reverse=not reverse_rank)
+    for rank, (key, stat) in enumerate(sorted_stats, start=1):
+        stats_dict[key][f'{field}_rank'] = rank
+
 #TEST DB CONNECTION, PERMISSIONS...
 # from hockey_blast_common_lib.db_connection import create_session
 # session = create_session("frontend")
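Both helpers are pure functions, so they are easy to exercise directly; the numbers below are made up for illustration:

from hockey_blast_common_lib.utils import get_start_datetime, assign_ranks

# Weekly window ending at the given last-game timestamp
print(get_start_datetime('2024-11-03 21:15:00', 'Weekly'))   # 2024-10-27 21:15:00

# Rank three skaters by points; by default higher values rank better
stats = {
    101: {'points': 12},
    102: {'points': 30},
    103: {'points': 19},
}
assign_ranks(stats, 'points')
print(stats[102]['points_rank'])   # 1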
hockey_blast_common_lib-0.1.34.dist-info/RECORD
ADDED
@@ -0,0 +1,23 @@
+hockey_blast_common_lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+hockey_blast_common_lib/aggregate_all_stats.py,sha256=MUjT23mdOMfCTx-kRSY-LGrLHZ9HNlR6OMqv5KLdzR4,1056
+hockey_blast_common_lib/aggregate_goalie_stats.py,sha256=xDjCYaGLXh5UgkhoUvWKcsnv__QmbCzqLvYNHREFyFY,11937
+hockey_blast_common_lib/aggregate_human_stats.py,sha256=di3h_nQjgMaNnGDPW6LQavnrYOxnZDMohNZEh3ixYNM,23700
+hockey_blast_common_lib/aggregate_referee_stats.py,sha256=ATKBId0Hhzy0FVGmAFDTMgRftYNBDOOA9Z2yPhFA06U,11546
+hockey_blast_common_lib/aggregate_skater_stats.py,sha256=LY6temy5zzqJ85ENymt9nEfpKHqL8Isk_kozfWqHNFI,15803
+hockey_blast_common_lib/assign_skater_skill.py,sha256=p-0fbodGpM8BCjKHDpxNb7BH2FcIlBsJwON844KNrUY,1817
+hockey_blast_common_lib/db_connection.py,sha256=HvPxDvOj7j5H85RfslGvHVNevfg7mKCd0syJ6NX21mU,1890
+hockey_blast_common_lib/dump_sample_db.sh,sha256=MHPA-Ciod7wsvAlMbRtXFiyajgnEqU1xR59sJQ9UWR0,738
+hockey_blast_common_lib/hockey_blast_sample_backup.sql.gz,sha256=9xsTyRZZa0eVIvEwa1IdsogVX83lidXQhTm2nOGUU3M,1033689
+hockey_blast_common_lib/models.py,sha256=Gh0WgYdZE6ClF_48t7zk3G0CItf02yHVzi43afshAgQ,16064
+hockey_blast_common_lib/options.py,sha256=6na8fo-5A2RBPpd_h-7dsqetOLSLoNEJg1QMYgl4jNs,792
+hockey_blast_common_lib/restore_sample_db.sh,sha256=u2zKazC6vNMULkpYzI64nlneCWaGUtDHPBAU-gWgRbw,1861
+hockey_blast_common_lib/skills_in_divisions.py,sha256=RR-x-D7V_lQX--2a2GHEYHtATtIOj2ACpvcEUDzVgkY,7487
+hockey_blast_common_lib/skills_propagation.py,sha256=x6yy7fJ6IX3YiHqiP_v7-p_S2Expb8JJ-mWuajEFBdY,16388
+hockey_blast_common_lib/stats_models.py,sha256=qvkt-XRFb4ZW7yBj7vltedzUS_YwWagm_efMRcsioSA,25120
+hockey_blast_common_lib/stats_utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+hockey_blast_common_lib/utils.py,sha256=EDMBiWpF2bNCC1xc1WUNahQlUsG8bv1Wh0Eal3Iw2ls,4689
+hockey_blast_common_lib/wsgi.py,sha256=7LGUzioigviJp-EUhSEaQcd4jBae0mxbkyBscQfZhlc,730
+hockey_blast_common_lib-0.1.34.dist-info/METADATA,sha256=ROphG8zkwmNGOdHFyrafZ2cJAUBEuvr_-LZ6rBxMPEY,318
+hockey_blast_common_lib-0.1.34.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+hockey_blast_common_lib-0.1.34.dist-info/top_level.txt,sha256=wIR4LIkE40npoA2QlOdfCYlgFeGbsHR8Z6r0h46Vtgc,24
+hockey_blast_common_lib-0.1.34.dist-info/RECORD,,
hockey_blast_common_lib-0.1.32.dist-info/RECORD
REMOVED
@@ -1,21 +0,0 @@
-hockey_blast_common_lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-hockey_blast_common_lib/aggregate_goalie_stats.py,sha256=d2qav46Rg2DNIYRj_Ubj1kpQmoPUJHKiwEWOVU25nD4,8742
-hockey_blast_common_lib/aggregate_human_stats.py,sha256=88OMhTgQjzc9xIakf6kW9_lZwbSXkpsZy8C0pX-Wlq8,14229
-hockey_blast_common_lib/aggregate_referee_stats.py,sha256=A0PTyEbPUjqfXxlJCDOVioFaQk9AyjjhiWEuRuu35v0,11036
-hockey_blast_common_lib/aggregate_skater_stats.py,sha256=jkBD5u-gJc1DTDIEuxM_qymKsrWtLagFKeEn__2rFgU,16009
-hockey_blast_common_lib/assign_skater_skill.py,sha256=p-0fbodGpM8BCjKHDpxNb7BH2FcIlBsJwON844KNrUY,1817
-hockey_blast_common_lib/db_connection.py,sha256=HvPxDvOj7j5H85RfslGvHVNevfg7mKCd0syJ6NX21mU,1890
-hockey_blast_common_lib/dump_sample_db.sh,sha256=MHPA-Ciod7wsvAlMbRtXFiyajgnEqU1xR59sJQ9UWR0,738
-hockey_blast_common_lib/hockey_blast_sample_backup.sql.gz,sha256=fO5SsdPB6XYptPLx1rD2VVSTySLnJmyubeSS0A0HGyw,1033692
-hockey_blast_common_lib/models.py,sha256=ebRnnvDOVNDfqAp8CA8u7uk3LCOfI3iUwOpHgzoBy0U,15984
-hockey_blast_common_lib/options.py,sha256=6na8fo-5A2RBPpd_h-7dsqetOLSLoNEJg1QMYgl4jNs,792
-hockey_blast_common_lib/restore_sample_db.sh,sha256=u2zKazC6vNMULkpYzI64nlneCWaGUtDHPBAU-gWgRbw,1861
-hockey_blast_common_lib/skills_in_divisions.py,sha256=LV6f2nzmRi1IHl1_EDVf61I7nHUNc0xYuNL4dBsZI_U,7486
-hockey_blast_common_lib/skills_propagation.py,sha256=x6yy7fJ6IX3YiHqiP_v7-p_S2Expb8JJ-mWuajEFBdY,16388
-hockey_blast_common_lib/stats_models.py,sha256=35-6iz1r8MJcmzlyIlJy0uHgWh8oltyf-3H61ocal3o,23048
-hockey_blast_common_lib/utils.py,sha256=odDJWCK0BgbResXeoUzxbVChjaxcXr168ZxbrAw3L_8,3752
-hockey_blast_common_lib/wsgi.py,sha256=7LGUzioigviJp-EUhSEaQcd4jBae0mxbkyBscQfZhlc,730
-hockey_blast_common_lib-0.1.32.dist-info/METADATA,sha256=DZgI2eRmWQGFrcaxenwqrWLZdWmgDK5LbU57iLTlI0w,318
-hockey_blast_common_lib-0.1.32.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
-hockey_blast_common_lib-0.1.32.dist-info/top_level.txt,sha256=wIR4LIkE40npoA2QlOdfCYlgFeGbsHR8Z6r0h46Vtgc,24
-hockey_blast_common_lib-0.1.32.dist-info/RECORD,,
{hockey_blast_common_lib-0.1.32.dist-info → hockey_blast_common_lib-0.1.34.dist-info}/WHEEL
RENAMED
File without changes
{hockey_blast_common_lib-0.1.32.dist-info → hockey_blast_common_lib-0.1.34.dist-info}/top_level.txt
RENAMED
File without changes