wsba-hockey 0.1.0__tar.gz → 0.1.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: wsba_hockey
3
- Version: 0.1.0
3
+ Version: 0.1.2
4
4
  Summary: WeakSide Breakout's complete Python package of access to hockey data, primarily including the scraping of National Hockey League schedule, play-by-play, and shifts information.
5
5
  Author-email: Owen Singh <owenbksingh@gmail.com>
6
6
  Project-URL: Homepage, https://github.com/owensingh38/wsba_hockey/
@@ -62,8 +62,15 @@ wsba.nhl_scrape_player_info(wsba.nhl_scrape_roster('20242025'))
62
62
 
63
63
  ## REPOSITORY
64
64
  ### Past Season Play-by-Play*
65
- ### Team Information*
66
- ### Schedule*
65
+ ### Team Information
66
+ ```python
67
+ wsba.repo_load_teaminfo()
68
+ wsba.repo_load_rosters(seasons=['20212022','20222023','20232024','20242025'])
69
+ ```
70
+ ### Schedule
71
+ ```python
72
+ wsba.repo_load_schedule(seasons=['20212022','20222023','20232024','20242025'])
73
+ ```
67
74
 
68
75
  ## ACKNOWLEDGEMENTS AND CREDITS
69
76
  ### Huge thanks to the following:
@@ -47,8 +47,15 @@ wsba.nhl_scrape_player_info(wsba.nhl_scrape_roster('20242025'))
47
47
 
48
48
  ## REPOSITORY
49
49
  ### Past Season Play-by-Play*
50
- ### Team Information*
51
- ### Schedule*
50
+ ### Team Information
51
+ ```python
52
+ wsba.repo_load_teaminfo()
53
+ wsba.repo_load_rosters(seasons=['20212022','20222023','20232024','20242025'])
54
+ ```
55
+ ### Schedule
56
+ ```python
57
+ wsba.repo_load_schedule(seasons=['20212022','20222023','20232024','20242025'])
58
+ ```
52
59
 
53
60
  ## ACKNOWLEDGEMENTS AND CREDITS
54
61
  ### Huge thanks to the following:
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "wsba_hockey"
3
- version = "0.1.0"
3
+ version = "0.1.2"
4
4
  authors = [
5
5
  { name="Owen Singh", email="owenbksingh@gmail.com" },
6
6
  ]
@@ -0,0 +1 @@
1
+ from wsba_hockey.wsba_main import nhl_scrape_game,nhl_scrape_schedule,nhl_scrape_season,nhl_scrape_seasons_info,nhl_scrape_standings,nhl_scrape_roster,nhl_scrape_player_info,repo_load_rosters,repo_load_schedule,repo_load_teaminfo
@@ -1,13 +1,9 @@
1
1
  import re
2
- from bs4 import BeautifulSoup, SoupStrainer
2
+ from bs4 import BeautifulSoup
3
3
  import hockey_scraper.utils.shared as shared
4
- import hockey_scraper.nhl.pbp.html_pbp as html
5
- import hockey_scraper.nhl.game_scraper as gs
6
4
  import numpy as np
7
5
  import pandas as pd
8
6
  import warnings
9
- import requests as rs
10
- from zipfile import ZipFile
11
7
  warnings.filterwarnings('ignore')
12
8
 
13
9
  ### SCRAPING FUNCTIONS ###
@@ -68,6 +64,10 @@ def parse_json(json):
68
64
  info = pd.json_normalize(json)
69
65
  roster = pd.json_normalize(json['rosterSpots'])
70
66
 
67
+ #Return error if game is set in the future
68
+ if info['gameState'][0] == 'FUT':
69
+ raise ValueError(f"Game {info['id'][0]} has not occurred yet.")
70
+
71
71
  #Game information
72
72
  events['game_id'] = info['id'][0]
73
73
  events['season'] = info['season'][0]
@@ -127,12 +127,25 @@ def parse_json(json):
127
127
  # x, y - Raw coordinates from JSON pbp
128
128
  # x_fixed, y_fixed - Coordinates fixed to the right side of the ice (x is always greater than 0)
129
129
  # x_adj, y_adj - Adjusted coordinates configuring away events with negative x values while home events are always positive
130
- events['x_fixed'] = abs(events['details.xCoord'])
131
- events['y_fixed'] = np.where(events['details.xCoord']<0,-events['details.yCoord'],events['details.yCoord'])
132
- events['x_adj'] = np.where(events['event_team_status']=="home",events['x_fixed'],-events['x_fixed'])
133
- events['y_adj'] = np.where(events['event_team_status']=="home",events['y_fixed'],-events['y_fixed'])
134
- events['event_distance'] = np.sqrt(((89 - events['x_fixed'])**2) + (events['y_fixed']**2))
135
- events['event_angle'] = np.degrees(np.arctan2(abs(events['y_fixed']), abs(89 - events['x_fixed'])))
130
+
131
+ #Some games (mostly preseason and all star games) do not include coordinates.
132
+ try:
133
+ events['x_fixed'] = abs(events['details.xCoord'])
134
+ events['y_fixed'] = np.where(events['details.xCoord']<0,-events['details.yCoord'],events['details.yCoord'])
135
+ events['x_adj'] = np.where(events['event_team_status']=="home",events['x_fixed'],-events['x_fixed'])
136
+ events['y_adj'] = np.where(events['event_team_status']=="home",events['y_fixed'],-events['y_fixed'])
137
+ events['event_distance'] = np.sqrt(((89 - events['x_fixed'])**2) + (events['y_fixed']**2))
138
+ events['event_angle'] = np.degrees(np.arctan2(abs(events['y_fixed']), abs(89 - events['x_fixed'])))
139
+ except TypeError:
140
+ print(f"No coordinates found for game {info['id'][0]}...")
141
+
142
+ events['x_fixed'] = np.nan
143
+ events['y_fixed'] = np.nan
144
+ events['x_adj'] = np.nan
145
+ events['y_adj'] = np.nan
146
+ events['event_distance'] = np.nan
147
+ events['event_angle'] = np.nan
148
+
136
149
 
137
150
  events['event_team_abbr'] = events['details.eventOwnerTeamId'].replace(teams)
138
151
 
@@ -517,6 +530,23 @@ def fix_names(shifts_df,json):
517
530
 
518
531
  return shifts_df.replace(replace,regex=True)
519
532
 
533
+ def get_col():
534
+ return [
535
+ 'season','season_type','game_id','game_date',"start_time","venue","venue_location",
536
+ 'away_team_abbr','home_team_abbr','event_num','period','period_type',
537
+ 'seconds_elapsed', "situation_code","strength_state","home_team_defending_side","shift_type",
538
+ "event_type_code","event_type","description","reason","penalty_duration","penalty_description",
539
+ "event_team_abbr",'num_on', 'players_on', 'ids_on', 'num_off', 'players_off', 'ids_off',
540
+ "event_team_status","event_player_1_id","event_player_2_id","event_player_3_id",
541
+ "event_player_1_name","event_player_2_name","event_player_3_name","event_player_1_pos","event_player_2_pos",
542
+ "event_player_3_pos","event_goalie_id",
543
+ "event_goalie_name","shot_type","zone_code","x","y","x_fixed","y_fixed","x_adj","y_adj",
544
+ "event_skaters","away_skaters","home_skaters",
545
+ "event_distance","event_angle","away_score","home_score", "away_fenwick", "home_fenwick",
546
+ "away_on_1","away_on_2","away_on_3","away_on_4","away_on_5","away_on_6","away_goalie",
547
+ "home_on_1","home_on_2","home_on_3","home_on_4","home_on_5","home_on_6","home_goalie"
548
+ ]
549
+
520
550
  def combine_data(json,html):
521
551
  #Given json pbp and html shifts, total game play-by-play data is provided with additional and corrected details
522
552
  df = pd.concat([json,html])
@@ -604,22 +634,6 @@ def combine_data(json,html):
604
634
 
605
635
  df['strength_state'] = df['event_skaters'].astype(str) + "v" + df['event_skaters_against'].astype(str)
606
636
  df['situation_code'] = np.where(df['situation_code'].isna(),df['away_goalie_in'].astype(str) + df['away_skaters'].astype(str) + df['home_skaters'].astype(str) + df['home_goalie_in'].astype(str),df['situation_code'])
607
-
608
- col = [
609
- 'season','season_type','game_id','game_date',"start_time","venue","venue_location",
610
- 'away_team_abbr','home_team_abbr','event_num','period','period_type',
611
- 'seconds_elapsed', "situation_code","strength_state","home_team_defending_side","shift_type",
612
- "event_type_code","event_type","description","reason","penalty_duration","penalty_description",
613
- "event_team_abbr",'num_on', 'players_on', 'ids_on', 'num_off', 'players_off', 'ids_off',
614
- "event_team_status","event_player_1_id","event_player_2_id","event_player_3_id",
615
- "event_player_1_name","event_player_2_name","event_player_3_name","event_player_1_pos","event_player_2_pos",
616
- "event_player_3_pos","event_goalie_id",
617
- "event_goalie_name","shot_type","zone_code","x","y","x_fixed","y_fixed","x_adj","y_adj",
618
- "event_skaters","away_skaters","home_skaters",
619
- "event_distance","event_angle","away_score","home_score", "away_fenwick", "home_fenwick",
620
- "away_on_1","away_on_2","away_on_3","away_on_4","away_on_5","away_on_6","away_goalie",
621
- "home_on_1","home_on_2","home_on_3","home_on_4","home_on_5","home_on_6","home_goalie"
622
- ]
623
637
 
624
638
  #Return: complete play-by-play with all important data for each event in a provided game
625
- return df[col].replace(r'^\s*$', np.nan, regex=True)
639
+ return df[get_col()].replace(r'^\s*$', np.nan, regex=True)
@@ -0,0 +1,146 @@
1
+ import pandas as pd
2
+ import numpy as np
3
+ import xgboost as xgb
4
+ import scipy.sparse as sp
5
+ import joblib
6
+
7
+ ### XG_MODEL FUNCTIONS ###
8
+ # Provided in this file are functions vital to the goal prediction model in the WSBA Hockey Python package. #
9
+
10
+ def prep_xG_data(pbp):
11
+ #Prep data for xG training and calculation
12
+
13
+ events = ['faceoff','hit','giveaway','takeaway','blocked-shot','missed-shot','shot-on-goal','goal']
14
+ shot_types = ['wrist','deflected','tip-in','slap','backhand','snap','wrap-around','poke','bat','cradle','between-legs']
15
+ fenwick_events = ['missed-shot','shot-on-goal','goal']
16
+ strengths = ['3v3',
17
+ '3v4',
18
+ '3v5',
19
+ '4v3',
20
+ '4v4',
21
+ '4v5',
22
+ '4v6',
23
+ '5v3',
24
+ '5v4',
25
+ '5v5',
26
+ '5v6',
27
+ '6v4',
28
+ '6v5']
29
+
30
+ #Filter unwanted date:
31
+ #Shots must occur in specified events and strength states, occur before the shootout, and have valid coordinates
32
+ data = pbp.loc[(pbp['event_type'].isin(events))&
33
+ (pbp['strength_state'].isin(strengths))&
34
+ (pbp['period'] < 5)&
35
+ (pbp['x_fixed'].notna())&
36
+ (pbp['y_fixed'].notna())&
37
+ ~((pbp['x_fixed']==0)&(pbp['y_fixed']==0)&(pbp['x_fixed'].isin(fenwick_events))&(pbp['event_distance']!=90))]
38
+ #Create last event columns
39
+ data = data.sort_values(by=['season','game_id','period','seconds_elapsed','event_num'])
40
+
41
+ data["seconds_since_last"] = data['seconds_elapsed']-data['seconds_elapsed'].shift(1)
42
+ data["event_team_last"] = data['event_team_abbr'].shift(1)
43
+ data["event_type_last"] = data['event_type'].shift(1)
44
+ data["x_fixed_last"] = data['x_fixed'].shift(1)
45
+ data["y_fixed_last"] = data['y_fixed'].shift(1)
46
+ data["zone_code_last"] = data['zone_code'].shift(1)
47
+ data['shot_type'] = data['shot_type'].fillna('wrist')
48
+
49
+
50
+ data.sort_values(['season','game_id','period','seconds_elapsed','event_num'],inplace=True)
51
+ data['score_state'] = np.where(data['away_team_abbr']==data['event_team_abbr'],data['away_score']-data['home_score'],data['home_score']-data['away_score'])
52
+ data['fenwick_state'] = np.where(data['away_team_abbr']==data['event_team_abbr'],data['away_fenwick']-data['home_fenwick'],data['home_fenwick']-data['away_fenwick'])
53
+ data['distance_from_last'] = np.sqrt((data['x_fixed'] - data['x_fixed_last'])**2 + (data['y_fixed'] - data['y_fixed_last'])**2)
54
+ data['rush_mod'] = np.where((data['event_type'].isin(fenwick_events))&(data['zone_code_last'].isin(['N','D']))&(data['x_fixed']>25)&(data['seconds_since_last']<5),5-data['seconds_since_last'],0)
55
+ data['rebound_mod'] = np.where((data['event_type'].isin(fenwick_events))&(data['event_type_last'].isin(fenwick_events))&(data['seconds_since_last']<3),3-data['seconds_since_last'],0)
56
+
57
+ #Create boolean variables
58
+ data["is_goal"]=(data['event_type']=='goal').astype(int)
59
+ data["is_home"]=(data['home_team_abbr']==data['event_team_abbr']).astype(int)
60
+
61
+
62
+ for shot in shot_types:
63
+ data[shot] = (data['shot_type']==shot).astype(int)
64
+ for strength in strengths:
65
+ data[f'state_{strength}'] = (data['strength_state']==strength).astype(int)
66
+ for event in events[0:len(events)-1]:
67
+ data[f'prior_{event}_same'] = ((data['event_type_last']==event)&(data['event_team_last']==data['event_team_abbr'])).astype(int)
68
+ data[f'prior_{event}_opp'] = ((data['event_type_last']==event)&(data['event_team_last']!=data['event_team_abbr'])).astype(int)
69
+
70
+ #Return: pbp data prepared to train and calculate the xG model
71
+ return data
72
+
73
+ def wsba_xG(pbp, train = False, overwrite = False, model_path = "tools/xg_model/wsba_xg.joblib", train_runs = 20, test_runs = 20):
74
+ #Train and calculate the WSBA Expected Goals model
75
+
76
+ target = "is_goal"
77
+ continous = ['event_distance',
78
+ 'event_angle',
79
+ 'seconds_elapsed',
80
+ 'period',
81
+ 'x_fixed',
82
+ 'y_fixed',
83
+ 'x_fixed_last',
84
+ 'y_fixed_last',
85
+ 'distance_from_last',
86
+ 'seconds_since_last',
87
+ 'score_state',
88
+ 'fenwick_state',
89
+ 'rush_mod',
90
+ 'rebound_mod']
91
+ boolean = ['is_home',
92
+ 'state_3v3',
93
+ 'state_3v4',
94
+ 'state_3v5',
95
+ 'state_4v3',
96
+ 'state_4v4',
97
+ 'state_4v5',
98
+ 'state_4v6',
99
+ 'state_5v3',
100
+ 'state_5v4',
101
+ 'state_5v5',
102
+ 'state_5v6',
103
+ 'state_6v4',
104
+ 'state_6v5',
105
+ 'wrist',
106
+ 'deflected',
107
+ 'tip-in',
108
+ 'slap',
109
+ 'backhand',
110
+ 'snap',
111
+ 'wrap-around',
112
+ 'poke',
113
+ 'bat',
114
+ 'cradle',
115
+ 'between-legs',
116
+ 'prior_shot-on-goal_same',
117
+ 'prior_missed-shot_same',
118
+ 'prior_blocked-shot_same',
119
+ 'prior_giveaway_same',
120
+ 'prior_takeaway_same',
121
+ 'prior_hit_same',
122
+ 'prior_shot-on-goal_opp',
123
+ 'prior_missed-shot_opp',
124
+ 'prior_blocked-shot_opp',
125
+ 'prior_giveaway_opp',
126
+ 'prior_takeaway_opp',
127
+ 'prior_hit_opp',
128
+ 'prior_faceoff']
129
+
130
+ #Prep Data
131
+ data = prep_xG_data(pbp)
132
+
133
+ #Convert to sparse
134
+ data_sparse = sp.csr_matrix(data[[target]+continous+boolean])
135
+
136
+ #Target and Predictors
137
+ is_goal_vect = data_sparse[:, 0].A
138
+ predictors = data_sparse[:, 1:]
139
+
140
+ #XGB DataModel
141
+ xgb_matrix = xgb.DMatrix(data=predictors,label=is_goal_vect)
142
+
143
+ if train == True:
144
+ run_num =
145
+ else:
146
+ print("No data to add yet...")
@@ -2,7 +2,7 @@ import requests as rs
2
2
  import pandas as pd
3
3
  import numpy as np
4
4
  from datetime import datetime, timedelta
5
- from tools.scraping import *
5
+ from wsba_hockey.tools.scraping import *
6
6
 
7
7
  ### WSBA HOCKEY ###
8
8
  ## Provided below are all integral functions in the WSBA Hockey Python package. ##
@@ -26,30 +26,43 @@ def nhl_scrape_game(game_ids,split_shifts = False,remove = ['period-start','peri
26
26
  away_log = "https://www.nhl.com/scores/htmlreports/"+season+"/TV"+str(game_id)[-6:]+".HTM"
27
27
 
28
28
  #Retrieve raw data
29
- json = rs.get(api).json()
30
- home_shift = rs.get(home_log).content
31
- away_shift = rs.get(away_log).content
32
-
33
- if int(game_id[:4]) < 2010:
34
- print()
35
- raise Exception('Games before 2010-2011 are not available yet.')
36
- else:
37
- #Parse Json
38
- pbp = parse_json(json)
39
-
40
- #Create shifts
41
- shifts = fix_names(combine_shifts(home_shift,away_shift,json,game_id),json)
42
-
43
- #Combine and append data to list
44
- data = combine_data(pbp,shifts)
29
+ try:
30
+ json = rs.get(api).json()
31
+ home_shift = rs.get(home_log).content
32
+ away_shift = rs.get(away_log).content
33
+
34
+ if int(game_id[:4]) < 2010:
35
+ print()
36
+ raise Exception('Games before 2010-2011 are not available yet.')
37
+ else:
38
+ #Parse Json
39
+ pbp = parse_json(json)
40
+
41
+ #Create shifts
42
+ #If no shifts data exists only export play-by-play
43
+ try:
44
+ shifts = fix_names(combine_shifts(home_shift,away_shift,json,game_id),json)
45
+ data = combine_data(pbp,shifts)
46
+
47
+ except:
48
+ print(f"Cannot find or create shifts for game {game_id}...")
49
+ data = combine_data(pbp,pd.DataFrame(columns=get_col()))
45
50
 
46
- pbps.append(data)
51
+ #Combine and append data to list
52
+ pbps.append(data)
53
+ except:
54
+ print(f"Unable to scrape game {game_id}. Ensure the ID is properly inputted and formatted.")
55
+ pbps.append(pd.DataFrame())
47
56
 
48
57
  #Add all pbps together
49
58
  df = pd.concat(pbps)
50
59
 
51
60
  #Split pbp and shift events if necessary
52
61
  #Return: complete play-by-play with data removed or split as necessary
62
+ try: df['event_type']
63
+ except KeyError:
64
+ raise KeyError("No data is available to return.")
65
+
53
66
  if split_shifts == True:
54
67
  if len(remove) == 0:
55
68
  remove = ['change']
@@ -113,7 +126,7 @@ def nhl_scrape_schedule(season,start = "09-01", end = "08-01"):
113
126
  #Return: specified schedule data (excluding preseason games)
114
127
  return df.loc[df['season_type']>1]
115
128
 
116
- def nhl_scrape_season(season,split_shifts = False, remove = ['period-start','period-end','challenge','stoppage','change'], start = "09-01", end = "08-01", local=False, local_path = "schedule/schedule.csv"):
129
+ def nhl_scrape_season(season,split_shifts = False, remove = ['period-start','period-end','game-end','challenge','stoppage'], start = "09-01", end = "08-01", local=False, local_path = "schedule/schedule.csv"):
117
130
  #Given season, scrape all play-by-play occurring within the season
118
131
  # param 'season' - NHL season to scrape
119
132
  # param 'split_shifts' - boolean which splits pbp and shift events if true
@@ -146,29 +159,26 @@ def nhl_scrape_season(season,split_shifts = False, remove = ['period-start','per
146
159
  df.append(data)
147
160
 
148
161
  except:
149
- #Errors should be rare; testing of eight full-season scraped produced just one missing game due to erro
150
- #Games which have not happened yet also print as errors
151
- print("An error occurred...")
152
- errors.append(pd.DataFrame({"id":game_id}))
153
-
154
- pbp = pd.concat(df)
162
+ #Errors should be rare; testing of eight full-season scrapes produced just one missing regular season game due to error
163
+ continue
164
+
165
+ #Missing data handled as a KeyError
166
+ try: pbp = pd.concat(df)
167
+ except:
168
+ raise KeyError("No data is available to return.")
169
+
155
170
  if split_shifts == True:
156
- shifts = pd.concat(df_s)
171
+ try: shifts = pd.concat(df_s)
172
+ except: raise KeyError("No data is available to return.")
157
173
  else:
158
174
  ""
159
- try:
160
- errors = pd.concat(errors)
161
- except:
162
- errors = pd.DataFrame()
163
175
 
164
176
  #Return: Complete pbp and shifts data for specified season as well as dataframe of game_ids which failed to return data
165
177
  if split_shifts == True:
166
178
  return {"pbp":pbp,
167
- 'shifts':shifts,
168
- "errors":errors}
179
+ 'shifts':shifts}
169
180
  else:
170
- return {"pbp":pbp,
171
- "errors":errors}
181
+ return pbp
172
182
 
173
183
  def nhl_scrape_seasons_info(seasons = []):
174
184
  #Returns info related to NHL seasons (by default, all seasons are included)
@@ -192,7 +202,7 @@ def nhl_scrape_seasons_info(seasons = []):
192
202
 
193
203
  def nhl_scrape_standings(arg = "now"):
194
204
  #Returns standings
195
- # parma 'arg' - by default, this is "now" returning active NHL standings. May also be a specific date formatted as YYYY-MM-DD
205
+ # param 'arg' - by default, this is "now" returning active NHL standings. May also be a specific date formatted as YYYY-MM-DD
196
206
 
197
207
  if arg == "now":
198
208
  print("Scraping standings as of now...")
@@ -279,4 +289,29 @@ def nhl_scrape_player_info(roster):
279
289
  players['Season'] = players['Season'].astype(str)
280
290
  players['Player'] = players['Player'].replace(r'^\s*$', np.nan, regex=True)
281
291
 
282
- return players.loc[players['Player'].notna()].sort_values(by=['Player','Season','Team'])
292
+ return players.loc[players['Player'].notna()].sort_values(by=['Player','Season','Team'])
293
+
294
+ def repo_load_rosters(seasons = []):
295
+ #Returns roster data from repository
296
+ # param 'seasons' - list of seasons to include
297
+
298
+ data = pd.read_csv("rosters/nhl_rosters.csv")
299
+ if len(seasons)>0:
300
+ data = data.loc[data['season'].isin(seasons)]
301
+
302
+ return data
303
+
304
+ def repo_load_schedule(seasons = []):
305
+ #Returns schedule data from repository
306
+ # param 'seasons' - list of seasons to include
307
+
308
+ data = pd.read_csv("schedule/schedule.csv")
309
+ if len(seasons)>0:
310
+ data = data.loc[data['season'].isin(seasons)]
311
+
312
+ return data
313
+
314
+ def repo_load_teaminfo():
315
+ #Returns team data from repository
316
+
317
+ return pd.read_csv("teaminfo/nhl_teaminfo.csv")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: wsba_hockey
3
- Version: 0.1.0
3
+ Version: 0.1.2
4
4
  Summary: WeakSide Breakout's complete Python package of access to hockey data, primarily including the scraping of National Hockey League schedule, play-by-play, and shifts information.
5
5
  Author-email: Owen Singh <owenbksingh@gmail.com>
6
6
  Project-URL: Homepage, https://github.com/owensingh38/wsba_hockey/
@@ -62,8 +62,15 @@ wsba.nhl_scrape_player_info(wsba.nhl_scrape_roster('20242025'))
62
62
 
63
63
  ## REPOSITORY
64
64
  ### Past Season Play-by-Play*
65
- ### Team Information*
66
- ### Schedule*
65
+ ### Team Information
66
+ ```python
67
+ wsba.repo_load_teaminfo()
68
+ wsba.repo_load_rosters(seasons=['20212022','20222023','20232024','20242025'])
69
+ ```
70
+ ### Schedule
71
+ ```python
72
+ wsba.repo_load_schedule(seasons=['20212022','20222023','20232024','20242025'])
73
+ ```
67
74
 
68
75
  ## ACKNOWLEDGEMENTS AND CREDITS
69
76
  ### Huge thanks to the following:
@@ -1,9 +1,11 @@
1
1
  LICENSE
2
2
  README.md
3
3
  pyproject.toml
4
+ src/wsba_hockey/__init__.py
4
5
  src/wsba_hockey/wsba_main.py
5
6
  src/wsba_hockey.egg-info/PKG-INFO
6
7
  src/wsba_hockey.egg-info/SOURCES.txt
7
8
  src/wsba_hockey.egg-info/dependency_links.txt
8
9
  src/wsba_hockey.egg-info/top_level.txt
9
- src/wsba_hockey/tools/scraping.py
10
+ src/wsba_hockey/tools/scraping.py
11
+ src/wsba_hockey/tools/xg_model.py
File without changes
File without changes