sportsball 0.3.19__tar.gz → 0.3.21__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (201) hide show
  1. {sportsball-0.3.19/sportsball.egg-info → sportsball-0.3.21}/PKG-INFO +5 -1
  2. {sportsball-0.3.19 → sportsball-0.3.21}/README.md +2 -0
  3. {sportsball-0.3.19 → sportsball-0.3.21}/requirements.txt +2 -1
  4. {sportsball-0.3.19 → sportsball-0.3.21}/setup.py +9 -6
  5. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/__init__.py +1 -1
  6. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/google/google_address_model.py +9 -0
  7. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/google/google_news_model.py +1 -1
  8. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/sportsreference/sportsreference_game_model.py +130 -29
  9. sportsball-0.3.21/sportsball/vendor/pygooglenews/__init__.py +203 -0
  10. {sportsball-0.3.19 → sportsball-0.3.21/sportsball.egg-info}/PKG-INFO +5 -1
  11. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball.egg-info/SOURCES.txt +2 -0
  12. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball.egg-info/requires.txt +2 -0
  13. sportsball-0.3.21/tests/data/sportsreference/sportsreference_game_model_test.py +97 -0
  14. sportsball-0.3.21/tests/data/x/__init__.py +0 -0
  15. sportsball-0.3.19/tests/data/sportsreference/sportsreference_game_model_test.py +0 -30
  16. {sportsball-0.3.19 → sportsball-0.3.21}/LICENSE +0 -0
  17. {sportsball-0.3.19 → sportsball-0.3.21}/MANIFEST.in +0 -0
  18. {sportsball-0.3.19 → sportsball-0.3.21}/setup.cfg +0 -0
  19. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/__main__.py +0 -0
  20. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/args.py +0 -0
  21. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/cache.py +0 -0
  22. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/__init__.py +0 -0
  23. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/address_model.py +0 -0
  24. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/afl/__init__.py +0 -0
  25. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/afl/afltables/__init__.py +0 -0
  26. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/afl/afltables/afl_afltables_game_model.py +0 -0
  27. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/afl/afltables/afl_afltables_league_model.py +0 -0
  28. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/afl/afltables/afl_afltables_player_model.py +0 -0
  29. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/afl/afltables/afl_afltables_team_model.py +0 -0
  30. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/afl/afltables/afl_afltables_venue_model.py +0 -0
  31. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/afl/aussportsbetting/__init__.py +0 -0
  32. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/afl/aussportsbetting/afl_aussportsbetting_league_model.py +0 -0
  33. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/afl/combined/__init__.py +0 -0
  34. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/afl/combined/afl_combined_league_model.py +0 -0
  35. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/afl/espn/__init__.py +0 -0
  36. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/afl/espn/afl_espn_league_model.py +0 -0
  37. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/afl/oddsportal/__init__.py +0 -0
  38. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/afl/oddsportal/afl_oddsportal_league_model.py +0 -0
  39. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/aussportsbetting/__init__.py +0 -0
  40. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/aussportsbetting/aussportsbetting_bookie_model.py +0 -0
  41. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/aussportsbetting/aussportsbetting_game_model.py +0 -0
  42. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/aussportsbetting/aussportsbetting_league_model.py +0 -0
  43. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/aussportsbetting/aussportsbetting_odds_model.py +0 -0
  44. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/aussportsbetting/aussportsbetting_team_model.py +0 -0
  45. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/aussportsbetting/aussportsbetting_venue_model.py +0 -0
  46. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/bookie_model.py +0 -0
  47. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/combined/__init__.py +0 -0
  48. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/combined/combined_address_model.py +0 -0
  49. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/combined/combined_game_model.py +0 -0
  50. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/combined/combined_league_model.py +0 -0
  51. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/combined/combined_player_model.py +0 -0
  52. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/combined/combined_team_model.py +0 -0
  53. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/combined/combined_venue_model.py +0 -0
  54. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/combined/combined_weather_model.py +0 -0
  55. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/espn/__init__.py +0 -0
  56. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/espn/espn_bookie_model.py +0 -0
  57. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/espn/espn_game_model.py +0 -0
  58. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/espn/espn_league_model.py +0 -0
  59. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/espn/espn_odds_model.py +0 -0
  60. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/espn/espn_player_model.py +0 -0
  61. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/espn/espn_team_model.py +0 -0
  62. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/espn/espn_venue_model.py +0 -0
  63. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/field_type.py +0 -0
  64. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/game_model.py +0 -0
  65. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/google/__init__.py +0 -0
  66. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/league.py +0 -0
  67. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/league_model.py +0 -0
  68. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/model.py +0 -0
  69. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/__init__.py +0 -0
  70. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/combined/__init__.py +0 -0
  71. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/combined/nba_combined_league_model.py +0 -0
  72. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/espn/__init__.py +0 -0
  73. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/espn/nba_espn_league_model.py +0 -0
  74. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/nba/__init__.py +0 -0
  75. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/nba/nba_nba_game_model.py +0 -0
  76. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/nba/nba_nba_league_model.py +0 -0
  77. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/nba/nba_nba_player_model.py +0 -0
  78. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/nba/nba_nba_team_model.py +0 -0
  79. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/oddsportal/__init__.py +0 -0
  80. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/oddsportal/nba_oddsportal_league_model.py +0 -0
  81. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/sportsdb/__init__.py +0 -0
  82. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/sportsdb/nba_sportsdb_league_model.py +0 -0
  83. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/sportsreference/__init__.py +0 -0
  84. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nba/sportsreference/nba_sportsreference_league_model.py +0 -0
  85. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/ncaab/__init__.py +0 -0
  86. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/ncaab/combined/__init__.py +0 -0
  87. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/ncaab/combined/ncaab_combined_league_model.py +0 -0
  88. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/ncaab/espn/__init__.py +0 -0
  89. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/ncaab/espn/ncaab_espn_league_model.py +0 -0
  90. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/ncaab/oddsportal/__init__.py +0 -0
  91. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/ncaab/oddsportal/ncaab_oddsportal_league_model.py +0 -0
  92. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/ncaab/sportsreference/__init__.py +0 -0
  93. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/ncaab/sportsreference/ncaab_sportsreference_league_model.py +0 -0
  94. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/ncaaf/__init__.py +0 -0
  95. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/ncaaf/espn/__init__.py +0 -0
  96. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/ncaaf/espn/ncaaf_espn_league_model.py +0 -0
  97. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/news_model.py +0 -0
  98. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nfl/__init__.py +0 -0
  99. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nfl/aussportsbetting/__init__.py +0 -0
  100. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nfl/aussportsbetting/nfl_aussportsbetting_league_model.py +0 -0
  101. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nfl/combined/__init__.py +0 -0
  102. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nfl/combined/nfl_combined_league_model.py +0 -0
  103. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nfl/espn/__init__.py +0 -0
  104. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nfl/espn/nfl_espn_league_model.py +0 -0
  105. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nfl/oddsportal/__init__.py +0 -0
  106. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nfl/oddsportal/nfl_oddsportal_league_model.py +0 -0
  107. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nfl/sportsdb/__init__.py +0 -0
  108. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/nfl/sportsdb/nfl_sportsdb_league_model.py +0 -0
  109. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/odds_model.py +0 -0
  110. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/oddsportal/__init__.py +0 -0
  111. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/oddsportal/oddsportal_bookie_model.py +0 -0
  112. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/oddsportal/oddsportal_game_model.py +0 -0
  113. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/oddsportal/oddsportal_league_model.py +0 -0
  114. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/oddsportal/oddsportal_odds_model.py +0 -0
  115. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/oddsportal/oddsportal_team_model.py +0 -0
  116. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/oddsportal/oddsportal_venue_model.py +0 -0
  117. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/player_model.py +0 -0
  118. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/season_type.py +0 -0
  119. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/social_model.py +0 -0
  120. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/sportsdb/__init__.py +0 -0
  121. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/sportsdb/sportsdb_game_model.py +0 -0
  122. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/sportsdb/sportsdb_league_model.py +0 -0
  123. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/sportsdb/sportsdb_team_model.py +0 -0
  124. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/sportsdb/sportsdb_venue_model.py +0 -0
  125. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/sportsreference/__init__.py +0 -0
  126. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/sportsreference/sportsreference_league_model.py +0 -0
  127. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/sportsreference/sportsreference_player_model.py +0 -0
  128. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/sportsreference/sportsreference_team_model.py +0 -0
  129. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/sportsreference/sportsreference_venue_model.py +0 -0
  130. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/team_model.py +0 -0
  131. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/venue_model.py +0 -0
  132. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/weather/__init__.py +0 -0
  133. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/weather/gribstream/__init__.py +0 -0
  134. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/weather/gribstream/gribstream_weather_model.py +0 -0
  135. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/weather/multi_weather_model.py +0 -0
  136. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/weather/openmeteo/__init__.py +0 -0
  137. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/weather/openmeteo/openmeteo_weather_model.py +0 -0
  138. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/weather_model.py +0 -0
  139. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/wikipedia/__init__.py +0 -0
  140. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/wikipedia/wikipedia_venue_model.py +0 -0
  141. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/x/__init__.py +0 -0
  142. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/data/x/x_social_model.py +0 -0
  143. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/logger.py +0 -0
  144. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/loglevel.py +0 -0
  145. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/proxy_session.py +0 -0
  146. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/session.py +0 -0
  147. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball/sportsball.py +0 -0
  148. {sportsball-0.3.19/tests → sportsball-0.3.21/sportsball/vendor}/__init__.py +0 -0
  149. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball.egg-info/dependency_links.txt +0 -0
  150. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball.egg-info/entry_points.txt +0 -0
  151. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball.egg-info/not-zip-safe +0 -0
  152. {sportsball-0.3.19 → sportsball-0.3.21}/sportsball.egg-info/top_level.txt +0 -0
  153. {sportsball-0.3.19/tests/data → sportsball-0.3.21/tests}/__init__.py +0 -0
  154. {sportsball-0.3.19/tests/data/afl → sportsball-0.3.21/tests/data}/__init__.py +0 -0
  155. {sportsball-0.3.19/tests/data/afl/afltables → sportsball-0.3.21/tests/data/afl}/__init__.py +0 -0
  156. {sportsball-0.3.19/tests/data/aussportsbetting → sportsball-0.3.21/tests/data/afl/afltables}/__init__.py +0 -0
  157. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/afl/afltables/afl_afltables_player_model_test.py +0 -0
  158. {sportsball-0.3.19/tests/data/combined → sportsball-0.3.21/tests/data/aussportsbetting}/__init__.py +0 -0
  159. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/aussportsbetting/aussportsbetting_game_model_test.py +0 -0
  160. {sportsball-0.3.19/tests/data/espn → sportsball-0.3.21/tests/data/combined}/__init__.py +0 -0
  161. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/combined/combined_game_model_test.py +0 -0
  162. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/combined/combined_player_model_test.py +0 -0
  163. {sportsball-0.3.19/tests/data/google → sportsball-0.3.21/tests/data/espn}/__init__.py +0 -0
  164. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/espn/espn_game_model_test.py +0 -0
  165. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/espn/espn_player_model_test.py +0 -0
  166. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/game_model_test.py +0 -0
  167. {sportsball-0.3.19/tests/data/nba → sportsball-0.3.21/tests/data/google}/__init__.py +0 -0
  168. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/google/google_address_model_test.py +0 -0
  169. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/google/google_news_model_test.py +0 -0
  170. {sportsball-0.3.19/tests/data/nba → sportsball-0.3.21/tests/data}/nba/__init__.py +0 -0
  171. {sportsball-0.3.19/tests/data/nba/sportsdb → sportsball-0.3.21/tests/data/nba/nba}/__init__.py +0 -0
  172. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/nba/nba/nba_nba_game_model_test.py +0 -0
  173. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/nba/nba/nba_nba_league_model_test.py +0 -0
  174. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/nba/nba/nba_nba_player_model_test.py +0 -0
  175. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/nba/nba/nba_nba_team_model_test.py +0 -0
  176. {sportsball-0.3.19/tests/data/nba/sportsreference → sportsball-0.3.21/tests/data/nba/sportsdb}/__init__.py +0 -0
  177. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/nba/sportsdb/nba_sportsdb_league_model_test.py +0 -0
  178. {sportsball-0.3.19/tests/data/ncaab → sportsball-0.3.21/tests/data/nba/sportsreference}/__init__.py +0 -0
  179. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/nba/sportsreference/nba_sportsreference_league_model_test.py +0 -0
  180. {sportsball-0.3.19/tests/data/ncaab/sportsreference → sportsball-0.3.21/tests/data/ncaab}/__init__.py +0 -0
  181. {sportsball-0.3.19/tests/data/nfl → sportsball-0.3.21/tests/data/ncaab/sportsreference}/__init__.py +0 -0
  182. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/ncaab/sportsreference/ncaab_sportsreference_league_model_test.py +0 -0
  183. {sportsball-0.3.19/tests/data/nfl/sportsdb → sportsball-0.3.21/tests/data/nfl}/__init__.py +0 -0
  184. {sportsball-0.3.19/tests/data → sportsball-0.3.21/tests/data/nfl}/sportsdb/__init__.py +0 -0
  185. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/nfl/sportsdb/nfl_sportsdb_league_model_test.py +0 -0
  186. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/player_model_test.py +0 -0
  187. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/social_model_test.py +0 -0
  188. {sportsball-0.3.19/tests/data/sportsreference → sportsball-0.3.21/tests/data/sportsdb}/__init__.py +0 -0
  189. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/sportsdb/sportsdb_game_model_test.py +0 -0
  190. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/sportsdb/sportsdb_league_model_test.py +0 -0
  191. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/sportsdb/sportsdb_team_model_test.py +0 -0
  192. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/sportsdb/sportsdb_venue_model_test.py +0 -0
  193. {sportsball-0.3.19/tests/data/x → sportsball-0.3.21/tests/data/sportsreference}/__init__.py +0 -0
  194. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/sportsreference/sportsreference_league_model_test.py +0 -0
  195. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/sportsreference/sportsreference_player_model_test.py +0 -0
  196. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/sportsreference/sportsreference_team_model_test.py +0 -0
  197. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/sportsreference/sportsreference_venue_model_test.py +0 -0
  198. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/team_model_test.py +0 -0
  199. {sportsball-0.3.19 → sportsball-0.3.21}/tests/data/x/x_social_model_test.py +0 -0
  200. {sportsball-0.3.19 → sportsball-0.3.21}/tests/proxy_session_test.py +0 -0
  201. {sportsball-0.3.19 → sportsball-0.3.21}/tests/sportsball_test.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: sportsball
3
- Version: 0.3.19
3
+ Version: 0.3.21
4
4
  Summary: A library for pulling in and normalising sports stats.
5
5
  Home-page: https://github.com/8W9aG/sportsball
6
6
  Author: Will Sackfield
@@ -41,6 +41,8 @@ Requires-Dist: tenacity>=9.0.0
41
41
  Requires-Dist: random_user_agent>=1.0.1
42
42
  Requires-Dist: wayback>=0.4.5
43
43
  Requires-Dist: cryptography>=44.0.0
44
+ Requires-Dist: feedparser>=6.0.11
45
+ Requires-Dist: dateparser>=1.2.0
44
46
  Dynamic: author
45
47
  Dynamic: author-email
46
48
  Dynamic: classifier
@@ -97,6 +99,8 @@ Python 3.11.6:
97
99
  - [random_user_agent](https://github.com/Luqman-Ud-Din/random_user_agent)
98
100
  - [wayback](https://github.com/edgi-govdata-archiving/wayback)
99
101
  - [cryptography](https://cryptography.io/en/latest/)
102
+ - [feedparser](https://github.com/kurtmckee/feedparser)
103
+ - [dateparser](https://dateparser.readthedocs.io/en/latest/)
100
104
 
101
105
  ## Raison D'être :thought_balloon:
102
106
 
@@ -43,6 +43,8 @@ Python 3.11.6:
43
43
  - [random_user_agent](https://github.com/Luqman-Ud-Din/random_user_agent)
44
44
  - [wayback](https://github.com/edgi-govdata-archiving/wayback)
45
45
  - [cryptography](https://cryptography.io/en/latest/)
46
+ - [feedparser](https://github.com/kurtmckee/feedparser)
47
+ - [dateparser](https://dateparser.readthedocs.io/en/latest/)
46
48
 
47
49
  ## Raison D'être :thought_balloon:
48
50
 
@@ -18,7 +18,6 @@ nba_api>=1.7.0
18
18
  timezonefinder>=6.5.7
19
19
  pydantic>=2.10.4
20
20
  flatten_json>=0.1.14
21
- -e git+https://github.com/8W9aG/pygooglenews@d40ad6f01cb661bbb1c4b4022896f93c83c88364#egg=pygooglenews
22
21
  extruct>=0.18.0
23
22
  wikipedia-api>=0.8.1
24
23
  tweepy>=4.15.0
@@ -29,3 +28,5 @@ tenacity>=9.0.0
29
28
  random_user_agent>=1.0.1
30
29
  wayback>=0.4.5
31
30
  cryptography>=44.0.0
31
+ feedparser>=6.0.11
32
+ dateparser>=1.2.0
@@ -13,17 +13,20 @@ def install_requires() -> typing.List[str]:
13
13
  with open(
14
14
  Path(__file__).absolute().parent.joinpath('requirements.txt'), "r"
15
15
  ) as requirments_txt_handle:
16
- requires = [
17
- x
18
- for x in requirments_txt_handle
19
- if not x.startswith(".") and not x.startswith("-e")
20
- ]
16
+ for require in requirments_txt_handle:
17
+ if not require.startswith(".") and not require.startswith("-e"):
18
+ requires.append(require)
19
+ else:
20
+ require_file = require.split()[-1]
21
+ require_file = require_file.replace("git+", "")
22
+ package_name = require_file.split("#egg=")[-1]
23
+ requires.append(package_name + " @ " + require_file)
21
24
  return requires
22
25
 
23
26
 
24
27
  setup(
25
28
  name='sportsball',
26
- version='0.3.19',
29
+ version='0.3.21',
27
30
  description='A library for pulling in and normalising sports stats.',
28
31
  long_description=long_description,
29
32
  long_description_content_type='text/markdown',
@@ -1,3 +1,3 @@
1
1
  """The main module for sportsball."""
2
2
 
3
- __VERSION__ = "0.3.19"
3
+ __VERSION__ = "0.3.21"
@@ -7066,6 +7066,15 @@ _CACHED_GEOCODES: dict[str, Any] = {
7066
7066
  housenumber="",
7067
7067
  country="USA",
7068
7068
  ),
7069
+ "Dallas, Texas": SportsballGeocodeTuple(
7070
+ city="Dallas",
7071
+ state="TX",
7072
+ postal="",
7073
+ lat=32.779167,
7074
+ lng=-96.808889,
7075
+ housenumber="",
7076
+ country="USA",
7077
+ ),
7069
7078
  }
7070
7079
 
7071
7080
 
@@ -7,9 +7,9 @@ import pytest_is_running
7
7
  import requests_cache
8
8
  from bs4 import BeautifulSoup
9
9
  from dateutil import parser
10
- from pygooglenews import GoogleNews # type: ignore
11
10
 
12
11
  from ...cache import MEMORY
12
+ from ...vendor.pygooglenews import GoogleNews # type: ignore
13
13
  from ..league import League, long_name
14
14
  from ..news_model import NewsModel
15
15
 
@@ -1,6 +1,6 @@
1
1
  """Sports Reference game model."""
2
2
 
3
- # pylint: disable=too-many-locals,too-many-statements,unused-argument,protected-access
3
+ # pylint: disable=too-many-locals,too-many-statements,unused-argument,protected-access,too-many-arguments
4
4
  import datetime
5
5
  import io
6
6
  import logging
@@ -22,19 +22,76 @@ from .sportsreference_team_model import create_sportsreference_team_model
22
22
  from .sportsreference_venue_model import create_sportsreference_venue_model
23
23
 
24
24
 
25
- def _create_sportsreference_game_model(
25
+ def _find_old_dt(
26
+ dfs: list[pd.DataFrame],
26
27
  session: requests_cache.CachedSession,
28
+ soup: BeautifulSoup,
27
29
  url: str,
28
30
  league: League,
29
- ) -> GameModel:
30
- # pylint: disable=too-many-branches
31
- response = session.get(url)
32
- response.raise_for_status()
33
- soup = BeautifulSoup(response.text, "html.parser")
34
- scorebox_meta_div = soup.find("div", class_="scorebox_meta")
35
- if not isinstance(scorebox_meta_div, Tag):
36
- raise ValueError("scorebox_meta_div is not a Tag.")
31
+ player_urls: set[str],
32
+ fg: dict[str, int],
33
+ fga: dict[str, int],
34
+ offensive_rebounds: dict[str, int],
35
+ assists: dict[str, int],
36
+ turnovers: dict[str, int],
37
+ ) -> tuple[datetime.datetime, list[TeamModel], str]:
38
+ teams: list[TeamModel] = []
39
+ dt = None
40
+ venue_name = None
41
+ for df in dfs:
42
+ test_row = df.iat[0, 0]
43
+ if "Prev Game" in test_row and len(df) == 2:
44
+ date_venue_split = df.iat[1, 0].split()
45
+ dt = parse(" ".join(date_venue_split[:5]))
46
+ venue_name = " ".join(date_venue_split[5:])
47
+ for col_idx in range(len(df.columns.values)):
48
+ team_row = df.iat[0, col_idx]
49
+ team_name_points = team_row.split("⇐")[0].strip()
50
+ points = int(team_name_points.split()[-1].strip())
51
+ team_name = " ".join(team_name_points.split()[:-1]).strip()
52
+ team_a = soup.find("a", text=team_name, href=True)
53
+ if not isinstance(team_a, Tag):
54
+ raise ValueError("team_a is not a tag.")
55
+ team_url = urllib.parse.urljoin(url, str(team_a.get("href")))
56
+ teams.append(
57
+ create_sportsreference_team_model(
58
+ session,
59
+ team_url,
60
+ dt,
61
+ league,
62
+ player_urls,
63
+ points,
64
+ fg,
65
+ fga,
66
+ offensive_rebounds,
67
+ assists,
68
+ turnovers,
69
+ )
70
+ )
71
+ break
72
+
73
+ if dt is None:
74
+ raise ValueError("dt is null.")
75
+ if venue_name is None:
76
+ raise ValueError("venue_name is null.")
77
+
78
+ return (dt, teams, venue_name)
79
+
37
80
 
81
+ def _find_new_dt(
82
+ soup: BeautifulSoup,
83
+ scorebox_meta_div: Tag,
84
+ url: str,
85
+ session: requests_cache.CachedSession,
86
+ league: League,
87
+ player_urls: set[str],
88
+ scores: list[float],
89
+ fg: dict[str, int],
90
+ fga: dict[str, int],
91
+ offensive_rebounds: dict[str, int],
92
+ assists: dict[str, int],
93
+ turnovers: dict[str, int],
94
+ ) -> tuple[datetime.datetime, list[TeamModel], str]:
38
95
  in_divs = scorebox_meta_div.find_all("div")
39
96
  current_in_div_idx = 0
40
97
  in_div = in_divs[current_in_div_idx]
@@ -54,6 +111,39 @@ def _create_sportsreference_game_model(
54
111
  if not isinstance(scorebox_div, Tag):
55
112
  raise ValueError("scorebox_div is not a Tag.")
56
113
 
114
+ teams: list[TeamModel] = []
115
+ for a in scorebox_div.find_all("a"):
116
+ team_url = urllib.parse.urljoin(url, a.get("href"))
117
+ if "/schools/" in team_url:
118
+ teams.append(
119
+ create_sportsreference_team_model(
120
+ session,
121
+ team_url,
122
+ dt,
123
+ league,
124
+ player_urls,
125
+ scores[len(teams)],
126
+ fg,
127
+ fga,
128
+ offensive_rebounds,
129
+ assists,
130
+ turnovers,
131
+ )
132
+ )
133
+
134
+ return (dt, teams, venue_name)
135
+
136
+
137
+ def _create_sportsreference_game_model(
138
+ session: requests_cache.CachedSession,
139
+ url: str,
140
+ league: League,
141
+ ) -> GameModel:
142
+ # pylint: disable=too-many-branches
143
+ response = session.get(url)
144
+ response.raise_for_status()
145
+ soup = BeautifulSoup(response.text, "html.parser")
146
+
57
147
  player_urls = set()
58
148
  for a in soup.find_all("a"):
59
149
  player_url = urllib.parse.urljoin(url, a.get("href"))
@@ -99,25 +189,36 @@ def _create_sportsreference_game_model(
99
189
  for idx, player in enumerate(players):
100
190
  turnovers[player] = tovs[idx]
101
191
 
102
- teams: list[TeamModel] = []
103
- for a in scorebox_div.find_all("a"):
104
- team_url = urllib.parse.urljoin(url, a.get("href"))
105
- if "/schools/" in team_url:
106
- teams.append(
107
- create_sportsreference_team_model(
108
- session,
109
- team_url,
110
- dt,
111
- league,
112
- player_urls,
113
- scores[len(teams)],
114
- fg,
115
- fga,
116
- offensive_rebounds,
117
- assists,
118
- turnovers,
119
- )
120
- )
192
+ scorebox_meta_div = soup.find("div", class_="scorebox_meta")
193
+ if not isinstance(scorebox_meta_div, Tag):
194
+ dt, teams, venue_name = _find_old_dt(
195
+ dfs,
196
+ session,
197
+ soup,
198
+ url,
199
+ league,
200
+ player_urls,
201
+ fg,
202
+ fga,
203
+ offensive_rebounds,
204
+ assists,
205
+ turnovers,
206
+ )
207
+ else:
208
+ dt, teams, venue_name = _find_new_dt(
209
+ soup,
210
+ scorebox_meta_div,
211
+ url,
212
+ session,
213
+ league,
214
+ player_urls,
215
+ scores,
216
+ fg,
217
+ fga,
218
+ offensive_rebounds,
219
+ assists,
220
+ turnovers,
221
+ )
121
222
 
122
223
  season_type = SeasonType.REGULAR
123
224
  for h2 in soup.find_all("h2"):
@@ -0,0 +1,203 @@
1
+ # pylint: disable=missing-module-docstring,missing-class-docstring,invalid-name,bare-except,consider-using-f-string,unused-variable,broad-exception-raised,inconsistent-return-statements,raise-missing-from,no-else-return,too-many-arguments,singleton-comparison
2
+ # ruff: noqa: E722,E712
3
+ # type: ignore
4
+ import urllib
5
+
6
+ import feedparser # type: ignore
7
+ import requests
8
+ from bs4 import BeautifulSoup
9
+ from dateparser import parse as parse_date
10
+
11
+
12
class GoogleNews:
    """Minimal client for the Google News RSS feeds (vendored from pygooglenews).

    Builds locale-aware feed URLs, fetches them through an injectable HTTP
    session (directly, via proxies, or via the ScrapingBee API), and parses
    the result with ``feedparser``.
    """

    def __init__(self, lang="en", country="US", session=None):
        """Store the feed locale and the HTTP session used for requests.

        :param str lang: two-letter language code; stored lower-cased.
        :param str country: two-letter country code; stored upper-cased.
        :param session: optional requests-compatible session. A fresh
            ``requests.Session`` is created when omitted.
        """
        self.lang = lang.lower()
        self.country = country.upper()
        self.BASE_URL = "https://news.google.com/rss"
        self._session = session if session is not None else requests.Session()
        self.timeout = 30.0

    def __top_news_parser(self, text):
        """Return subarticles from the main and topic feeds."""
        try:
            bs4_html = BeautifulSoup(text, "html.parser")
            sub_articles = []
            for li in bs4_html.find_all("li"):
                try:
                    sub_articles.append(
                        {
                            "url": li.a["href"],
                            "title": li.a.text,
                            "publisher": li.font.text,
                        }
                    )
                except Exception:
                    # Malformed <li> entries (missing <a>/<font>) are skipped.
                    pass
            return sub_articles
        except Exception:
            # On any parse failure fall back to returning the raw summary text.
            return text

    def __ceid(self):
        """Compile correct country-lang parameters for Google News RSS URL."""
        return "?ceid={}:{}&hl={}&gl={}".format(
            self.country, self.lang, self.lang, self.country
        )

    def __add_sub_articles(self, entries):
        """Attach a ``sub_articles`` key to every entry (parsed or ``None``)."""
        for entry in entries:
            if "summary" in entry.keys():
                entry["sub_articles"] = self.__top_news_parser(entry["summary"])
            else:
                entry["sub_articles"] = None
        return entries

    def __scraping_bee_request(self, api_key, url):
        """Fetch *url* through the ScrapingBee proxy API.

        :raises Exception: when ScrapingBee answers with a non-200 status.
        """
        response = self._session.get(
            url="https://app.scrapingbee.com/api/v1/",
            params={"api_key": api_key, "url": url, "render_js": "false"},
            timeout=self.timeout,
        )
        if response.status_code != 200:
            raise Exception(
                "ScrapingBee status_code: "
                + str(response.status_code)
                + " "
                + response.text
            )
        return response

    def __parse_feed(self, feed_url, proxies=None, scraping_bee=None):
        """Download *feed_url* and return its ``feed`` metadata and ``entries``.

        Exactly one transport may be selected: proxies or ScrapingBee; plain
        session requests are used when neither is given.
        """
        if scraping_bee and proxies:
            raise Exception("Pick either ScrapingBee or proxies. Not both!")

        if proxies:
            r = self._session.get(feed_url, proxies=proxies, timeout=self.timeout)
        elif scraping_bee:
            r = self.__scraping_bee_request(url=feed_url, api_key=scraping_bee)
        else:
            r = self._session.get(feed_url, timeout=self.timeout)

        if "https://news.google.com/rss/unsupported" in r.url:  # pyright: ignore
            raise Exception("This feed is not available")

        d = feedparser.parse(r.text)

        # Retry by letting feedparser fetch the URL itself when the direct
        # session request yielded no entries (e.g. a stale cached response).
        if not scraping_bee and not proxies and len(d["entries"]) == 0:
            d = feedparser.parse(feed_url)

        return dict((k, d[k]) for k in ("feed", "entries"))

    def __search_helper(self, query):
        """URL-quote *query* for use in the search endpoint."""
        # Imported locally because a bare ``import urllib`` at module level
        # does not guarantee the ``urllib.parse`` submodule is loaded.
        from urllib.parse import quote_plus

        return quote_plus(query)

    def __from_to_helper(self, validate=None):
        """Normalise a free-form date string to ``YYYY-MM-DD``.

        :raises Exception: when the value cannot be parsed as a date.
        """
        try:
            validate = parse_date(validate).strftime("%Y-%m-%d")
            return str(validate)
        except Exception as err:
            raise Exception("Could not parse your date") from err

    def top_news(self, proxies=None, scraping_bee=None):
        """Return a list of all articles from the main page of Google News
        given a country and a language"""
        d = self.__parse_feed(
            self.BASE_URL + self.__ceid(), proxies=proxies, scraping_bee=scraping_bee
        )
        d["entries"] = self.__add_sub_articles(d["entries"])
        return d

    def topic_headlines(self, topic: str, proxies=None, scraping_bee=None):
        """Return a list of all articles from the topic page of Google News
        given a country and a language"""
        # Known section names use the /headlines/section/topic/ endpoint;
        # anything else is treated as an opaque topic token.
        if topic.upper() in [
            "WORLD",
            "NATION",
            "BUSINESS",
            "TECHNOLOGY",
            "ENTERTAINMENT",
            "SCIENCE",
            "SPORTS",
            "HEALTH",
        ]:
            d = self.__parse_feed(
                self.BASE_URL
                + "/headlines/section/topic/{}".format(topic.upper())
                + self.__ceid(),
                proxies=proxies,
                scraping_bee=scraping_bee,
            )
        else:
            d = self.__parse_feed(
                self.BASE_URL + "/topics/{}".format(topic) + self.__ceid(),
                proxies=proxies,
                scraping_bee=scraping_bee,
            )

        d["entries"] = self.__add_sub_articles(d["entries"])
        if len(d["entries"]) > 0:
            return d
        else:
            raise Exception("unsupported topic")

    def geo_headlines(self, geo: str, proxies=None, scraping_bee=None):
        """Return a list of all articles about a specific geolocation
        given a country and a language"""
        d = self.__parse_feed(
            self.BASE_URL + "/headlines/section/geo/{}".format(geo) + self.__ceid(),
            proxies=proxies,
            scraping_bee=scraping_bee,
        )

        d["entries"] = self.__add_sub_articles(d["entries"])
        return d

    def search(
        self,
        query: str,
        helper=True,
        when=None,
        from_=None,
        to_=None,
        proxies=None,
        scraping_bee=None,
    ):
        """
        Return a list of all articles given a full-text search parameter,
        a country and a language

        :param bool helper: When True helps with URL quoting
        :param str when: Sets a time range for the articles that can be found
        """

        if when:
            query += " when:" + when

        if from_ and not when:
            from_ = self.__from_to_helper(validate=from_)
            query += " after:" + from_

        if to_ and not when:
            to_ = self.__from_to_helper(validate=to_)
            query += " before:" + to_

        if helper:
            query = self.__search_helper(query)

        # The search endpoint already carries a query string, so the ceid
        # parameters must be appended with '&' rather than '?'.
        search_ceid = self.__ceid().replace("?", "&")

        d = self.__parse_feed(
            self.BASE_URL + "/search?q={}".format(query) + search_ceid,
            proxies=proxies,
            scraping_bee=scraping_bee,
        )

        d["entries"] = self.__add_sub_articles(d["entries"])
        return d
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: sportsball
3
- Version: 0.3.19
3
+ Version: 0.3.21
4
4
  Summary: A library for pulling in and normalising sports stats.
5
5
  Home-page: https://github.com/8W9aG/sportsball
6
6
  Author: Will Sackfield
@@ -41,6 +41,8 @@ Requires-Dist: tenacity>=9.0.0
41
41
  Requires-Dist: random_user_agent>=1.0.1
42
42
  Requires-Dist: wayback>=0.4.5
43
43
  Requires-Dist: cryptography>=44.0.0
44
+ Requires-Dist: feedparser>=6.0.11
45
+ Requires-Dist: dateparser>=1.2.0
44
46
  Dynamic: author
45
47
  Dynamic: author-email
46
48
  Dynamic: classifier
@@ -97,6 +99,8 @@ Python 3.11.6:
97
99
  - [random_user_agent](https://github.com/Luqman-Ud-Din/random_user_agent)
98
100
  - [wayback](https://github.com/edgi-govdata-archiving/wayback)
99
101
  - [cryptography](https://cryptography.io/en/latest/)
102
+ - [feedparser](https://github.com/kurtmckee/feedparser)
103
+ - [dateparser](https://dateparser.readthedocs.io/en/latest/)
100
104
 
101
105
  ## Raison D'être :thought_balloon:
102
106
 
@@ -143,6 +143,8 @@ sportsball/data/wikipedia/__init__.py
143
143
  sportsball/data/wikipedia/wikipedia_venue_model.py
144
144
  sportsball/data/x/__init__.py
145
145
  sportsball/data/x/x_social_model.py
146
+ sportsball/vendor/__init__.py
147
+ sportsball/vendor/pygooglenews/__init__.py
146
148
  tests/__init__.py
147
149
  tests/proxy_session_test.py
148
150
  tests/sportsball_test.py
@@ -28,3 +28,5 @@ tenacity>=9.0.0
28
28
  random_user_agent>=1.0.1
29
29
  wayback>=0.4.5
30
30
  cryptography>=44.0.0
31
+ feedparser>=6.0.11
32
+ dateparser>=1.2.0
@@ -0,0 +1,97 @@
1
+ """Tests for the sportsreference game model class."""
2
+ import datetime
3
+ import os
4
+ import unittest
5
+
6
+ import requests_mock
7
+ import requests_cache
8
+ from sportsball.data.sportsreference.sportsreference_game_model import create_sportsreference_game_model
9
+ from sportsball.data.league import League
10
+
11
+
12
class TestSportsReferenceGameModel(unittest.TestCase):
    """Checks that create_sportsreference_game_model extracts the game
    datetime from both the new and the old sportsreference page layouts."""

    def setUp(self):
        self.session = requests_cache.CachedSession(backend="memory")
        self.dir = os.path.dirname(__file__)

    def _register_fixture(self, mocker, url, filename):
        # Serve a local HTML fixture as the response body for the given URL.
        with open(os.path.join(self.dir, filename), "rb") as handle:
            mocker.get(url, content=handle.read())

    def _register_player(self, mocker, player):
        # Player pages follow the /players/<initial>/<id>.html convention.
        self._register_fixture(
            mocker,
            f"http://www.basketball-reference.com/players/{player[0]}/{player}.html",
            f"{player}.html",
        )

    def test_dt(self):
        url = "https://www.basketball-reference.com/boxscores/202501230ATL.html"
        with requests_mock.Mocker() as m:
            self._register_fixture(m, url, "202501230ATL.html")
            m.get("https://historical-forecast-api.open-meteo.com/v1/forecast?latitude=33.757222&longitude=-84.396389&start_date=2025-01-22&end_date=2025-01-23&hourly=temperature_2m&hourly=relative_humidity_2m&hourly=dew_point_2m&hourly=apparent_temperature&hourly=precipitation&hourly=rain&hourly=snowfall&hourly=snow_depth&hourly=weather_code&hourly=pressure_msl&hourly=surface_pressure&hourly=cloud_cover&hourly=cloud_cover_low&hourly=cloud_cover_mid&hourly=cloud_cover_high&hourly=et0_fao_evapotranspiration&hourly=vapour_pressure_deficit&hourly=wind_speed_10m&hourly=wind_speed_100m&hourly=wind_direction_10m&hourly=wind_direction_100m&hourly=wind_gusts_10m&hourly=soil_temperature_0_to_7cm&hourly=soil_temperature_7_to_28cm&hourly=soil_temperature_28_to_100cm&hourly=soil_temperature_100_to_255cm&hourly=soil_moisture_0_to_7cm&hourly=soil_moisture_7_to_28cm&hourly=soil_moisture_28_to_100cm&hourly=soil_moisture_100_to_255cm&daily=weather_code&daily=temperature_2m_max&daily=temperature_2m_min&daily=temperature_2m_mean&daily=apparent_temperature_max&daily=apparent_temperature_min&daily=apparent_temperature_mean&daily=sunrise&daily=sunset&daily=daylight_duration&daily=sunshine_duration&daily=precipitation_sum&daily=rain_sum&daily=snowfall_sum&daily=precipitation_hours&daily=wind_speed_10m_max&daily=wind_gusts_10m_max&daily=wind_direction_10m_dominant&daily=shortwave_radiation_sum&daily=et0_fao_evapotranspiration&timezone=America%2FNew_York&format=flatbuffers")
            game_model = create_sportsreference_game_model(
                self.session,
                url,
                League.NBA,
                datetime.datetime(2010, 10, 10, 10, 10, 0),
            )
            self.assertEqual(game_model.dt, datetime.datetime(2025, 1, 23, 19, 30))

    def test_dt_old_style(self):
        url = "http://www.basketball-reference.com/boxscores/201606190GSW.html"
        # Player fixtures registered before the GSW team page, in order.
        cavaliers_players = [
            "shumpim01",
            "mcraejo01",
            "jefferi01",
            "varejan01",
            "loveke01",
            "thomptr01",
            "curryst01",
            "kaunsa01",
            "jamesle01",
            "willima01",
            "irvinky01",
            "smithjr01",
        ]
        # Player fixtures registered after the GSW team page, in order
        # (thompkl01 was registered twice in the original run; kept as-is).
        warriors_players = [
            "barbole01",
            "barneha02",
            "thompkl01",
            "thompkl01",
            "ezelife01",
            "livinsh01",
            "greendr01",
            "bogutan01",
            "looneke01",
            "speigma01",
            "iguodan01",
        ]
        with requests_mock.Mocker() as m:
            self._register_fixture(m, url, "201606190GSW.html")
            self._register_fixture(
                m,
                "http://www.basketball-reference.com/teams/CLE/2016.html",
                "2016.html",
            )
            for player in cavaliers_players:
                self._register_player(m, player)
            self._register_fixture(
                m,
                "http://www.basketball-reference.com/teams/GSW/2016.html",
                "GSW_2016.html",
            )
            # greendr01 is also requested over https.
            self._register_fixture(
                m,
                "https://www.basketball-reference.com/players/g/greendr01.html",
                "greendr01.html",
            )
            for player in warriors_players:
                self._register_player(m, player)
            m.get("https://historical-forecast-api.open-meteo.com/v1/forecast?latitude=37.750278&longitude=-122.203056&start_date=2016-06-18&end_date=2016-06-19&hourly=temperature_2m&hourly=relative_humidity_2m&hourly=dew_point_2m&hourly=apparent_temperature&hourly=precipitation&hourly=rain&hourly=snowfall&hourly=snow_depth&hourly=weather_code&hourly=pressure_msl&hourly=surface_pressure&hourly=cloud_cover&hourly=cloud_cover_low&hourly=cloud_cover_mid&hourly=cloud_cover_high&hourly=et0_fao_evapotranspiration&hourly=vapour_pressure_deficit&hourly=wind_speed_10m&hourly=wind_speed_100m&hourly=wind_direction_10m&hourly=wind_direction_100m&hourly=wind_gusts_10m&hourly=soil_temperature_0_to_7cm&hourly=soil_temperature_7_to_28cm&hourly=soil_temperature_28_to_100cm&hourly=soil_temperature_100_to_255cm&hourly=soil_moisture_0_to_7cm&hourly=soil_moisture_7_to_28cm&hourly=soil_moisture_28_to_100cm&hourly=soil_moisture_100_to_255cm&daily=weather_code&daily=temperature_2m_max&daily=temperature_2m_min&daily=temperature_2m_mean&daily=apparent_temperature_max&daily=apparent_temperature_min&daily=apparent_temperature_mean&daily=sunrise&daily=sunset&daily=daylight_duration&daily=sunshine_duration&daily=precipitation_sum&daily=rain_sum&daily=snowfall_sum&daily=precipitation_hours&daily=wind_speed_10m_max&daily=wind_gusts_10m_max&daily=wind_direction_10m_dominant&daily=shortwave_radiation_sum&daily=et0_fao_evapotranspiration&timezone=America%2FLos_Angeles&format=flatbuffers")

            game_model = create_sportsreference_game_model(
                self.session,
                url,
                League.NBA,
                datetime.datetime(2010, 10, 10, 10, 10, 0),
            )
            self.assertEqual(game_model.dt, datetime.datetime(2016, 6, 19, 20, 0))
File without changes