warn-scraper 1.2.111.tar.gz → 1.2.113.tar.gz

This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (136)
  1. {warn_scraper-1.2.111/warn_scraper.egg-info → warn_scraper-1.2.113}/PKG-INFO +1 -1
  2. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/la.py +10 -4
  3. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/utils.py +101 -4
  4. {warn_scraper-1.2.111 → warn_scraper-1.2.113/warn_scraper.egg-info}/PKG-INFO +1 -1
  5. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/.devcontainer/devcontainer.json +0 -0
  6. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/.github/dependabot.yml.disabled-for-sanity +0 -0
  7. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/.github/workflows/continuous-deployment.yml +0 -0
  8. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/.github/workflows/continuous-deployment.yml.broken-tests +0 -0
  9. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/.gitignore +0 -0
  10. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/.pre-commit-config.yaml +0 -0
  11. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/LICENSE +0 -0
  12. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/MANIFEST.in +0 -0
  13. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/Makefile +0 -0
  14. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/Pipfile +0 -0
  15. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/Pipfile.lock +0 -0
  16. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/README.md +0 -0
  17. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/Makefile +0 -0
  18. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/_static/R42693.pdf +0 -0
  19. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/_static/gao-03-1003.pdf +0 -0
  20. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/_static/releasing-actions-finished.png +0 -0
  21. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/_static/releasing-actions-start.png +0 -0
  22. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/_static/releasing-changelog-button.png +0 -0
  23. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/_static/releasing-changelog-entered.png +0 -0
  24. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/_static/releasing-draft-button.png +0 -0
  25. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/_static/releasing-name-release.png +0 -0
  26. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/_static/releasing-name-tag.png +0 -0
  27. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/_static/releasing-publish-button.png +0 -0
  28. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/_static/releasing-pypi.png +0 -0
  29. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/_static/releasing-release-published.png +0 -0
  30. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/_static/releasing-releases-button.png +0 -0
  31. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/_static/releasing-tag-button.png +0 -0
  32. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/_templates/sources.md.tmpl +0 -0
  33. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/conf.py +0 -0
  34. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/contributing.rst +0 -0
  35. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/index.rst +0 -0
  36. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/make.bat +0 -0
  37. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/reference.rst +0 -0
  38. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/releasing.md +0 -0
  39. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/requirements.txt +0 -0
  40. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/al.md +0 -0
  41. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/az.md +0 -0
  42. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/ca.md +0 -0
  43. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/co.md +0 -0
  44. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/dc.md +0 -0
  45. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/de.md +0 -0
  46. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/ia.md +0 -0
  47. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/in.md +0 -0
  48. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/job_center.md +0 -0
  49. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/ks.md +0 -0
  50. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/md.md +0 -0
  51. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/me.md +0 -0
  52. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/mo.md +0 -0
  53. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/ny.md +0 -0
  54. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/ok.md +0 -0
  55. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/or.md +0 -0
  56. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/sc.md +0 -0
  57. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/tx.md +0 -0
  58. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/ut.md +0 -0
  59. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/va.md +0 -0
  60. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/vt.md +0 -0
  61. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/scrapers/wi.md +0 -0
  62. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/sources.md +0 -0
  63. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/docs/usage.md +0 -0
  64. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/setup.cfg +0 -0
  65. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/setup.py +0 -0
  66. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/tests/__init__.py +0 -0
  67. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/tests/cassettes/test_cached_detail_pages.yaml +0 -0
  68. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/tests/cassettes/test_cached_search_results.yaml +0 -0
  69. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/tests/cassettes/test_missing_detail_page_values.yaml +0 -0
  70. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/tests/cassettes/test_no_results.yaml +0 -0
  71. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/tests/cassettes/test_paged_results.yaml +0 -0
  72. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/tests/cassettes/test_scrape_integration.yaml +0 -0
  73. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/tests/conftest.py +0 -0
  74. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/tests/fixtures/2021_page_1.html +0 -0
  75. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/tests/fixtures/2021_page_2.html +0 -0
  76. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/tests/test_cache.py +0 -0
  77. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/tests/test_delete.py +0 -0
  78. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/tests/test_job_center.py +0 -0
  79. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/tests/test_job_center_cache.py +0 -0
  80. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/tests/test_openpyxl.py +0 -0
  81. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/__init__.py +0 -0
  82. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/cache.py +0 -0
  83. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/cli.py +0 -0
  84. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/platforms/__init__.py +0 -0
  85. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/platforms/job_center/__init__.py +0 -0
  86. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/platforms/job_center/cache.py +0 -0
  87. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/platforms/job_center/site.py +0 -0
  88. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/platforms/job_center/urls.py +0 -0
  89. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/platforms/job_center/utils.py +0 -0
  90. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/runner.py +0 -0
  91. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/__init__.py +0 -0
  92. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/ak.py +0 -0
  93. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/al.py +0 -0
  94. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/az.py +0 -0
  95. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/ca.py +0 -0
  96. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/co.py +0 -0
  97. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/ct.py +0 -0
  98. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/dc.py +0 -0
  99. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/de.py +0 -0
  100. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/fl.py +0 -0
  101. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/ga.py +0 -0
  102. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/hi.py +0 -0
  103. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/ia.py +0 -0
  104. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/id.py +0 -0
  105. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/il.py +0 -0
  106. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/in.py +0 -0
  107. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/ks.py +0 -0
  108. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/ky.py +0 -0
  109. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/md.py +0 -0
  110. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/me.py +0 -0
  111. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/mi.py +0 -0
  112. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/mo.py +0 -0
  113. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/mt.py +0 -0
  114. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/ne.py +0 -0
  115. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/nj.py +0 -0
  116. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/nm.py +0 -0
  117. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/ny.py +0 -0
  118. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/oh.py +0 -0
  119. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/ok.py +0 -0
  120. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/or.py +0 -0
  121. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/ri.py +0 -0
  122. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/sc.py +0 -0
  123. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/sd.py +0 -0
  124. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/tn.py +0 -0
  125. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/tx.py +0 -0
  126. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/ut.py +0 -0
  127. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/va.py +0 -0
  128. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/vt.py +0 -0
  129. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/wa.py +0 -0
  130. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn/scrapers/wi.py +0 -0
  131. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn_scraper.egg-info/SOURCES.txt +0 -0
  132. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn_scraper.egg-info/dependency_links.txt +0 -0
  133. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn_scraper.egg-info/entry_points.txt +0 -0
  134. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn_scraper.egg-info/not-zip-safe +0 -0
  135. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn_scraper.egg-info/requires.txt +0 -0
  136. {warn_scraper-1.2.111 → warn_scraper-1.2.113}/warn_scraper.egg-info/top_level.txt +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: warn-scraper
-Version: 1.2.111
+Version: 1.2.113
 Summary: Command-line interface for downloading WARN Act notices of qualified plant closings and mass layoffs from state government websites
 Home-page: https://github.com/biglocalnews/warn-scraper
 Author: Big Local News
warn/scrapers/la.py
@@ -10,7 +10,7 @@ from bs4 import BeautifulSoup
 from .. import utils
 from ..cache import Cache
 
-__authors__ = ["chriszs"]
+__authors__ = ["chriszs", "stucka"]
 __tags__ = ["html", "pdf"]
 __source__ = {
     "name": "Louisiana Workforce Commission",
@@ -43,10 +43,11 @@ def scrape(
 
     # Download the root page
     url = f"{base_url}Downloads/{file_base}.asp"
-    html = utils.get_url(url).text
+    htmlbin, html = utils.get_with_zyte(url)
 
     # Save it to the cache
-    cache_key = f"{state_code}/{file_base}.html"
+    cache_key = cache_dir / f"{state_code}/{file_base}.html"
+    utils.create_directory(Path(cache_key), is_file=True)
     cache.write(cache_key, html)
 
     # Parse out the links to WARN notice PDFs
@@ -59,9 +60,14 @@ def scrape(
         if "WARN Notices" in link.text:
            # Download the PDF
            pdf_url = f"{base_url}{link['href']}"
-           pdf_path = _read_or_download(cache, state_code, pdf_url)
+           rawbin, rawtext = utils.get_with_zyte(pdf_url)
+           pdf_path = cache_dir / f"{state_code}/{os.path.basename(pdf_url)}"
+
+           with open(pdf_path, "wb") as fp:
+               fp.write(rawbin)
 
            # Process the PDF
+           logger.debug(f"Attempting to parse {pdf_path}")
            rows = _process_pdf(pdf_path)
            all_rows.extend(rows)
 
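The net effect in la.py: both the index page and each PDF are now fetched through Zyte rather than with a plain utils.get_url() call, and the scraper writes the raw bytes into the cache directory itself. A minimal sketch of that fetch-and-cache pattern (cache_dir, state_code, and the URL below are illustrative stand-ins, not the scraper's real arguments):

import os
from pathlib import Path

from warn import utils

# Illustrative stand-ins for the scraper's real cache_dir and state_code arguments
cache_dir = Path("./cache")
state_code = "la"
pdf_url = "https://www.laworks.net/Downloads/example.pdf"  # hypothetical URL

# get_with_zyte returns (bytes, str); use the bytes for binary files like PDFs
rawbin, rawtext = utils.get_with_zyte(pdf_url)
if rawbin is not None:
    pdf_path = cache_dir / f"{state_code}/{os.path.basename(pdf_url)}"
    utils.create_directory(pdf_path, is_file=True)
    with open(pdf_path, "wb") as fp:
        fp.write(rawbin)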
warn/utils.py
@@ -1,7 +1,9 @@
 import csv
+import json
 import logging
 import os
 import typing
+from base64 import b64decode, b64encode
 from pathlib import Path
 from time import sleep
 
@@ -94,6 +96,103 @@ def save_if_good_url(filename, url, **kwargs):
     return success_flag, content
 
 
+def get_with_zyte(url):
+    """Use Zyte as a proxy server to retrieve data not available without it.
+
+    Args:
+        url (str): URL to retrieve
+    Returns:
+        returnbin (bytes): raw binary representation of the returned data object
+        returntext (str): UTF-8 decoding of the returned data object, e.g., HTML
+    Failures:
+        Returns (None, None) if it encounters a problem and logs an error.
+    Requires:
+        ZYTE_API_KEY to be set in the environment
+    """
+    logger.debug(f"Seeking to fetch {url} with Zyte")
+    try:
+        zyte_api_key = os.environ["ZYTE_API_KEY"]
+    except KeyError:
+        logger.error(
+            "No ZYTE_API_KEY variable found in environment. Please get an API key from Zyte and export it."
+        )
+        return (None, None)
+
+    api_response = requests.post(
+        "https://api.zyte.com/v1/extract",
+        auth=(zyte_api_key, ""),
+        json={
+            "url": url,
+            "httpResponseBody": True,
+            "followRedirect": True,
+        },
+    )
+
+    if not api_response.ok:
+        logger.error(
+            f"Error downloading {url} with get_with_zyte. Response code: {api_response.status_code}"
+        )
+        return (None, None)
+    returnbin: bytes = b64decode(api_response.json()["httpResponseBody"])
+    returntext: str = returnbin.decode("utf-8", errors="backslashreplace")
+    logger.debug(f"Fetched {url}")
+    return (returnbin, returntext)
+
+
+def post_with_zyte(url, payload):
+    """Use Zyte as a proxy server to POST to sites not reachable without it.
+
+    Args:
+        url (str): URL to retrieve
+        payload (dict or str): POST body. A dict is serialized to JSON first;
+            the string is then base64-encoded for the Zyte API.
+    Returns:
+        returnbin (bytes): raw binary representation of the returned data object
+        returntext (str): UTF-8 decoding of the returned data object, e.g., HTML
+    Failures:
+        Returns (None, None) if it encounters a problem and logs an error.
+    Requires:
+        ZYTE_API_KEY to be set in the environment
+    """
+    logger.debug(f"Seeking to fetch {url} with Zyte")
+    try:
+        zyte_api_key = os.environ["ZYTE_API_KEY"]
+    except KeyError:
+        logger.error(
+            "No ZYTE_API_KEY variable found in environment. Please get an API key from Zyte and export it."
+        )
+        return (None, None)
+
+    if isinstance(payload, dict):
+        payload = json.dumps(payload)
+
+    if isinstance(payload, str):
+        # The Zyte API expects a base64-encoded string, not raw bytes
+        payload = b64encode(payload.encode("utf-8")).decode("utf-8")
+
+    api_response = requests.post(
+        "https://api.zyte.com/v1/extract",
+        auth=(zyte_api_key, ""),
+        json={
+            "url": url,
+            "httpRequestMethod": "POST",
+            "httpRequestBody": payload,
+            "httpResponseBody": True,
+            "followRedirect": True,
+        },
+    )
+
+    if not api_response.ok:
+        logger.error(
+            f"Error downloading {url} with post_with_zyte. Response code: {api_response.status_code}. Response: {api_response.json()}"
+        )
+        return (None, None)
+    returnbin: bytes = b64decode(api_response.json()["httpResponseBody"])
+    returntext: str = returnbin.decode("utf-8", errors="backslashreplace")
+    logger.debug(f"Fetched {url}")
+    return (returnbin, returntext)
+
+
 def write_rows_to_csv(output_path: Path, rows: list, mode="w"):
     """Write the provided list to the provided path as comma-separated values.
 
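Both helpers fail soft: a missing ZYTE_API_KEY or a non-2xx response from the Zyte API logs an error and returns (None, None) rather than raising, so callers should check for None. A short usage sketch (the URLs and payload here are hypothetical, and ZYTE_API_KEY must be exported beforehand):

from warn import utils

# GET through the Zyte proxy; returns (bytes, str), or (None, None) on failure
html_bytes, html_text = utils.get_with_zyte("https://example.com/notices")

# POST through the Zyte proxy; a dict payload is JSON-serialized and then
# base64-encoded before being sent to the Zyte extract endpoint
body_bytes, body_text = utils.post_with_zyte(
    "https://example.com/search", {"year": 2024}
)

if body_text is None:
    raise RuntimeError("Zyte request failed; see the log for details")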
@@ -109,9 +208,7 @@ def write_rows_to_csv(output_path: Path, rows: list, mode="w"):
         writer.writerows(rows)
 
 
-def write_dict_rows_to_csv(
-    output_path, headers, rows, mode="w", extrasaction="raise", encoding="utf-8"
-):
+def write_dict_rows_to_csv(output_path, headers, rows, mode="w", extrasaction="raise"):
     """Write the provided dictionary to the provided path as comma-separated values.
 
     Args:
@@ -123,7 +220,7 @@ def write_dict_rows_to_csv(
     """
     create_directory(output_path, is_file=True)
     logger.debug(f"Writing {len(rows)} rows to {output_path}")
-    with open(output_path, mode, newline="", encoding=encoding) as f:
+    with open(output_path, mode, newline="") as f:
        # Create the writer object
        writer = csv.DictWriter(f, fieldnames=headers, extrasaction=extrasaction)
        # If we are writing a new row ...
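For reference, the simplified write_dict_rows_to_csv is called like this (a small sketch; the headers, rows, and output path are invented for illustration). Note that with the encoding parameter removed, the file is opened with the platform's default encoding:

from pathlib import Path

from warn.utils import write_dict_rows_to_csv

headers = ["company", "notice_date", "jobs"]  # hypothetical field names
rows = [
    {"company": "Acme Corp", "notice_date": "2024-01-02", "jobs": 150},
    {"company": "Globex", "notice_date": "2024-02-10", "jobs": 75},
]

# extrasaction="raise" (the default) errors if a row has keys not in headers
write_dict_rows_to_csv(Path("example_output.csv"), headers, rows)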
warn_scraper.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: warn-scraper
-Version: 1.2.111
+Version: 1.2.113
 Summary: Command-line interface for downloading WARN Act notices of qualified plant closings and mass layoffs from state government websites
 Home-page: https://github.com/biglocalnews/warn-scraper
 Author: Big Local News