warn-scraper 1.2.57.tar.gz → 1.2.59.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (137)
  1. {warn-scraper-1.2.57/warn_scraper.egg-info → warn-scraper-1.2.59}/PKG-INFO +1 -1
  2. warn-scraper-1.2.59/warn/scrapers/hi.py +133 -0
  3. {warn-scraper-1.2.57 → warn-scraper-1.2.59/warn_scraper.egg-info}/PKG-INFO +1 -1
  4. warn-scraper-1.2.57/warn/scrapers/hi.py +0 -101
  5. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/.devcontainer/devcontainer.json +0 -0
  6. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/.github/dependabot.yml.disabled +0 -0
  7. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/.github/workflows/continuous-deployment.yml +0 -0
  8. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/.gitignore +0 -0
  9. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/.pre-commit-config.yaml +0 -0
  10. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/LICENSE +0 -0
  11. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/MANIFEST.in +0 -0
  12. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/Makefile +0 -0
  13. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/Pipfile +0 -0
  14. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/Pipfile.lock +0 -0
  15. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/README.md +0 -0
  16. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/Makefile +0 -0
  17. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/_static/R42693.pdf +0 -0
  18. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/_static/gao-03-1003.pdf +0 -0
  19. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/_static/releasing-actions-finished.png +0 -0
  20. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/_static/releasing-actions-start.png +0 -0
  21. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/_static/releasing-changelog-button.png +0 -0
  22. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/_static/releasing-changelog-entered.png +0 -0
  23. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/_static/releasing-draft-button.png +0 -0
  24. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/_static/releasing-name-release.png +0 -0
  25. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/_static/releasing-name-tag.png +0 -0
  26. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/_static/releasing-publish-button.png +0 -0
  27. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/_static/releasing-pypi.png +0 -0
  28. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/_static/releasing-release-published.png +0 -0
  29. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/_static/releasing-releases-button.png +0 -0
  30. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/_static/releasing-tag-button.png +0 -0
  31. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/_templates/sources.md.tmpl +0 -0
  32. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/conf.py +0 -0
  33. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/contributing.rst +0 -0
  34. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/index.rst +0 -0
  35. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/make.bat +0 -0
  36. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/reference.rst +0 -0
  37. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/releasing.md +0 -0
  38. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/requirements.txt +0 -0
  39. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/al.md +0 -0
  40. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/az.md +0 -0
  41. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/ca.md +0 -0
  42. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/co.md +0 -0
  43. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/dc.md +0 -0
  44. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/de.md +0 -0
  45. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/ia.md +0 -0
  46. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/in.md +0 -0
  47. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/job_center.md +0 -0
  48. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/ks.md +0 -0
  49. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/md.md +0 -0
  50. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/me.md +0 -0
  51. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/mo.md +0 -0
  52. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/ny.md +0 -0
  53. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/ok.md +0 -0
  54. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/or.md +0 -0
  55. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/sc.md +0 -0
  56. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/tx.md +0 -0
  57. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/ut.md +0 -0
  58. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/va.md +0 -0
  59. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/vt.md +0 -0
  60. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/scrapers/wi.md +0 -0
  61. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/sources.md +0 -0
  62. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/docs/usage.md +0 -0
  63. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/setup.cfg +0 -0
  64. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/setup.py +0 -0
  65. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/__init__.py +0 -0
  66. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/cassettes/test_cached_detail_pages.yaml +0 -0
  67. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/cassettes/test_cached_search_results.yaml +0 -0
  68. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/cassettes/test_delete.yaml +0 -0
  69. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/cassettes/test_missing_detail_page_values.yaml +0 -0
  70. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/cassettes/test_no_results.yaml +0 -0
  71. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/cassettes/test_paged_results.yaml +0 -0
  72. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/cassettes/test_scrape_integration.yaml +0 -0
  73. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/conftest.py +0 -0
  74. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/fixtures/2021_page_1.html +0 -0
  75. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/fixtures/2021_page_2.html +0 -0
  76. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/test_cache.py +0 -0
  77. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/test_delete.py +0 -0
  78. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/test_job_center.py +0 -0
  79. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/test_job_center_cache.py +0 -0
  80. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/tests/test_openpyxl.py +0 -0
  81. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/__init__.py +0 -0
  82. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/cache.py +0 -0
  83. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/cli.py +0 -0
  84. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/platforms/__init__.py +0 -0
  85. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/platforms/job_center/__init__.py +0 -0
  86. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/platforms/job_center/cache.py +0 -0
  87. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/platforms/job_center/site.py +0 -0
  88. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/platforms/job_center/urls.py +0 -0
  89. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/platforms/job_center/utils.py +0 -0
  90. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/runner.py +0 -0
  91. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/__init__.py +0 -0
  92. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/ak.py +0 -0
  93. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/al.py +0 -0
  94. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/az.py +0 -0
  95. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/ca.py +0 -0
  96. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/co.py +0 -0
  97. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/ct.py +0 -0
  98. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/dc.py +0 -0
  99. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/de.py +0 -0
  100. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/fl.py +0 -0
  101. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/ga.py +0 -0
  102. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/ia.py +0 -0
  103. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/id.py +0 -0
  104. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/il.py +0 -0
  105. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/in.py +0 -0
  106. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/ks.py +0 -0
  107. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/ky.py +0 -0
  108. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/la.py +0 -0
  109. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/md.py +0 -0
  110. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/me.py +0 -0
  111. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/mi.py +0 -0
  112. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/mo.py +0 -0
  113. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/mt.py +0 -0
  114. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/ne.py +0 -0
  115. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/nj.py +0 -0
  116. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/nm.py +0 -0
  117. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/ny.py +0 -0
  118. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/oh.py +0 -0
  119. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/ok.py +0 -0
  120. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/or.py +0 -0
  121. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/ri.py +0 -0
  122. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/sc.py +0 -0
  123. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/sd.py +0 -0
  124. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/tn.py +0 -0
  125. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/tx.py +0 -0
  126. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/ut.py +0 -0
  127. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/va.py +0 -0
  128. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/vt.py +0 -0
  129. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/wa.py +0 -0
  130. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/scrapers/wi.py +0 -0
  131. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn/utils.py +0 -0
  132. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn_scraper.egg-info/SOURCES.txt +0 -0
  133. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn_scraper.egg-info/dependency_links.txt +0 -0
  134. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn_scraper.egg-info/entry_points.txt +0 -0
  135. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn_scraper.egg-info/not-zip-safe +0 -0
  136. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn_scraper.egg-info/requires.txt +0 -0
  137. {warn-scraper-1.2.57 → warn-scraper-1.2.59}/warn_scraper.egg-info/top_level.txt +0 -0

{warn-scraper-1.2.57/warn_scraper.egg-info → warn-scraper-1.2.59}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: warn-scraper
-Version: 1.2.57
+Version: 1.2.59
 Summary: Command-line interface for downloading WARN Act notices of qualified plant closings and mass layoffs from state government websites
 Home-page: https://github.com/biglocalnews/warn-scraper
 Author: Big Local News

warn-scraper-1.2.59/warn/scrapers/hi.py
@@ -0,0 +1,133 @@
+import datetime
+import logging
+from pathlib import Path
+from time import sleep
+from urllib.parse import quote
+
+from bs4 import BeautifulSoup
+
+from .. import utils
+
+__authors__ = ["Ash1R", "stucka"]
+__tags__ = ["html", "pdf"]
+__source__ = {
+    "name": "Workforce Development Hawaii",
+    "url": "https://labor.hawaii.gov/wdc/real-time-warn-updates/",
+}
+
+logger = logging.getLogger(__name__)
+
+
+def scrape(
+    data_dir: Path = utils.WARN_DATA_DIR,
+    cache_dir: Path = utils.WARN_CACHE_DIR,
+) -> Path:
+    """
+    Scrape data from Hawaii.
+
+    Keyword arguments:
+    data_dir -- the Path were the result will be saved (default WARN_DATA_DIR)
+    cache_dir -- the Path where results can be cached (default WARN_CACHE_DIR)
+    Returns: the Path where the file is written
+    """
+    # Google Cache is a backup if the state re-implements its JS-enabled browser equivalent
+    usegooglecache = False
+    cacheprefix = "https://webcache.googleusercontent.com/search?q=cache%3A"
+
+    firstpageurl = "https://labor.hawaii.gov/wdc/real-time-warn-updates/"
+    if usegooglecache:
+        firstpageurl = cacheprefix + quote(firstpageurl)
+
+    firstpage = utils.get_url(firstpageurl)
+    soup = BeautifulSoup(firstpage.text, features="html5lib")
+    pagesection = soup.select("div.primary-content")[0]
+    subpageurls = []
+    for atag in pagesection.find_all("a"):
+        href = atag["href"]
+        if href.endswith("/"):
+            href = href  # [:-1]
+        subpageurl = href
+        if usegooglecache:
+            subpageurl = cacheprefix + quote(subpageurl)
+        subpageurls.append(subpageurl)
+
+    masterlist = []
+    headers = ["Company", "Date", "PDF url", "location", "jobs"]
+    # data = [headers]
+    # lastdateseen = "2099-12-31"
+
+    for subpageurl in reversed(subpageurls):
+        sleep(2)
+        # Conditionally here, we want to check and see if we have the old cached files, or if the year is current or previous.
+        # Only need to download if it's current or previous year.
+        # But do we care enough to implement right now?
+
+        logger.debug(f"Parsing page {subpageurl}")
+        page = utils.get_url(subpageurl)
+        soup = BeautifulSoup(page.text, features="html5lib")
+        if subpageurl.endswith("/"):
+            subpageurl = subpageurl[:-1]  # Trim off the final slash, if there is one
+        pageyear = subpageurl.split("/")[-1][:4]
+
+        # There are at least two formats for Hawaii. In some years, each individual layoff is in a paragraph tag.
+        # In others, all the layoffs are grouped under a single paragraph tag, separated by <br>
+        # BeautifulSoup converts that to a <br/>.
+        # But the call to parent also repeats a bunch of entries, so we need to ensure they're not.
+        # So in more recent years, finding the parent of the "p a" there find essentially the row of data.
+        # In the older years, the parent is ... all the rows of data, which gets repeated.
+        # So take each chunk of data, find the parent, do some quality checks, clean up the text,
+        # don't engage with duplicates.
+
+        selection = soup.select("p a[href*=pdf]")
+        rows = []
+        for child in selection:
+            parent = child.parent
+            for subitem in parent.prettify().split("<br/>"):
+                if len(subitem.strip()) > 5 and ".pdf" in subitem:
+                    subitem = subitem.replace("\xa0", " ").replace("\n", "").strip()
+                    row = BeautifulSoup(subitem, features="html5lib")
+                    if row not in rows:
+                        rows.append(row)
+
+        for row in rows:
+            line: dict = {}
+            for item in headers:
+                line[item] = None
+            graftext = row.get_text().strip()
+            tempdate = graftext
+
+            # Check to see if it's not an amendment, doesn't have 3/17/2022 date format
+            # Most dates should be like "March 17, 2022"
+            if pageyear in tempdate and f"/{pageyear}" not in tempdate:
+                try:
+                    tempdate = (
+                        graftext.strip().split(pageyear)[0].strip() + f" {pageyear}"
+                    )
+                except ValueError:
+                    print(f"Date conversion failed on row: {row}")
+
+            line["Date"] = tempdate
+
+            try:
+                parsed_date = datetime.datetime.strptime(
+                    tempdate, "%B %d, %Y"
+                ).strftime("%Y-%m-%d")
+                line["Date"] = parsed_date
+            except ValueError:
+                logger.debug(f"Date error: '{tempdate}', leaving intact")
+
+            line["PDF url"] = row.select("a")[0].get("href")
+            line["Company"] = row.select("a")[0].get_text().strip()
+            masterlist.append(line)
+
+    if len(masterlist) == 0:
+        logger.error(
+            "No data scraped -- anti-scraping mechanism may be back in play -- try Google Cache?"
+        )
+    output_csv = data_dir / "hi.csv"
+    utils.write_dict_rows_to_csv(output_csv, headers, masterlist)
+    return output_csv
+
+
+if __name__ == "__main__":
+    scrape()
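
The new module's public surface is the single scrape() function added above. As a minimal sketch of driving it directly, assuming the package is installed and the target directories are writable (note that cache_dir is accepted for interface consistency but goes unused in this version of the scraper):

from pathlib import Path

from warn.scrapers import hi

# Writes the scraped notices to hi.csv under data_dir and returns that Path.
output_csv = hi.scrape(data_dir=Path("./data"), cache_dir=Path("./cache"))
print(output_csv)  # ./data/hi.csv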

{warn-scraper-1.2.57 → warn-scraper-1.2.59/warn_scraper.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: warn-scraper
-Version: 1.2.57
+Version: 1.2.59
 Summary: Command-line interface for downloading WARN Act notices of qualified plant closings and mass layoffs from state government websites
 Home-page: https://github.com/biglocalnews/warn-scraper
 Author: Big Local News

warn-scraper-1.2.57/warn/scrapers/hi.py
@@ -1,101 +0,0 @@
-import datetime
-import logging
-from pathlib import Path
-from urllib.parse import quote
-
-from bs4 import BeautifulSoup
-
-from .. import utils
-
-__authors__ = ["Ash1R", "stucka"]
-__tags__ = ["html", "pdf"]
-__source__ = {
-    "name": "Workforce Development Hawaii",
-    "url": "https://labor.hawaii.gov/wdc/real-time-warn-updates/",
-}
-
-logger = logging.getLogger(__name__)
-
-
-def scrape(
-    data_dir: Path = utils.WARN_DATA_DIR,
-    cache_dir: Path = utils.WARN_CACHE_DIR,
-) -> Path:
-    """
-    Scrape data from Hawaii.
-
-    Keyword arguments:
-    data_dir -- the Path were the result will be saved (default WARN_DATA_DIR)
-    cache_dir -- the Path where results can be cached (default WARN_CACHE_DIR)
-    Returns: the Path where the file is written
-    """
-    cacheprefix = "https://webcache.googleusercontent.com/search?q=cache%3A"  # Use Google Cache, per #600
-
-    firstpage = utils.get_url(cacheprefix + quote("https://labor.hawaii.gov/wdc/real-time-warn-updates/"))
-    soup = BeautifulSoup(firstpage.text, features="html5lib")
-    pagesection = soup.select("div.primary-content")[0]
-    subpageurls = []
-    for atag in pagesection.find_all("a"):
-        href = atag["href"]
-        if href.endswith("/"):
-            href = href  # [:-1]
-        subpageurls.append(cacheprefix + quote(href))
-
-    headers = ["Company", "Date", "PDF url", "location", "jobs"]
-    data = [headers]
-    # lastdateseen = "2099-12-31"
-
-    for subpageurl in reversed(subpageurls):
-        # Conditionally here, we want to check and see if we have the old cached files, or if the year is current or previous.
-        # Only need to download if it's current or previous year.
-        # But do we care enough to implement right now?
-
-        logger.debug(f"Parsing page {subpageurl}")
-        page = utils.get_url(subpageurl)
-        soup = BeautifulSoup(page.text, features="html5lib")
-        pageyear = subpageurl.split("/")[-1][:4]
-        tags = soup.select("p a[href*=pdf]")
-        p_tags = [i.parent.get_text().replace("\xa0", " ").split("\n") for i in tags]
-        clean_p_tags = [j for i in p_tags for j in i]
-
-        dates = [k.split("–")[0].strip() for k in clean_p_tags]
-        for i in range(len(dates)):
-            try:
-                tempdate = dates[i].split(pageyear)[0].strip() + f" {pageyear}"
-                parsed_date = datetime.datetime.strptime(
-                    tempdate, "%B %d, %Y"
-                ).strftime("%Y-%m-%d")
-                dates[i] = parsed_date
-                # lastdateseen = parsed_date
-
-                # Disabling amendment automation to shift fixes into warn-transformer instead.
-                # If this needs to come back, uncomment the lastseendate references
-                # then rebuild the below section as an else
-            except ValueError:
-                logger.debug(f"Date error: {dates[i]}, leaving intact")
-                # if "*" in dates[i]:
-                #     logger.debug(
-                #         f"Date error: {dates[i]} as apparent amendment; saving as {lastdateseen}"
-                #     )
-                #     dates[i] = lastdateseen
-                # else:
-
-        for i in range(len(tags)):
-            row = []
-            url = tags[i].get("href")
-            row.append(tags[i].get_text())
-
-            row.append(dates[i])
-
-            row.append(url)
-            row.append(None)  # location
-            row.append(None)  # jobs
-            data.append(row)
-
-    output_csv = data_dir / "hi.csv"
-    utils.write_rows_to_csv(output_csv, data)
-    return output_csv
-
-
-if __name__ == "__main__":
-    scrape()
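
Both versions build the Google Cache fallback URL the same way; only the trigger changed, from always-on in 1.2.57 to the opt-in usegooglecache flag in 1.2.59. A standalone sketch of that construction, using only the standard library:

from urllib.parse import quote

cacheprefix = "https://webcache.googleusercontent.com/search?q=cache%3A"
target = "https://labor.hawaii.gov/wdc/real-time-warn-updates/"

# quote() escapes the scheme colon but leaves slashes intact by default,
# producing the lookup URL the scraper would request.
print(cacheprefix + quote(target))
# https://webcache.googleusercontent.com/search?q=cache%3Ahttps%3A//labor.hawaii.gov/wdc/real-time-warn-updates/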