tldextract 5.1.2__tar.gz → 5.1.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. {tldextract-5.1.2 → tldextract-5.1.3}/.github/workflows/ci.yml +5 -6
  2. {tldextract-5.1.2 → tldextract-5.1.3}/CHANGELOG.md +14 -0
  3. {tldextract-5.1.2 → tldextract-5.1.3}/PKG-INFO +27 -18
  4. {tldextract-5.1.2 → tldextract-5.1.3}/README.md +23 -14
  5. {tldextract-5.1.2 → tldextract-5.1.3}/pyproject.toml +4 -4
  6. {tldextract-5.1.2 → tldextract-5.1.3}/scripts/release.py +36 -38
  7. {tldextract-5.1.2 → tldextract-5.1.3}/tests/__snapshots__/test_release.ambr +3 -0
  8. {tldextract-5.1.2 → tldextract-5.1.3}/tests/main_test.py +1 -3
  9. {tldextract-5.1.2 → tldextract-5.1.3}/tests/test_cache.py +3 -3
  10. {tldextract-5.1.2 → tldextract-5.1.3}/tests/test_parallel.py +1 -13
  11. {tldextract-5.1.2 → tldextract-5.1.3}/tests/test_release.py +7 -6
  12. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract/.tld_set_snapshot +4390 -2405
  13. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract/_version.py +2 -2
  14. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract/cache.py +5 -16
  15. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract/suffix_list.py +3 -1
  16. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract/tldextract.py +11 -12
  17. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract.egg-info/PKG-INFO +27 -18
  18. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract.egg-info/requires.txt +1 -1
  19. {tldextract-5.1.2 → tldextract-5.1.3}/tox.ini +5 -5
  20. {tldextract-5.1.2 → tldextract-5.1.3}/.github/FUNDING.yml +0 -0
  21. {tldextract-5.1.2 → tldextract-5.1.3}/.gitignore +0 -0
  22. {tldextract-5.1.2 → tldextract-5.1.3}/LICENSE +0 -0
  23. {tldextract-5.1.2 → tldextract-5.1.3}/setup.cfg +0 -0
  24. {tldextract-5.1.2 → tldextract-5.1.3}/tests/__init__.py +0 -0
  25. {tldextract-5.1.2 → tldextract-5.1.3}/tests/cli_test.py +0 -0
  26. {tldextract-5.1.2 → tldextract-5.1.3}/tests/conftest.py +0 -0
  27. {tldextract-5.1.2 → tldextract-5.1.3}/tests/custom_suffix_test.py +0 -0
  28. {tldextract-5.1.2 → tldextract-5.1.3}/tests/fixtures/fake_suffix_list_fixture.dat +0 -0
  29. {tldextract-5.1.2 → tldextract-5.1.3}/tests/integration_test.py +0 -0
  30. {tldextract-5.1.2 → tldextract-5.1.3}/tests/test_trie.py +0 -0
  31. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract/__init__.py +0 -0
  32. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract/__main__.py +0 -0
  33. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract/cli.py +0 -0
  34. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract/py.typed +0 -0
  35. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract/remote.py +0 -0
  36. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract.egg-info/SOURCES.txt +0 -0
  37. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract.egg-info/dependency_links.txt +0 -0
  38. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract.egg-info/entry_points.txt +0 -0
  39. {tldextract-5.1.2 → tldextract-5.1.3}/tldextract.egg-info/top_level.txt +0 -0
{tldextract-5.1.2 → tldextract-5.1.3}/.github/workflows/ci.yml

@@ -14,22 +14,21 @@ jobs:
       os: [macos-latest, windows-latest, ubuntu-latest]
       language:
         [
-          {python-version: "3.8", toxenv: "py38"},
           {python-version: "3.9", toxenv: "py39"},
           {python-version: "3.10", toxenv: "py310"},
           {python-version: "3.11", toxenv: "py311"},
           {python-version: "3.12", toxenv: "py312"},
-          {python-version: "pypy3.8", toxenv: "pypy38"},
+          {python-version: "3.13", toxenv: "py313"},
           {python-version: "pypy3.9", toxenv: "pypy39"},
           {python-version: "pypy3.10", toxenv: "pypy310"},
         ]
       include:
         - os: ubuntu-latest
-          language: {python-version: "3.8", toxenv: "codestyle"}
+          language: {python-version: "3.9", toxenv: "codestyle"}
         - os: ubuntu-latest
-          language: {python-version: "3.8", toxenv: "lint"}
+          language: {python-version: "3.9", toxenv: "lint"}
         - os: ubuntu-latest
-          language: {python-version: "3.8", toxenv: "typecheck"}
+          language: {python-version: "3.9", toxenv: "typecheck"}
     runs-on: ${{ matrix.os }}
     steps:
       - name: Check out repository
@@ -41,7 +40,7 @@ jobs:
           check-latest: true
       - name: Install Python requirements
         run: |
-          pip install --upgrade tox
+          pip install --upgrade tox tox-uv
       - name: Test
        run: tox
        env:
{tldextract-5.1.2 → tldextract-5.1.3}/CHANGELOG.md

@@ -3,6 +3,20 @@
 After upgrading, update your cache file by deleting it or via `tldextract
 --update`.
 
+## 5.1.3 (2024-11-04)
+
+* Bugfixes
+    * Reduce logging errors ([`921a825`](https://github.com/john-kurkowski/tldextract/commit/921a82523c0e4403d21d50b2c3410d9af43520ac))
+    * Drop support for EOL Python 3.8 ([#340](https://github.com/john-kurkowski/tldextract/issues/340))
+    * Support Python 3.13 ([#341](https://github.com/john-kurkowski/tldextract/issues/341))
+    * Update bundled snapshot
+* Documentation
+    * Clarify how to use your own definitions
+    * Clarify first-successful definitions vs. merged definitions
+* Misc.
+    * Switch from Black to Ruff ([#333](https://github.com/john-kurkowski/tldextract/issues/333))
+    * Switch from pip to uv, during tox ([#324](https://github.com/john-kurkowski/tldextract/issues/324))
+
 ## 5.1.2 (2024-03-18)
 
 * Bugfixes
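The changelog preamble above reminds users to refresh the cached suffix list after upgrading, either by deleting the cache file or via `tldextract --update`. For library users, a minimal in-process sketch of the same refresh; the `update(fetch_now=True)` call is assumed here to mirror what the CLI flag does (clear the cache, then re-fetch):

```python
import subprocess

import tldextract

# Option 1: shell out to the documented CLI flag.
subprocess.run(["tldextract", "--update"], check=True)

# Option 2 (assumed API): clear the cached suffix list and re-fetch it now.
tldextract.TLDExtract().update(fetch_now=True)
```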
{tldextract-5.1.2 → tldextract-5.1.3}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: tldextract
-Version: 5.1.2
+Version: 5.1.3
 Summary: Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well.
 Author-email: John Kurkowski <john.kurkowski@gmail.com>
 License: BSD-3-Clause
@@ -10,12 +10,12 @@ Classifier: Development Status :: 5 - Production/Stable
 Classifier: Topic :: Utilities
 Classifier: License :: OSI Approved :: BSD License
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
-Requires-Python: >=3.8
+Classifier: Programming Language :: Python :: 3.13
+Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: idna
@@ -26,7 +26,6 @@ Provides-Extra: release
 Requires-Dist: build; extra == "release"
 Requires-Dist: twine; extra == "release"
 Provides-Extra: testing
-Requires-Dist: black; extra == "testing"
 Requires-Dist: mypy; extra == "testing"
 Requires-Dist: pytest; extra == "testing"
 Requires-Dist: pytest-gitignore; extra == "testing"
@@ -35,6 +34,7 @@ Requires-Dist: responses; extra == "testing"
 Requires-Dist: ruff; extra == "testing"
 Requires-Dist: syrupy; extra == "testing"
 Requires-Dist: tox; extra == "testing"
+Requires-Dist: tox-uv; extra == "testing"
 Requires-Dist: types-filelock; extra == "testing"
 Requires-Dist: types-requests; extra == "testing"
 
@@ -129,8 +129,8 @@ tldextract http://forums.bbc.co.uk
 
 Beware when first calling `tldextract`, it updates its TLD list with a live HTTP
 request. This updated TLD set is usually cached indefinitely in `$HOME/.cache/python-tldextract`.
-To control the cache's location, set TLDEXTRACT_CACHE environment variable or set the
-cache_dir path in TLDExtract initialization.
+To control the cache's location, set the `TLDEXTRACT_CACHE` environment variable or set the
+`cache_dir` path when constructing a `TLDExtract`.
 
 (Arguably runtime bootstrapping like that shouldn't be the default behavior,
 like for production systems. But I want you to have the latest TLDs, especially
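A minimal sketch of the `cache_dir` option described in the rewritten lines above; the path is illustrative:

```python
import tldextract

# Keep the fetched suffix list in a directory you control, instead of the
# default $HOME/.cache/python-tldextract (or $TLDEXTRACT_CACHE, if set).
extract = tldextract.TLDExtract(cache_dir="/path/to/your/cache/")

print(extract("https://forums.bbc.co.uk"))
# -> something like ExtractResult(subdomain='forums', domain='bbc', suffix='co.uk', is_private=False)
```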
@@ -219,10 +219,12 @@ extract = tldextract.TLDExtract(
     fallback_to_snapshot=False)
 ```
 
-The above snippet will fetch from the URL *you* specified, upon first need to download the
-suffix list (i.e. if the cached version doesn't exist).
+If the cached version of public suffix definitions doesn't exist, such as on
+the first run, the above snippet will request the URLs you specified in order,
+and use the first successful response.
 
-If you want to use input data from your local filesystem, just use the `file://` protocol:
+If you want to use input data from your local filesystem, use the `file://`
+protocol with an absolute path:
 
 ```python
 extract = tldextract.TLDExtract(
@@ -231,17 +233,24 @@ extract = tldextract.TLDExtract(
     fallback_to_snapshot=False)
 ```
 
-Use an absolute path when specifying the `suffix_list_urls` keyword argument.
-`os.path` is your friend.
-
-The command line update command can be used with a URL or local file you specify:
+This also works via command line update:
 
 ```zsh
 tldextract --update --suffix_list_url "http://foo.bar.baz"
 ```
 
-This could be useful in production when you don't want the delay associated with updating the suffix
-list on first use, or if you are behind a complex firewall that prevents a simple update from working.
+Using your own URLs could be useful in production when you don't want the delay
+with updating the suffix list on first use, or if you are behind a complex
+firewall.
+
+You can also specify additional suffixes in the `extra_suffixes` param. These
+will be merged into whatever public suffix definitions are already in use by
+`tldextract`.
+
+```python
+extract = tldextract.TLDExtract(
+    extra_suffixes=["foo", "bar", "baz"])
+```
 
 ## FAQ
 
@@ -250,9 +259,9 @@ list on first use, or if you are behind a complex firewall that prevents a simpl
 This project doesn't contain an actual list of public suffixes. That comes from
 [the Public Suffix List (PSL)](https://publicsuffix.org/). Submit amendments there.
 
-(In the meantime, you can tell tldextract about your exception by either
+In the meantime, you can tell tldextract about your exception by either
 forking the PSL and using your fork in the `suffix_list_urls` param, or adding
-your suffix piecemeal with the `extra_suffixes` param.)
+your suffix piecemeal with the `extra_suffixes` param.
 
 ### I see my suffix in [the Public Suffix List (PSL)](https://publicsuffix.org/), but this library doesn't extract it.
 
@@ -309,5 +318,5 @@ tox -e py311
 Automatically format all code:
 
 ```zsh
-black .
+ruff format .
 ```
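The documentation hunks above clarify that multiple `suffix_list_urls` are tried in order and the first successful response wins. A small sketch of that behavior, combining a remote URL with a local `file://` fallback; the mirror URL and paths are made up, but `suffix_list_urls`, `cache_dir`, and `fallback_to_snapshot` are the parameters shown in the diffed README:

```python
from pathlib import Path

import tldextract

# Candidate sources are tried in order; the first successful fetch is used.
private_mirror = "https://psl.mirror.example.com/public_suffix_list.dat"
local_copy = Path("/srv/psl/public_suffix_list.dat").absolute().as_uri()  # file:// URL

extract = tldextract.TLDExtract(
    suffix_list_urls=[private_mirror, local_copy],
    cache_dir="/path/to/your/cache/",
    fallback_to_snapshot=False,
)
```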
{tldextract-5.1.2 → tldextract-5.1.3}/README.md

@@ -89,8 +89,8 @@ tldextract http://forums.bbc.co.uk
 
 Beware when first calling `tldextract`, it updates its TLD list with a live HTTP
 request. This updated TLD set is usually cached indefinitely in `$HOME/.cache/python-tldextract`.
-To control the cache's location, set TLDEXTRACT_CACHE environment variable or set the
-cache_dir path in TLDExtract initialization.
+To control the cache's location, set the `TLDEXTRACT_CACHE` environment variable or set the
+`cache_dir` path when constructing a `TLDExtract`.
 
 (Arguably runtime bootstrapping like that shouldn't be the default behavior,
 like for production systems. But I want you to have the latest TLDs, especially
@@ -179,10 +179,12 @@ extract = tldextract.TLDExtract(
     fallback_to_snapshot=False)
 ```
 
-The above snippet will fetch from the URL *you* specified, upon first need to download the
-suffix list (i.e. if the cached version doesn't exist).
+If the cached version of public suffix definitions doesn't exist, such as on
+the first run, the above snippet will request the URLs you specified in order,
+and use the first successful response.
 
-If you want to use input data from your local filesystem, just use the `file://` protocol:
+If you want to use input data from your local filesystem, use the `file://`
+protocol with an absolute path:
 
 ```python
 extract = tldextract.TLDExtract(
@@ -191,17 +193,24 @@ extract = tldextract.TLDExtract(
     fallback_to_snapshot=False)
 ```
 
-Use an absolute path when specifying the `suffix_list_urls` keyword argument.
-`os.path` is your friend.
-
-The command line update command can be used with a URL or local file you specify:
+This also works via command line update:
 
 ```zsh
 tldextract --update --suffix_list_url "http://foo.bar.baz"
 ```
 
-This could be useful in production when you don't want the delay associated with updating the suffix
-list on first use, or if you are behind a complex firewall that prevents a simple update from working.
+Using your own URLs could be useful in production when you don't want the delay
+with updating the suffix list on first use, or if you are behind a complex
+firewall.
+
+You can also specify additional suffixes in the `extra_suffixes` param. These
+will be merged into whatever public suffix definitions are already in use by
+`tldextract`.
+
+```python
+extract = tldextract.TLDExtract(
+    extra_suffixes=["foo", "bar", "baz"])
+```
 
 ## FAQ
 
@@ -210,9 +219,9 @@ list on first use, or if you are behind a complex firewall that prevents a simpl
 This project doesn't contain an actual list of public suffixes. That comes from
 [the Public Suffix List (PSL)](https://publicsuffix.org/). Submit amendments there.
 
-(In the meantime, you can tell tldextract about your exception by either
+In the meantime, you can tell tldextract about your exception by either
 forking the PSL and using your fork in the `suffix_list_urls` param, or adding
-your suffix piecemeal with the `extra_suffixes` param.)
+your suffix piecemeal with the `extra_suffixes` param.
 
 ### I see my suffix in [the Public Suffix List (PSL)](https://publicsuffix.org/), but this library doesn't extract it.
 
@@ -269,5 +278,5 @@ tox -e py311
 Automatically format all code:
 
 ```zsh
-black .
+ruff format .
 ```
{tldextract-5.1.2 → tldextract-5.1.3}/pyproject.toml

@@ -23,13 +23,13 @@ classifiers = [
     "Topic :: Utilities",
     "License :: OSI Approved :: BSD License",
     "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3.8",
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
 ]
-requires-python = ">=3.8"
+requires-python = ">=3.9"
 dynamic = ["version"]
 readme = "README.md"
 
@@ -46,7 +46,6 @@ release = [
     "twine",
 ]
 testing = [
-    "black",
     "mypy",
     "pytest",
     "pytest-gitignore",
@@ -55,6 +54,7 @@ testing = [
     "ruff",
     "syrupy",
     "tox",
+    "tox-uv",
     "types-filelock",
     "types-requests",
 ]
@@ -104,7 +104,7 @@ select = [
     "W",
 ]
 ignore = [
-    "E501", # line too long; if Black does its job, not worried about the rare long line
+    "E501", # line too long; if formatter does its job, not worried about the rare long line
 ]
 
 [tool.ruff.lint.pydocstyle]
{tldextract-5.1.2 → tldextract-5.1.3}/scripts/release.py

@@ -13,7 +13,7 @@ It will:
 Prerequisites:
     - This must be run from the root of the repository.
     - The repo must have a clean git working tree.
-    - The user must have the GITHUB_TOKEN environment variable set to a valid GitHub personal access token.
+    - The user must have the GITHUB_TOKEN environment variable set to a GitHub personal access token with repository "Contents" read and write permission.
     - The user will need credentials for the PyPI repository, which the user will be prompted for during the upload step. The user will need to paste the token manually from a password manager or similar.
     - The CHANGELOG.md file must already contain an entry for the version being released.
     - Install requirements with: pip install --upgrade --editable '.[release]'
@@ -22,19 +22,27 @@ Prerequisites:
 
 from __future__ import annotations
 
+import contextlib
 import os
 import re
 import subprocess
 import sys
+from collections.abc import Iterator
 from pathlib import Path
 
 import requests
 
 
-def add_git_tag_for_version(version: str) -> None:
+@contextlib.contextmanager
+def add_git_tag_for_version(version: str) -> Iterator[None]:
     """Add a git tag for the given version."""
     subprocess.run(["git", "tag", "-a", version, "-m", version], check=True)
     print(f"Version {version} tag added successfully.")
+    try:
+        yield
+    except:
+        subprocess.run(["git", "tag", "-d", version])
+        raise
 
 
 def remove_previous_dist() -> None:
@@ -68,14 +76,16 @@ def verify_build(is_test: str) -> None:
     confirmation = input("Does the build look correct? (y/n): ")
     if confirmation == "y":
         print("Build verified successfully.")
-        upload_build_to_pypi(is_test)
-        push_git_tags()
     else:
         raise Exception("Could not verify. Build was not uploaded.")
 
 
 def generate_github_release_notes_body(token: str, version: str) -> str:
-    """Generate and grab release notes URL from Github."""
+    """Generate and grab release notes URL from Github.
+
+    Delete their first paragraph, because we track its contents in a tighter
+    form in CHANGELOG.md. See `get_changelog_release_notes`.
+    """
     response = requests.post(
         "https://api.github.com/repos/john-kurkowski/tldextract/releases/generate-notes",
         headers={
@@ -94,24 +104,13 @@ def generate_github_release_notes_body(token: str, version: str) -> str:
             file=sys.stderr,
         )
         return ""
-    return str(response.json()["body"])
 
+    body = str(response.json()["body"])
+    paragraphs = body.split("\n\n")
+    return "\n\n".join(paragraphs[1:])
 
-def get_release_notes_url(body: str) -> str:
-    """Parse the release notes content to get the changelog URL."""
-    url_pattern = re.compile(r"\*\*Full Changelog\*\*: (.*)$")
-    match = url_pattern.search(body)
-    if match:
-        return match.group(1)
-    else:
-        print(
-            "WARNING: Failed to parse release notes URL from GitHub response.",
-            file=sys.stderr,
-        )
-        return ""
 
-
-def get_changelog_release_notes(release_notes_url: str, version: str) -> str:
+def get_changelog_release_notes(version: str) -> str:
     """Get the changelog release notes.
 
     Uses a regex starting on a heading beginning with the version number
@@ -125,25 +124,15 @@ def get_changelog_release_notes(release_notes_url: str, version: str) -> str:
     if match:
         return str(match.group(1)).strip()
     else:
-        print(
-            f"WARNING: Failed to parse changelog release notes. Manually copy this version's notes from the CHANGELOG.md file to {release_notes_url}.",
-            file=sys.stderr,
-        )
         return ""
 
 
-def create_release_notes_body(token: str, version: str) -> str:
-    """Compile the release notes."""
-    github_release_body = generate_github_release_notes_body(token, version)
-    release_notes_url = get_release_notes_url(github_release_body)
-    changelog_notes = get_changelog_release_notes(release_notes_url, version)
-    full_release_notes = f"{changelog_notes}\n\n**Full Changelog**: {release_notes_url}"
-    return full_release_notes
-
-
 def create_github_release_draft(token: str, version: str) -> None:
     """Create a release on GitHub."""
-    release_body = create_release_notes_body(token, version)
+    github_release_body = generate_github_release_notes_body(token, version)
+    changelog_notes = get_changelog_release_notes(version)
+    release_body = f"{changelog_notes}\n\n{github_release_body}"
+
     response = requests.post(
         "https://api.github.com/repos/john-kurkowski/tldextract/releases",
         headers={
@@ -168,8 +157,15 @@ def create_github_release_draft(token: str, version: str) -> None:
             file=sys.stderr,
         )
         return
+
     print(f'Release created successfully: {response.json()["html_url"]}')
 
+    if not changelog_notes:
+        print(
+            "WARNING: Failed to parse changelog release notes. Manually copy this version's notes from the CHANGELOG.md file to the above URL.",
+            file=sys.stderr,
+        )
+
 
 def upload_build_to_pypi(is_test: str) -> None:
     """Upload the build to PyPI."""
@@ -227,10 +223,12 @@ def main() -> None:
     is_test = get_is_test_response()
     version_number = input("Enter the version number: ")
 
-    add_git_tag_for_version(version_number)
-    remove_previous_dist()
-    create_build()
-    verify_build(is_test)
+    with add_git_tag_for_version(version_number):
+        remove_previous_dist()
+        create_build()
+        verify_build(is_test)
+        upload_build_to_pypi(is_test)
+        push_git_tags()
     create_github_release_draft(github_token, version_number)
 
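The release-script change above turns `add_git_tag_for_version` into a context manager so that a failure anywhere inside the `with` block deletes the just-created tag before the exception propagates. A stripped-down sketch of the same rollback pattern, with placeholder steps rather than the project's actual release pipeline:

```python
import contextlib
import subprocess
from collections.abc import Iterator


@contextlib.contextmanager
def tagged(tag: str) -> Iterator[None]:
    """Create a git tag, and delete it again if the wrapped block raises."""
    subprocess.run(["git", "tag", tag], check=True)
    try:
        yield
    except BaseException:
        # Undo the side effect before re-raising, so a failed run doesn't
        # leave a stray tag behind.
        subprocess.run(["git", "tag", "-d", tag])
        raise


# Usage (placeholder steps): everything inside the block runs under the tag;
# any exception rolls the tag back.
# with tagged("v0.0.0-example"):
#     build()
#     upload()
```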
{tldextract-5.1.2 → tldextract-5.1.3}/tests/__snapshots__/test_release.ambr

@@ -72,6 +72,9 @@
 * Misc.
     * Increase typecheck aggression
 
+## New Contributors
+* @jdoe contributed
+
 **Full Changelog**: fake-body
 ''',
 'draft': True,
{tldextract-5.1.2 → tldextract-5.1.3}/tests/main_test.py

@@ -4,7 +4,6 @@ from __future__ import annotations
 
 import logging
 import os
-import sys
 import tempfile
 from collections.abc import Sequence
 from pathlib import Path
@@ -168,8 +167,7 @@ def test_looks_like_ipv6() -> None:
     assert looks_like_ipv6("aBcD:ef01:2345:6789:aBcD:ef01:aaaa:2288") is True
     assert looks_like_ipv6("aBcD:ef01:2345:6789:aBcD:ef01:127.0.0.1") is True
     assert looks_like_ipv6("ZBcD:ef01:2345:6789:aBcD:ef01:127.0.0.1") is False
-    if sys.version_info >= (3, 8, 12):  # noqa: UP036
-        assert looks_like_ipv6("aBcD:ef01:2345:6789:aBcD:ef01:127.0.0.01") is False
+    assert looks_like_ipv6("aBcD:ef01:2345:6789:aBcD:ef01:127.0.0.01") is False
     assert looks_like_ipv6("aBcD:ef01:2345:6789:aBcD:") is False
 
 
{tldextract-5.1.2 → tldextract-5.1.3}/tests/test_cache.py

@@ -32,7 +32,7 @@ def test_disk_cache(tmp_path: Path) -> None:
 
 def test_get_pkg_unique_identifier(monkeypatch: pytest.MonkeyPatch) -> None:
     """Test generating a unique identifier for the version of this package."""
-    monkeypatch.setattr(sys, "version_info", (3, 8, 1, "final", 0))
+    monkeypatch.setattr(sys, "version_info", (3, 9, 1, "final", 0))
     monkeypatch.setattr(sys, "prefix", "/home/john/.pyenv/versions/myvirtualenv")
 
     mock_version_module = types.ModuleType("tldextract._version", "mocked module")
@@ -41,13 +41,13 @@ def test_get_pkg_unique_identifier(monkeypatch: pytest.MonkeyPatch) -> None:
 
     assert (
         get_pkg_unique_identifier()
-        == "3.8.1.final__myvirtualenv__f01a7b__tldextract-1.2.3"
+        == "3.9.1.final__myvirtualenv__f01a7b__tldextract-1.2.3"
     )
 
 
 def test_get_cache_dir(monkeypatch: pytest.MonkeyPatch) -> None:
     """Test finding the cache directory."""
-    pkg_identifier = "3.8.1.final__myvirtualenv__f01a7b__tldextract-1.2.3"
+    pkg_identifier = "3.9.1.final__myvirtualenv__f01a7b__tldextract-1.2.3"
     monkeypatch.setattr(
         tldextract.cache, "get_pkg_unique_identifier", lambda: pkg_identifier
     )
{tldextract-5.1.2 → tldextract-5.1.3}/tests/test_parallel.py

@@ -44,22 +44,10 @@ def test_cache_cleared_by_other_process(
     extract("google.com")
     orig_unlink = os.unlink
 
-    def is_relative_to(path: Path, other_path: str | Path) -> bool:
-        """Return True if path is relative to other_path or False.
-
-        Taken from the Python 3.9 standard library.
-        Reference: https://docs.python.org/3/library/pathlib.html#pathlib.PurePath.is_relative_to
-        """
-        try:
-            path.relative_to(other_path)
-            return True
-        except ValueError:
-            return False
-
     def evil_unlink(filename: str | Path) -> None:
         """Simulate someone deletes the file right before we try to."""
         if (isinstance(filename, str) and filename.startswith(cache_dir)) or (
-            isinstance(filename, Path) and is_relative_to(filename, cache_dir)
+            isinstance(filename, Path) and filename.is_relative_to(cache_dir)
         ):
             orig_unlink(filename)
         orig_unlink(filename)
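With Python 3.8 dropped, the test no longer needs its vendored backport: `pathlib.Path.is_relative_to` has been in the standard library since 3.9. A tiny illustration with made-up paths:

```python
from pathlib import Path

cache_dir = Path("/tmp/python-tldextract")

# Stdlib replacement for the removed try/relative_to backport.
print(Path("/tmp/python-tldextract/suffix_list.json").is_relative_to(cache_dir))  # True
print(Path("/etc/hosts").is_relative_to(cache_dir))  # False
```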
{tldextract-5.1.2 → tldextract-5.1.3}/tests/test_release.py

@@ -34,11 +34,12 @@ class Mocks:
 @pytest.fixture
 def mocks() -> Iterator[Mocks]:
     """Stub network and subprocesses."""
-    with mock.patch("builtins.input") as mock_input, mock.patch(
-        "os.listdir"
-    ) as mock_listdir, mock.patch("requests.post") as mock_requests, mock.patch(
-        "subprocess.run"
-    ) as mock_subprocess:
+    with (
+        mock.patch("builtins.input") as mock_input,
+        mock.patch("os.listdir") as mock_listdir,
+        mock.patch("requests.post") as mock_requests,
+        mock.patch("subprocess.run") as mock_subprocess,
+    ):
         yield Mocks(
             input=mock_input,
             listdir=mock_listdir,
@@ -76,7 +77,7 @@ def test_happy_path(
         return mock.Mock(
             json=mock.Mock(
                 return_value={
-                    "body": "Body start **Full Changelog**: fake-body",
+                    "body": "## What's Changed\nGitHub changelog here\n\n## New Contributors\n* @jdoe contributed\n\n**Full Changelog**: fake-body",
                     "html_url": "https://github.com/path/to/release",
                 }
            ),
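The fixture change above stacks four patches in one parenthesized `with` statement. An equivalent, slightly more verbose pattern (a sketch, not the project's code) is `contextlib.ExitStack`, which also works when the set of patches is built dynamically; all patches are torn down together when the stack exits:

```python
import contextlib
from unittest import mock

# Enter several mock.patch context managers through a single ExitStack.
with contextlib.ExitStack() as stack:
    mock_input = stack.enter_context(mock.patch("builtins.input"))
    mock_listdir = stack.enter_context(mock.patch("os.listdir"))
    mock_requests = stack.enter_context(mock.patch("requests.post"))
    mock_subprocess = stack.enter_context(mock.patch("subprocess.run"))

    mock_input.return_value = "y"  # configure stubs as usual
```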