tldextract 5.1.0__tar.gz → 5.1.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tldextract-5.1.2/.github/workflows/ci.yml +48 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/CHANGELOG.md +16 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/LICENSE +1 -1
- {tldextract-5.1.0 → tldextract-5.1.2}/PKG-INFO +7 -2
- {tldextract-5.1.0 → tldextract-5.1.2}/README.md +1 -1
- {tldextract-5.1.0 → tldextract-5.1.2}/pyproject.toml +9 -2
- tldextract-5.1.2/scripts/release.py +238 -0
- tldextract-5.1.2/tests/__snapshots__/test_release.ambr +244 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tests/custom_suffix_test.py +7 -3
- {tldextract-5.1.0 → tldextract-5.1.2}/tests/main_test.py +18 -14
- {tldextract-5.1.0 → tldextract-5.1.2}/tests/test_cache.py +6 -6
- {tldextract-5.1.0 → tldextract-5.1.2}/tests/test_parallel.py +18 -3
- tldextract-5.1.2/tests/test_release.py +95 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tests/test_trie.py +1 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract/__main__.py +0 -1
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract/_version.py +2 -2
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract/cache.py +11 -12
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract/remote.py +6 -28
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract/tldextract.py +5 -10
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract.egg-info/PKG-INFO +7 -2
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract.egg-info/SOURCES.txt +4 -1
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract.egg-info/requires.txt +5 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tox.ini +2 -2
- tldextract-5.1.0/.travis.yml +0 -23
- {tldextract-5.1.0 → tldextract-5.1.2}/.github/FUNDING.yml +0 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/.gitignore +0 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/setup.cfg +0 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tests/__init__.py +0 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tests/cli_test.py +0 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tests/conftest.py +0 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tests/fixtures/fake_suffix_list_fixture.dat +0 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tests/integration_test.py +0 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract/.tld_set_snapshot +0 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract/__init__.py +0 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract/cli.py +0 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract/py.typed +0 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract/suffix_list.py +0 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract.egg-info/dependency_links.txt +0 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract.egg-info/entry_points.txt +0 -0
- {tldextract-5.1.0 → tldextract-5.1.2}/tldextract.egg-info/top_level.txt +0 -0
tldextract-5.1.2/.github/workflows/ci.yml (new file)

```diff
@@ -0,0 +1,48 @@
+name: build
+on:
+  pull_request: {}
+  push:
+    branches:
+      - "master"
+    tags-ignore:
+      - "**"
+jobs:
+  test:
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [macos-latest, windows-latest, ubuntu-latest]
+        language:
+          [
+            {python-version: "3.8", toxenv: "py38"},
+            {python-version: "3.9", toxenv: "py39"},
+            {python-version: "3.10", toxenv: "py310"},
+            {python-version: "3.11", toxenv: "py311"},
+            {python-version: "3.12", toxenv: "py312"},
+            {python-version: "pypy3.8", toxenv: "pypy38"},
+            {python-version: "pypy3.9", toxenv: "pypy39"},
+            {python-version: "pypy3.10", toxenv: "pypy310"},
+          ]
+        include:
+          - os: ubuntu-latest
+            language: {python-version: "3.8", toxenv: "codestyle"}
+          - os: ubuntu-latest
+            language: {python-version: "3.8", toxenv: "lint"}
+          - os: ubuntu-latest
+            language: {python-version: "3.8", toxenv: "typecheck"}
+    runs-on: ${{ matrix.os }}
+    steps:
+      - name: Check out repository
+        uses: actions/checkout@v4
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.language.python-version }}
+          check-latest: true
+      - name: Install Python requirements
+        run: |
+          pip install --upgrade tox
+      - name: Test
+        run: tox
+        env:
+          TOXENV: ${{ matrix.language.toxenv }}
```
{tldextract-5.1.0 → tldextract-5.1.2}/CHANGELOG.md

```diff
@@ -3,6 +3,22 @@
 After upgrading, update your cache file by deleting it or via `tldextract
 --update`.
 
+## 5.1.2 (2024-03-18)
+
+* Bugfixes
+    * Remove `socket.inet_pton`, to fix platform-dependent IP parsing ([#318](https://github.com/john-kurkowski/tldextract/issues/318))
+    * Use non-capturing groups for IPv4 address detection, for a slight speed boost ([#323](https://github.com/john-kurkowski/tldextract/issues/323))
+* Misc.
+    * Add CI for PyPy3.9 and PyPy3.10 ([#316](https://github.com/john-kurkowski/tldextract/issues/316))
+    * Add script to automate package release process ([#325](https://github.com/john-kurkowski/tldextract/issues/325))
+    * Update LICENSE copyright years
+
+## 5.1.1 (2023-11-16)
+
+* Bugfixes
+    * Fix path join on Windows ([#314](https://github.com/john-kurkowski/tldextract/issues/314))
+    * Support Python 3.12
+
 ## 5.1.0 (2023-11-05)
 
 * Features
```
{tldextract-5.1.0 → tldextract-5.1.2}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: tldextract
-Version: 5.1.0
+Version: 5.1.2
 Summary: Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well.
 Author-email: John Kurkowski <john.kurkowski@gmail.com>
 License: BSD-3-Clause
@@ -14,6 +14,7 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Requires-Python: >=3.8
 Description-Content-Type: text/markdown
 License-File: LICENSE
@@ -21,6 +22,9 @@ Requires-Dist: idna
 Requires-Dist: requests>=2.1.0
 Requires-Dist: requests-file>=1.4
 Requires-Dist: filelock>=3.0.8
+Provides-Extra: release
+Requires-Dist: build; extra == "release"
+Requires-Dist: twine; extra == "release"
 Provides-Extra: testing
 Requires-Dist: black; extra == "testing"
 Requires-Dist: mypy; extra == "testing"
@@ -29,11 +33,12 @@ Requires-Dist: pytest-gitignore; extra == "testing"
 Requires-Dist: pytest-mock; extra == "testing"
 Requires-Dist: responses; extra == "testing"
 Requires-Dist: ruff; extra == "testing"
+Requires-Dist: syrupy; extra == "testing"
 Requires-Dist: tox; extra == "testing"
 Requires-Dist: types-filelock; extra == "testing"
 Requires-Dist: types-requests; extra == "testing"
 
-# tldextract [](https://badge.fury.io/py/tldextract) [](https://badge.fury.io/py/tldextract) [](https://github.com/john-kurkowski/tldextract/actions/workflows/ci.yml)
 
 `tldextract` accurately separates a URL's subdomain, domain, and public suffix,
 using [the Public Suffix List (PSL)](https://publicsuffix.org).
```
{tldextract-5.1.0 → tldextract-5.1.2}/README.md

```diff
@@ -1,4 +1,4 @@
-# tldextract [](https://badge.fury.io/py/tldextract) [](https://badge.fury.io/py/tldextract) [](https://github.com/john-kurkowski/tldextract/actions/workflows/ci.yml)
 
 `tldextract` accurately separates a URL's subdomain, domain, and public suffix,
 using [the Public Suffix List (PSL)](https://publicsuffix.org).
```
{tldextract-5.1.0 → tldextract-5.1.2}/pyproject.toml

```diff
@@ -27,6 +27,7 @@ classifiers = [
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
 ]
 requires-python = ">=3.8"
 dynamic = ["version"]
@@ -40,6 +41,10 @@ dependencies = [
 ]
 
 [project.optional-dependencies]
+release = [
+    "build",
+    "twine",
+]
 testing = [
     "black",
     "mypy",
@@ -48,6 +53,7 @@ testing = [
     "pytest-mock",
     "responses",
     "ruff",
+    "syrupy",
     "tox",
     "types-filelock",
     "types-requests",
@@ -78,12 +84,13 @@ write_to = "tldextract/_version.py"
 version = {attr = "setuptools_scm.get_version"}
 
 [tool.mypy]
+explicit_package_bases = true
 strict = true
 
 [tool.pytest.ini_options]
 addopts = "--doctest-modules"
 
-[tool.ruff]
+[tool.ruff.lint]
 select = [
     "A",
     "B",
@@ -100,5 +107,5 @@ ignore = [
     "E501", # line too long; if Black does its job, not worried about the rare long line
 ]
 
-[tool.ruff.pydocstyle]
+[tool.ruff.lint.pydocstyle]
 convention = "pep257"
```
tldextract-5.1.2/scripts/release.py (new file)

```diff
@@ -0,0 +1,238 @@
+"""
+This script automates the release process for a Python package.
+
+It will:
+- Add a git tag for the given version.
+- Remove the previous dist folder.
+- Create a build.
+- Ask the user to verify the build.
+- Upload the build to PyPI.
+- Push all git tags to the remote.
+- Create a draft release on GitHub using the version notes in CHANGELOG.md.
+
+Prerequisites:
+- This must be run from the root of the repository.
+- The repo must have a clean git working tree.
+- The user must have the GITHUB_TOKEN environment variable set to a valid GitHub personal access token.
+- The user will need credentials for the PyPI repository, which the user will be prompted for during the upload step. The user will need to paste the token manually from a password manager or similar.
+- The CHANGELOG.md file must already contain an entry for the version being released.
+- Install requirements with: pip install --upgrade --editable '.[release]'
+
+"""
+
+from __future__ import annotations
+
+import os
+import re
+import subprocess
+import sys
+from pathlib import Path
+
+import requests
+
+
+def add_git_tag_for_version(version: str) -> None:
+    """Add a git tag for the given version."""
+    subprocess.run(["git", "tag", "-a", version, "-m", version], check=True)
+    print(f"Version {version} tag added successfully.")
+
+
+def remove_previous_dist() -> None:
+    """Check for dist folder, and if it exists, remove it."""
+    subprocess.run(["rm", "-rf", Path("dist")], check=True)
+    print("Previous dist folder removed successfully.")
+
+
+def create_build() -> None:
+    """Create a build."""
+    subprocess.run(["python", "-m", "build"], check=True)
+    print("Build created successfully.")
+
+
+def verify_build(is_test: str) -> None:
+    """Verify the build.
+
+    Print the archives in dist/ and ask the user to manually inspect and
+    confirm they contain the expected files, e.g. source files and test files.
+    """
+    build_files = os.listdir("dist")
+    if len(build_files) != 2:
+        print(
+            "WARNING: dist folder contains incorrect number of files.", file=sys.stderr
+        )
+    print("Contents of dist folder:")
+    subprocess.run(["ls", "-l", Path("dist")], check=True)
+    print("Contents of tar files in dist folder:")
+    for build_file in build_files:
+        subprocess.run(["tar", "tvf", Path("dist") / build_file], check=True)
+    confirmation = input("Does the build look correct? (y/n): ")
+    if confirmation == "y":
+        print("Build verified successfully.")
+        upload_build_to_pypi(is_test)
+        push_git_tags()
+    else:
+        raise Exception("Could not verify. Build was not uploaded.")
+
+
+def generate_github_release_notes_body(token: str, version: str) -> str:
+    """Generate and grab release notes URL from Github."""
+    response = requests.post(
+        "https://api.github.com/repos/john-kurkowski/tldextract/releases/generate-notes",
+        headers={
+            "Accept": "application/vnd.github+json",
+            "Authorization": f"Bearer {token}",
+            "X-GitHub-Api-Version": "2022-11-28",
+        },
+        json={"tag_name": version},
+    )
+
+    try:
+        response.raise_for_status()
+    except requests.exceptions.HTTPError as err:
+        print(
+            f"WARNING: Failed to generate release notes from Github: {err}",
+            file=sys.stderr,
+        )
+        return ""
+    return str(response.json()["body"])
+
+
+def get_release_notes_url(body: str) -> str:
+    """Parse the release notes content to get the changelog URL."""
+    url_pattern = re.compile(r"\*\*Full Changelog\*\*: (.*)$")
+    match = url_pattern.search(body)
+    if match:
+        return match.group(1)
+    else:
+        print(
+            "WARNING: Failed to parse release notes URL from GitHub response.",
+            file=sys.stderr,
+        )
+        return ""
+
+
+def get_changelog_release_notes(release_notes_url: str, version: str) -> str:
+    """Get the changelog release notes.
+
+    Uses a regex starting on a heading beginning with the version number
+    literal, and matching until the next heading. Using regex to match markup
+    is brittle. Consider a Markdown-parsing library instead.
+    """
+    with open("CHANGELOG.md") as file:
+        changelog_text = file.read()
+    pattern = re.compile(rf"## {re.escape(version)}[^\n]*(.*?)## ", re.DOTALL)
+    match = pattern.search(changelog_text)
+    if match:
+        return str(match.group(1)).strip()
+    else:
+        print(
+            f"WARNING: Failed to parse changelog release notes. Manually copy this version's notes from the CHANGELOG.md file to {release_notes_url}.",
+            file=sys.stderr,
+        )
+        return ""
+
+
+def create_release_notes_body(token: str, version: str) -> str:
+    """Compile the release notes."""
+    github_release_body = generate_github_release_notes_body(token, version)
+    release_notes_url = get_release_notes_url(github_release_body)
+    changelog_notes = get_changelog_release_notes(release_notes_url, version)
+    full_release_notes = f"{changelog_notes}\n\n**Full Changelog**: {release_notes_url}"
+    return full_release_notes
+
+
+def create_github_release_draft(token: str, version: str) -> None:
+    """Create a release on GitHub."""
+    release_body = create_release_notes_body(token, version)
+    response = requests.post(
+        "https://api.github.com/repos/john-kurkowski/tldextract/releases",
+        headers={
+            "Accept": "application/vnd.github+json",
+            "Authorization": f"Bearer {token}",
+            "X-GitHub-Api-Version": "2022-11-28",
+        },
+        json={
+            "tag_name": version,
+            "name": version,
+            "body": release_body,
+            "draft": True,
+            "prerelease": False,
+        },
+    )
+
+    try:
+        response.raise_for_status()
+    except requests.exceptions.HTTPError as err:
+        print(
+            f"WARNING: Failed to create release on Github: {err}",
+            file=sys.stderr,
+        )
+        return
+    print(f'Release created successfully: {response.json()["html_url"]}')
+
+
+def upload_build_to_pypi(is_test: str) -> None:
+    """Upload the build to PyPI."""
+    repository: list[str | Path] = (
+        [] if is_test == "n" else ["--repository", "testpypi"]
+    )
+    upload_command = ["twine", "upload", *repository, Path("dist") / "*"]
+    subprocess.run(
+        upload_command,
+        check=True,
+    )
+
+
+def push_git_tags() -> None:
+    """Push all git tags to the remote."""
+    subprocess.run(["git", "push", "--tags", "origin", "master"], check=True)
+
+
+def check_for_clean_working_tree() -> None:
+    """Check for a clean git working tree."""
+    git_status = subprocess.run(
+        ["git", "status", "--porcelain"], capture_output=True, text=True
+    )
+    if git_status.stdout:
+        print(
+            "Git working tree is not clean. Please commit or stash changes.",
+            file=sys.stderr,
+        )
+        sys.exit(1)
+
+
+def get_env_github_token() -> str:
+    """Check for the GITHUB_TOKEN environment variable."""
+    github_token = os.environ.get("GITHUB_TOKEN")
+    if not github_token:
+        print("GITHUB_TOKEN environment variable not set.", file=sys.stderr)
+        sys.exit(1)
+    return github_token
+
+
+def get_is_test_response() -> str:
+    """Ask the user if this is a test release."""
+    while True:
+        is_test = input("Is this a test release? (y/n): ")
+        if is_test in ["y", "n"]:
+            return is_test
+        else:
+            print("Invalid input. Please enter 'y' or 'n.'")
+
+
+def main() -> None:
+    """Run the main program."""
+    check_for_clean_working_tree()
+    github_token = get_env_github_token()
+    is_test = get_is_test_response()
+    version_number = input("Enter the version number: ")
+
+    add_git_tag_for_version(version_number)
+    remove_previous_dist()
+    create_build()
+    verify_build(is_test)
+    create_github_release_draft(github_token, version_number)
+
+
+if __name__ == "__main__":
+    main()
```
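Of the helpers above, `get_changelog_release_notes` is the one its own docstring flags as brittle: it slices one version's notes out of CHANGELOG.md with a regex. A standalone sketch of that same pattern against a toy changelog string (the string below is illustrative, not the project's real changelog):

```python
import re

# Same pattern as get_changelog_release_notes() in scripts/release.py:
# start at the heading for the requested version, lazily capture until the
# next "## " heading.
version = "5.1.2"
changelog_text = """\
## 5.1.2 (2024-03-18)

* Bugfixes
    * Example note

## 5.1.1 (2023-11-16)
"""

pattern = re.compile(rf"## {re.escape(version)}[^\n]*(.*?)## ", re.DOTALL)
match = pattern.search(changelog_text)
assert match is not None
print(match.group(1).strip())
# * Bugfixes
#     * Example note
```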
tldextract-5.1.2/tests/__snapshots__/test_release.ambr (new file)

```diff
@@ -0,0 +1,244 @@
+# serializer version: 1
+# name: test_happy_path
+  dict({
+    'input': _CallList([
+      _Call(
+        '',
+        tuple(
+          'Is this a test release? (y/n): ',
+        ),
+        dict({
+        }),
+      ),
+      _Call(
+        '',
+        tuple(
+          'Enter the version number: ',
+        ),
+        dict({
+        }),
+      ),
+      _Call(
+        '',
+        tuple(
+          'Does the build look correct? (y/n): ',
+        ),
+        dict({
+        }),
+      ),
+    ]),
+    'listdir': _CallList([
+      _Call(
+        '',
+        tuple(
+          'dist',
+        ),
+        dict({
+        }),
+      ),
+    ]),
+    'requests': _CallList([
+      _Call(
+        '',
+        tuple(
+          'https://api.github.com/repos/john-kurkowski/tldextract/releases/generate-notes',
+        ),
+        dict({
+          'headers': dict({
+            'Accept': 'application/vnd.github+json',
+            'Authorization': 'Bearer fake-token',
+            'X-GitHub-Api-Version': '2022-11-28',
+          }),
+          'json': dict({
+            'tag_name': '5.0.1',
+          }),
+        }),
+      ),
+      _Call(
+        '',
+        tuple(
+          'https://api.github.com/repos/john-kurkowski/tldextract/releases',
+        ),
+        dict({
+          'headers': dict({
+            'Accept': 'application/vnd.github+json',
+            'Authorization': 'Bearer fake-token',
+            'X-GitHub-Api-Version': '2022-11-28',
+          }),
+          'json': dict({
+            'body': '''
+              * Bugfixes
+                  * Indicate MD5 not used in a security context (FIPS compliance) ([#309](https://github.com/john-kurkowski/tldextract/issues/309))
+              * Misc.
+                  * Increase typecheck aggression
+
+              **Full Changelog**: fake-body
+            ''',
+            'draft': True,
+            'name': '5.0.1',
+            'prerelease': False,
+            'tag_name': '5.0.1',
+          }),
+        }),
+      ),
+    ]),
+    'subprocess': _CallList([
+      _Call(
+        '',
+        tuple(
+          list([
+            'git',
+            'status',
+            '--porcelain',
+          ]),
+        ),
+        dict({
+          'capture_output': True,
+          'text': True,
+        }),
+      ),
+      _Call(
+        '',
+        tuple(
+          list([
+            'git',
+            'tag',
+            '-a',
+            '5.0.1',
+            '-m',
+            '5.0.1',
+          ]),
+        ),
+        dict({
+          'check': True,
+        }),
+      ),
+      _Call(
+        '',
+        tuple(
+          list([
+            'rm',
+            '-rf',
+            PosixPath('dist'),
+          ]),
+        ),
+        dict({
+          'check': True,
+        }),
+      ),
+      _Call(
+        '',
+        tuple(
+          list([
+            'python',
+            '-m',
+            'build',
+          ]),
+        ),
+        dict({
+          'check': True,
+        }),
+      ),
+      _Call(
+        '',
+        tuple(
+          list([
+            'ls',
+            '-l',
+            PosixPath('dist'),
+          ]),
+        ),
+        dict({
+          'check': True,
+        }),
+      ),
+      _Call(
+        '',
+        tuple(
+          list([
+            'tar',
+            'tvf',
+            PosixPath('dist/archive1'),
+          ]),
+        ),
+        dict({
+          'check': True,
+        }),
+      ),
+      _Call(
+        '',
+        tuple(
+          list([
+            'tar',
+            'tvf',
+            PosixPath('dist/archive2'),
+          ]),
+        ),
+        dict({
+          'check': True,
+        }),
+      ),
+      _Call(
+        '',
+        tuple(
+          list([
+            'tar',
+            'tvf',
+            PosixPath('dist/archive3'),
+          ]),
+        ),
+        dict({
+          'check': True,
+        }),
+      ),
+      _Call(
+        '',
+        tuple(
+          list([
+            'twine',
+            'upload',
+            '--repository',
+            'testpypi',
+            PosixPath('dist/*'),
+          ]),
+        ),
+        dict({
+          'check': True,
+        }),
+      ),
+      _Call(
+        '',
+        tuple(
+          list([
+            'git',
+            'push',
+            '--tags',
+            'origin',
+            'master',
+          ]),
+        ),
+        dict({
+          'check': True,
+        }),
+      ),
+    ]),
+  })
+# ---
+# name: test_happy_path.1
+  '''
+  Version 5.0.1 tag added successfully.
+  Previous dist folder removed successfully.
+  Build created successfully.
+  Contents of dist folder:
+  Contents of tar files in dist folder:
+  Build verified successfully.
+  Release created successfully: https://github.com/path/to/release
+
+  '''
+# ---
+# name: test_happy_path.2
+  '''
+  WARNING: dist folder contains incorrect number of files.
+
+  '''
+# ---
```
{tldextract-5.1.0 → tldextract-5.1.2}/tests/custom_suffix_test.py

```diff
@@ -2,13 +2,17 @@
 
 import os
 import tempfile
+from pathlib import Path
 
 import tldextract
 from tldextract.tldextract import ExtractResult
 
-FAKE_SUFFIX_LIST_URL =
-    os.path.dirname(os.path.abspath(__file__)),
-
+FAKE_SUFFIX_LIST_URL = Path(
+    os.path.dirname(os.path.abspath(__file__)),
+    "fixtures",
+    "fake_suffix_list_fixture.dat",
+).as_uri()
+
 EXTRA_SUFFIXES = ["foo1", "bar1", "baz1"]
 
 extract_using_fake_suffix_list = tldextract.TLDExtract(
```
{tldextract-5.1.0 → tldextract-5.1.2}/tests/main_test.py

```diff
@@ -4,6 +4,7 @@ from __future__ import annotations
 
 import logging
 import os
+import sys
 import tempfile
 from collections.abc import Sequence
 from pathlib import Path
@@ -17,7 +18,7 @@ import responses
 import tldextract
 import tldextract.suffix_list
 from tldextract.cache import DiskCache
-from tldextract.remote import
+from tldextract.remote import lenient_netloc, looks_like_ip, looks_like_ipv6
 from tldextract.suffix_list import SuffixListNotFound
 from tldextract.tldextract import ExtractResult
 
@@ -152,21 +153,24 @@ def test_lenient_netloc() -> None:
     )
 
 
-
-
-    ""
-    assert looks_like_ip("1.1.1.
-    assert looks_like_ip("a.1.1.1"
-    assert looks_like_ip("1.1.1.1\n"
-    assert looks_like_ip("256.256.256.256"
+def test_looks_like_ip() -> None:
+    """Test function to check if a string looks like an IPv4 address."""
+    assert looks_like_ip("1.1.1.1") is True
+    assert looks_like_ip("1.1.1.01") is False
+    assert looks_like_ip("a.1.1.1") is False
+    assert looks_like_ip("1.1.1.1\n") is False
+    assert looks_like_ip("256.256.256.256") is False
 
 
-def
-    """Test
-    assert
-    assert
-    assert
-    assert
+def test_looks_like_ipv6() -> None:
+    """Test function to check if a string looks like an IPv6 address."""
+    assert looks_like_ipv6("::") is True
+    assert looks_like_ipv6("aBcD:ef01:2345:6789:aBcD:ef01:aaaa:2288") is True
+    assert looks_like_ipv6("aBcD:ef01:2345:6789:aBcD:ef01:127.0.0.1") is True
+    assert looks_like_ipv6("ZBcD:ef01:2345:6789:aBcD:ef01:127.0.0.1") is False
+    if sys.version_info >= (3, 8, 12):  # noqa: UP036
+        assert looks_like_ipv6("aBcD:ef01:2345:6789:aBcD:ef01:127.0.0.01") is False
+    assert looks_like_ipv6("aBcD:ef01:2345:6789:aBcD:") is False
 
 
 def test_similar_to_ip() -> None:
```
{tldextract-5.1.0 → tldextract-5.1.2}/tests/test_cache.py

```diff
@@ -1,7 +1,7 @@
 """Test the caching functionality."""
+
 from __future__ import annotations
 
-import os.path
 import sys
 import types
 from collections.abc import Hashable
@@ -56,14 +56,14 @@ def test_get_cache_dir(monkeypatch: pytest.MonkeyPatch) -> None:
     monkeypatch.delenv("HOME", raising=False)
     monkeypatch.delenv("XDG_CACHE_HOME", raising=False)
     monkeypatch.delenv("TLDEXTRACT_CACHE", raising=False)
-    assert get_cache_dir().endswith("tldextract
+    assert get_cache_dir().endswith(str(Path("tldextract", ".suffix_cache")))
 
     # with home set, but not anything else specified, use XDG_CACHE_HOME default
     monkeypatch.setenv("HOME", "/home/john")
     monkeypatch.delenv("XDG_CACHE_HOME", raising=False)
     monkeypatch.delenv("TLDEXTRACT_CACHE", raising=False)
-    assert get_cache_dir() ==
-        "/home/john", ".cache/python-tldextract", pkg_identifier
+    assert get_cache_dir() == str(
+        Path("/home/john", ".cache/python-tldextract", pkg_identifier)
     )
 
     # if XDG_CACHE_HOME is set, use it
@@ -71,8 +71,8 @@ def test_get_cache_dir(monkeypatch: pytest.MonkeyPatch) -> None:
     monkeypatch.setenv("XDG_CACHE_HOME", "/my/alt/cache")
     monkeypatch.delenv("TLDEXTRACT_CACHE", raising=False)
 
-    assert get_cache_dir() ==
-        "/my/alt/cache/python-tldextract", pkg_identifier
+    assert get_cache_dir() == str(
+        Path("/my/alt/cache/python-tldextract", pkg_identifier)
     )
 
     # if TLDEXTRACT_CACHE is set, use it
```
{tldextract-5.1.0 → tldextract-5.1.2}/tests/test_parallel.py

```diff
@@ -1,7 +1,8 @@
 """Test ability to run in parallel with shared cache."""
 
+from __future__ import annotations
+
 import os
-import os.path
 from multiprocessing import Pool
 from pathlib import Path
 
@@ -43,9 +44,23 @@ def test_cache_cleared_by_other_process(
     extract("google.com")
     orig_unlink = os.unlink
 
-    def
+    def is_relative_to(path: Path, other_path: str | Path) -> bool:
+        """Return True if path is relative to other_path or False.
+
+        Taken from the Python 3.9 standard library.
+        Reference: https://docs.python.org/3/library/pathlib.html#pathlib.PurePath.is_relative_to
+        """
+        try:
+            path.relative_to(other_path)
+            return True
+        except ValueError:
+            return False
+
+    def evil_unlink(filename: str | Path) -> None:
         """Simulate someone deletes the file right before we try to."""
-        if filename.startswith(cache_dir)
+        if (isinstance(filename, str) and filename.startswith(cache_dir)) or (
+            isinstance(filename, Path) and is_relative_to(filename, cache_dir)
+        ):
             orig_unlink(filename)
         orig_unlink(filename)
 
```
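The `is_relative_to` helper added in this hunk is a small backport: `pathlib.PurePath.is_relative_to` only exists on Python 3.9+, while this package still supports 3.8. A sketch of the stdlib behavior it reproduces (the paths below are illustrative):

```python
from pathlib import Path

# relative_to() raises ValueError for paths outside the base, so wrapping it
# in try/except yields a boolean check that also works on Python 3.8.
cache_dir = Path("/tmp/tldextract-cache")  # illustrative
inside = cache_dir / "publicsuffix.org.json"
outside = Path("/etc/hosts")

print(inside.relative_to(cache_dir))  # publicsuffix.org.json
try:
    outside.relative_to(cache_dir)
except ValueError:
    print("outside the cache dir")  # the case the helper maps to False
```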
tldextract-5.1.2/tests/test_release.py (new file)

```diff
@@ -0,0 +1,95 @@
+"""Test the library maintainer release script."""
+
+from __future__ import annotations
+
+import dataclasses
+import sys
+from collections.abc import Iterator
+from typing import Any
+from unittest import mock
+
+import pytest
+from syrupy.assertion import SnapshotAssertion
+
+from scripts import release
+
+
+@dataclasses.dataclass
+class Mocks:
+    """Collection of all mocked objects used in the release script."""
+
+    input: mock.Mock
+    listdir: mock.Mock
+    requests: mock.Mock
+    subprocess: mock.Mock
+
+    @property
+    def mock_calls(self) -> dict[str, Any]:
+        """A dict of _all_ calls to this class's mock objects."""
+        return {
+            k.name: getattr(self, k.name).mock_calls for k in dataclasses.fields(self)
+        }
+
+
+@pytest.fixture
+def mocks() -> Iterator[Mocks]:
+    """Stub network and subprocesses."""
+    with mock.patch("builtins.input") as mock_input, mock.patch(
+        "os.listdir"
+    ) as mock_listdir, mock.patch("requests.post") as mock_requests, mock.patch(
+        "subprocess.run"
+    ) as mock_subprocess:
+        yield Mocks(
+            input=mock_input,
+            listdir=mock_listdir,
+            requests=mock_requests,
+            subprocess=mock_subprocess,
+        )
+
+
+@pytest.mark.skipif(
+    sys.platform == "win32", reason="Snapshot paths are different on Windows"
+)
+def test_happy_path(
+    capsys: pytest.CaptureFixture[str],
+    mocks: Mocks,
+    monkeypatch: pytest.MonkeyPatch,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Test the release script happy path.
+
+    Simulate user input for a typical, existing release.
+
+    This one test case covers most lines of the release script, without
+    actually making network requests or running subprocesses. For an
+    infrequently used script, this coverage is useful without being too brittle
+    to change.
+    """
+    monkeypatch.setenv("GITHUB_TOKEN", "fake-token")
+
+    mocks.input.side_effect = ["y", "5.0.1", "y"]
+
+    mocks.listdir.return_value = ["archive1", "archive2", "archive3"]
+
+    def mock_post(*args: Any, **kwargs: Any) -> mock.Mock:
+        """Return _one_ response JSON that happens to match expectations for multiple requests."""
+        return mock.Mock(
+            json=mock.Mock(
+                return_value={
+                    "body": "Body start **Full Changelog**: fake-body",
+                    "html_url": "https://github.com/path/to/release",
+                }
+            ),
+        )
+
+    mocks.requests.side_effect = mock_post
+
+    mocks.subprocess.return_value.stdout = ""
+
+    release.main()
+
+    out, err = capsys.readouterr()
+
+    assert mocks.mock_calls == snapshot
+    assert out == snapshot
+    assert err == snapshot
```
{tldextract-5.1.0 → tldextract-5.1.2}/tldextract/cache.py

```diff
@@ -1,4 +1,5 @@
 """Helpers."""
+
 from __future__ import annotations
 
 import errno
@@ -6,9 +7,9 @@ import hashlib
 import json
 import logging
 import os
-import os.path
 import sys
 from collections.abc import Callable, Hashable, Iterable
+from pathlib import Path
 from typing import (
     TypeVar,
     cast,
@@ -37,8 +38,7 @@ else:
 
 
 def get_pkg_unique_identifier() -> str:
-    """
-    Generate an identifier unique to the python version, tldextract version, and python instance.
+    """Generate an identifier unique to the python version, tldextract version, and python instance.
 
     This will prevent interference between virtualenvs and issues that might arise when installing
     a new version of tldextract
@@ -65,8 +65,7 @@ def get_pkg_unique_identifier() -> str:
 
 
 def get_cache_dir() -> str:
-    """
-    Get a cache dir that we have permission to write to.
+    """Get a cache dir that we have permission to write to.
 
     Try to follow the XDG standard, but if that doesn't work fallback to the package directory
     http://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
@@ -79,15 +78,15 @@ def get_cache_dir() -> str:
     if xdg_cache_home is None:
         user_home = os.getenv("HOME", None)
         if user_home:
-            xdg_cache_home =
+            xdg_cache_home = str(Path(user_home, ".cache"))
 
     if xdg_cache_home is not None:
-        return
-            xdg_cache_home, "python-tldextract", get_pkg_unique_identifier()
+        return str(
+            Path(xdg_cache_home, "python-tldextract", get_pkg_unique_identifier())
         )
 
     # fallback to trying to use package directory itself
-    return
+    return str(Path(os.path.dirname(__file__), ".suffix_cache"))
 
 
 class DiskCache:
@@ -153,7 +152,7 @@ class DiskCache:
                     self.file_ext + ".lock"
                 ):
                     try:
-                        os.unlink(
+                        os.unlink(str(Path(root, filename)))
                     except FileNotFoundError:
                         pass
                     except OSError as exc:
@@ -165,10 +164,10 @@ class DiskCache:
     def _key_to_cachefile_path(
         self, namespace: str, key: str | dict[str, Hashable]
     ) -> str:
-        namespace_path =
+        namespace_path = str(Path(self.cache_dir, namespace))
         hashed_key = _make_cache_key(key)
 
-        cache_path =
+        cache_path = str(Path(namespace_path, hashed_key + self.file_ext))
 
         return cache_path
 
```
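The cache.py changes above replace hand-joined path strings with `pathlib.Path`, which appears to be the change behind the 5.1.1 "Fix path join on Windows" changelog note: `str(Path(...))` always yields the platform's native separators. A small sketch of the join style now used by `get_cache_dir` and `_key_to_cachefile_path`:

```python
from pathlib import Path

# On POSIX this prints "tldextract/.suffix_cache"; on Windows,
# "tldextract\.suffix_cache".
print(str(Path("tldextract", ".suffix_cache")))

# The cache dir identifier segment is illustrative here.
print(str(Path("/home/john", ".cache/python-tldextract", "example-identifier")))
```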
{tldextract-5.1.0 → tldextract-5.1.2}/tldextract/remote.py

```diff
@@ -3,19 +3,13 @@
 from __future__ import annotations
 
 import re
-from collections.abc import Callable
 from ipaddress import AddressValueError, IPv6Address
 from urllib.parse import scheme_chars
 
-inet_pton: Callable[[int, str], bytes] | None
-try:
-    from socket import AF_INET, AF_INET6, inet_pton  # Availability: Unix, Windows.
-except ImportError:
-    inet_pton = None
-
 IP_RE = re.compile(
-    r"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.)"
-    r"{3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$"
+    r"^(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.)"
+    r"{3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$",
+    re.ASCII,
 )
 
 scheme_chars_set = set(scheme_chars)
@@ -59,32 +53,16 @@ def _schemeless_url(url: str) -> str:
     return url[double_slashes_start + 2 :]
 
 
-def looks_like_ip(
-    maybe_ip: str, pton: Callable[[int, str], bytes] | None = inet_pton
-) -> bool:
-    """Check whether the given str looks like an IP address."""
+def looks_like_ip(maybe_ip: str) -> bool:
+    """Check whether the given str looks like an IPv4 address."""
     if not maybe_ip[0].isdigit():
         return False
 
-    if pton is not None:
-        try:
-            pton(AF_INET, maybe_ip)
-            return True
-        except OSError:
-            return False
     return IP_RE.fullmatch(maybe_ip) is not None
 
 
-def looks_like_ipv6(
-    maybe_ip: str, pton: Callable[[int, str], bytes] | None = inet_pton
-) -> bool:
+def looks_like_ipv6(maybe_ip: str) -> bool:
     """Check whether the given str looks like an IPv6 address."""
-    if pton is not None:
-        try:
-            pton(AF_INET6, maybe_ip)
-            return True
-        except OSError:
-            return False
     try:
         IPv6Address(maybe_ip)
     except AddressValueError:
```
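For reference, the rewritten `IP_RE` above can be exercised on its own; this standalone sketch mirrors what `looks_like_ip` now does without `socket.inet_pton`:

```python
import re

# Same IPv4 pattern as the new IP_RE: the (?:...) groups match without
# capturing (the changelog's "slight speed boost"), and fullmatch() anchors
# the check the way looks_like_ip() uses it.
IP_RE = re.compile(
    r"^(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.)"
    r"{3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$",
    re.ASCII,
)

for candidate in ("1.1.1.1", "1.1.1.01", "256.256.256.256", "a.1.1.1"):
    print(candidate, IP_RE.fullmatch(candidate) is not None)
# 1.1.1.1 True
# 1.1.1.01 False
# 256.256.256.256 False
# a.1.1.1 False
```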
{tldextract-5.1.0 → tldextract-5.1.2}/tldextract/tldextract.py

```diff
@@ -75,8 +75,7 @@ class ExtractResult:
 
     @property
     def registered_domain(self) -> str:
-        """
-        Joins the domain and suffix fields with a dot, if they're both set.
+        """Joins the domain and suffix fields with a dot, if they're both set.
 
         >>> extract('http://forums.bbc.co.uk').registered_domain
         'bbc.co.uk'
@@ -89,8 +88,7 @@ class ExtractResult:
 
     @property
     def fqdn(self) -> str:
-        """
-        Returns a Fully Qualified Domain Name, if there is a proper domain/suffix.
+        """Returns a Fully Qualified Domain Name, if there is a proper domain/suffix.
 
         >>> extract('http://forums.bbc.co.uk/path/to/file').fqdn
         'forums.bbc.co.uk'
@@ -103,8 +101,7 @@ class ExtractResult:
 
     @property
     def ipv4(self) -> str:
-        """
-        Returns the ipv4 if that is what the presented domain/url is.
+        """Returns the ipv4 if that is what the presented domain/url is.
 
         >>> extract('http://127.0.0.1/path/to/file').ipv4
         '127.0.0.1'
@@ -123,8 +120,7 @@ class ExtractResult:
 
     @property
     def ipv6(self) -> str:
-        """
-        Returns the ipv6 if that is what the presented domain/url is.
+        """Returns the ipv6 if that is what the presented domain/url is.
 
         >>> extract('http://[aBcD:ef01:2345:6789:aBcD:ef01:127.0.0.1]/path/to/file').ipv6
         'aBcD:ef01:2345:6789:aBcD:ef01:127.0.0.1'
@@ -334,8 +330,7 @@ class TLDExtract:
 
     @property
     def tlds(self, session: requests.Session | None = None) -> list[str]:
-        """
-        Returns the list of tld's used by default.
+        """Returns the list of tld's used by default.
 
         This will vary based on `include_psl_private_domains` and `extra_suffixes`
         """
```
{tldextract-5.1.0 → tldextract-5.1.2}/tldextract.egg-info/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: tldextract
-Version: 5.1.0
+Version: 5.1.2
 Summary: Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well.
 Author-email: John Kurkowski <john.kurkowski@gmail.com>
 License: BSD-3-Clause
@@ -14,6 +14,7 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Requires-Python: >=3.8
 Description-Content-Type: text/markdown
 License-File: LICENSE
@@ -21,6 +22,9 @@ Requires-Dist: idna
 Requires-Dist: requests>=2.1.0
 Requires-Dist: requests-file>=1.4
 Requires-Dist: filelock>=3.0.8
+Provides-Extra: release
+Requires-Dist: build; extra == "release"
+Requires-Dist: twine; extra == "release"
 Provides-Extra: testing
 Requires-Dist: black; extra == "testing"
 Requires-Dist: mypy; extra == "testing"
@@ -29,11 +33,12 @@ Requires-Dist: pytest-gitignore; extra == "testing"
 Requires-Dist: pytest-mock; extra == "testing"
 Requires-Dist: responses; extra == "testing"
 Requires-Dist: ruff; extra == "testing"
+Requires-Dist: syrupy; extra == "testing"
 Requires-Dist: tox; extra == "testing"
 Requires-Dist: types-filelock; extra == "testing"
 Requires-Dist: types-requests; extra == "testing"
 
-# tldextract [](https://badge.fury.io/py/tldextract) [](https://badge.fury.io/py/tldextract) [](https://github.com/john-kurkowski/tldextract/actions/workflows/ci.yml)
 
 `tldextract` accurately separates a URL's subdomain, domain, and public suffix,
 using [the Public Suffix List (PSL)](https://publicsuffix.org).
```
{tldextract-5.1.0 → tldextract-5.1.2}/tldextract.egg-info/SOURCES.txt

```diff
@@ -1,11 +1,12 @@
 .gitignore
-.travis.yml
 CHANGELOG.md
 LICENSE
 README.md
 pyproject.toml
 tox.ini
 .github/FUNDING.yml
+.github/workflows/ci.yml
+scripts/release.py
 tests/__init__.py
 tests/cli_test.py
 tests/conftest.py
@@ -14,7 +15,9 @@ tests/integration_test.py
 tests/main_test.py
 tests/test_cache.py
 tests/test_parallel.py
+tests/test_release.py
 tests/test_trie.py
+tests/__snapshots__/test_release.ambr
 tests/fixtures/fake_suffix_list_fixture.dat
 tldextract/.tld_set_snapshot
 tldextract/__init__.py
```
{tldextract-5.1.0 → tldextract-5.1.2}/tox.ini

```diff
@@ -1,5 +1,5 @@
 [tox]
-envlist = py{38,39,310,311,
+envlist = py{38,39,310,311,312,py38,py39,py310},codestyle,lint,typecheck
 
 [testenv]
 commands = pytest {posargs}
@@ -18,5 +18,5 @@ extras = testing
 
 [testenv:typecheck]
 basepython = python3.8
-commands = mypy --show-error-codes tldextract tests
+commands = mypy --show-error-codes scripts tldextract tests
 extras = testing
```
tldextract-5.1.0/.travis.yml DELETED

```diff
@@ -1,23 +0,0 @@
-dist: focal
-language: python
-matrix:
-  include:
-    - python: "3.8"
-      env: TOXENV=py38
-    - python: "3.9"
-      env: TOXENV=py39
-    - python: "3.10"
-      env: TOXENV=py310
-    - python: "3.11"
-      env: TOXENV=py311
-    - python: pypy3.8-7.3.9
-      dist: xenial
-      env: TOXENV=pypy3
-    - env: TOXENV=codestyle
-    - env: TOXENV=lint
-    - env: TOXENV=typecheck
-      python: "3.10"
-install:
-  - pip install --upgrade pip
-  - pip install --upgrade --editable '.[testing]'
-script: tox
```