fluidattacks-core 2.15.7__tar.gz → 2.16.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/PKG-INFO +3 -1
  2. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/aio/tasks.py +2 -1
  3. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/git/__init__.py +8 -8
  4. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/git/classes.py +1 -1
  5. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/git/clone.py +11 -11
  6. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/git/delete_files.py +6 -4
  7. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/git/download_file.py +1 -1
  8. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/git/download_repo.py +13 -18
  9. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/git/https_utils.py +4 -3
  10. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/git/remote.py +1 -1
  11. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/git/ssh_utils.py +6 -9
  12. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/git/utils.py +5 -10
  13. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/git/warp.py +17 -11
  14. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/http/client.py +13 -11
  15. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/http/validations.py +22 -13
  16. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/logging/formatters.py +2 -2
  17. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/logging/utils.py +4 -3
  18. fluidattacks_core-2.16.0/fluidattacks_core/sarif/__init__.py +2623 -0
  19. fluidattacks_core-2.16.0/fluidattacks_core/semver/match_versions.py +305 -0
  20. fluidattacks_core-2.16.0/fluidattacks_core/serializers/__init__.py +0 -0
  21. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/serializers/snippet.py +1 -2
  22. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/serializers/syntax.py +5 -9
  23. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core.egg-info/PKG-INFO +3 -1
  24. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core.egg-info/SOURCES.txt +5 -1
  25. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core.egg-info/requires.txt +3 -0
  26. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/pyproject.toml +6 -4
  27. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/test/test_aio_tasks.py +2 -2
  28. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/test/test_logging.py +2 -2
  29. fluidattacks_core-2.16.0/test/test_match_versions.py +144 -0
  30. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/README.md +0 -0
  31. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/__init__.py +0 -0
  32. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/aio/__init__.py +0 -0
  33. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/aio/processes.py +0 -0
  34. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/aio/runners.py +0 -0
  35. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/aio/threads.py +0 -0
  36. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/authz/__init__.py +0 -0
  37. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/authz/py.typed +0 -0
  38. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/authz/types.py +0 -0
  39. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/bugsnag/__init__.py +0 -0
  40. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/bugsnag/client.py +0 -0
  41. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/cpg/__init__.py +0 -0
  42. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/cpg/joern.py +0 -0
  43. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/filesystem/__init__.py +0 -0
  44. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/filesystem/defaults.py +0 -0
  45. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/git/codecommit_utils.py +0 -0
  46. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/git/py.typed +0 -0
  47. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/http/__init__.py +0 -0
  48. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/logging/__init__.py +0 -0
  49. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/logging/filters.py +0 -0
  50. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/logging/handlers.py +0 -0
  51. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/logging/presets.py +0 -0
  52. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/logging/types.py +0 -0
  53. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core/py.typed +0 -0
  54. {fluidattacks_core-2.15.7/fluidattacks_core/serializers → fluidattacks_core-2.16.0/fluidattacks_core/semver}/__init__.py +0 -0
  55. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core.egg-info/dependency_links.txt +0 -0
  56. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/fluidattacks_core.egg-info/top_level.txt +0 -0
  57. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/setup.cfg +0 -0
  58. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/test/test_extract_db.py +0 -0
  59. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/test/test_language_detection.py +0 -0
  60. {fluidattacks_core-2.15.7 → fluidattacks_core-2.16.0}/test/test_make_snippet.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fluidattacks-core
-Version: 2.15.7
+Version: 2.16.0
 Summary: Fluid Attacks Core Library
 Author-email: Development <development@fluidattacks.com>
 License: MPL-2.0
@@ -49,6 +49,7 @@ Requires-Dist: platformdirs>=4.3.8; extra == "cpg"
 Provides-Extra: filesystem
 Provides-Extra: sarif
 Requires-Dist: pydantic>=2.12.3; extra == "sarif"
+Provides-Extra: semver
 Provides-Extra: all
 Requires-Dist: fluidattacks-core[aio]; extra == "all"
 Requires-Dist: fluidattacks-core[authz]; extra == "all"
@@ -59,6 +60,7 @@ Requires-Dist: fluidattacks-core[serializers]; extra == "all"
 Requires-Dist: fluidattacks-core[filesystem]; extra == "all"
 Requires-Dist: fluidattacks-core[cpg]; extra == "all"
 Requires-Dist: fluidattacks-core[sarif]; extra == "all"
+Requires-Dist: fluidattacks-core[semver]; extra == "all"

 # Fluid Attacks Core Library

fluidattacks_core/aio/tasks.py
@@ -165,7 +165,8 @@ async def merge_async_generators(

     """
     if limit < 1:
-        raise ValueError("limit must be at least 1")
+        msg = "limit must be at least 1"
+        raise ValueError(msg)

     queue: asyncio.Queue[T | object] = asyncio.Queue()
     active_generators = [0]  # Use list to allow modification in nested function
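Most hunks below repeat this same refactor: the exception message is bound to a local variable and the variable is raised, rather than building the string inside the raise call (the style Ruff's EM rules push toward). A minimal before/after sketch of the shape, using an illustrative validation function:

    # Before: the message literal is constructed inside the raise expression.
    def check_limit_old(limit: int) -> int:
        if limit < 1:
            raise ValueError("limit must be at least 1")
        return limit

    # After: the message is assigned first, then raised -- the pattern applied across 2.16.0.
    def check_limit_new(limit: int) -> int:
        if limit < 1:
            msg = "limit must be at least 1"
            raise ValueError(msg)
        return limit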
fluidattacks_core/git/__init__.py
@@ -131,9 +131,9 @@ async def get_line_author(
         OSError,
         SubprocessError,
         UnicodeDecodeError,
-    ) as exc:
+    ):
         LOGGER.exception(
-            exc,
+            "An error occurred while getting the line author",
             extra={
                 "extra": {
                     "repo_path": repo_path,
fluidattacks_core/git/__init__.py
@@ -200,7 +200,7 @@ async def is_commit_in_branch(
     return branch in stdout.decode()


-def rebase(
+def rebase(  # noqa: PLR0913
     repo: Repo,
     *,
     path: str,
@@ -223,9 +223,9 @@ def rebase(
             M=True,
             C=True,
         ).splitlines()
-    except GitError as exc:
+    except GitError:
         if ignore_errors:
-            LOGGER.exception(exc)
+            LOGGER.exception("A git error occurred while rebasing")
             return None

         raise
@@ -240,10 +240,10 @@ def rebase(
         new_path = (
            new_path.encode("latin-1").decode("unicode-escape").encode("latin-1").decode("utf-8")
         ).strip('"')
-    except (UnicodeDecodeError, UnicodeEncodeError) as exc:
+    except (UnicodeDecodeError, UnicodeEncodeError):
         if ignore_errors:
             LOGGER.exception(
-                exc,
+                "Error decoding the new path",
                 extra={
                     "extra": {
                         "path": path,
@@ -267,7 +267,7 @@ def get_head_commit(path_to_repo: Path, branch: str) -> str | None:
     return None


-async def clone(
+async def clone(  # noqa: PLR0913
     repo_url: str,
     repo_branch: str,
     *,
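The `# noqa: PLR0913` comments added to rebase, clone, and the other wide signatures suppress Ruff's "too many arguments" rule, which by default flags functions that take more than five parameters. A hypothetical signature (names are made up, not part of the library) showing where the comment sits and why the rule would otherwise fire:

    def build_clone_args(  # noqa: PLR0913
        repo_url: str,
        branch: str,
        *,
        user: str | None = None,
        password: str | None = None,
        token: str | None = None,
        mirror: bool = False,
    ) -> list[str]:
        # Six parameters exceed the default limit of five, so PLR0913 triggers
        # unless it is silenced on the def line as above.
        args = ["git", "clone", "--branch", branch, repo_url]
        if mirror:
            args.insert(2, "--mirror")
        _ = (user, password, token)  # credential handling omitted in this sketch
        return args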
fluidattacks_core/git/classes.py
@@ -18,7 +18,7 @@ class RebaseResult(NamedTuple):
     rev: str


-class InvalidParameter(Exception):
+class InvalidParameter(Exception):  # noqa: N818
     """Exception to control empty required parameters."""

     def __init__(self, field: str = "") -> None:
fluidattacks_core/git/clone.py
@@ -75,14 +75,14 @@ async def ssh_clone(
         )
         _, stderr = await proc.communicate()
     except OSError as ex:
-        LOGGER.exception(ex, extra={"extra": {"branch": branch, "repo": repo_url}})
+        LOGGER.exception(MSG, extra={"extra": {"branch": branch, "repo": repo_url}})

         return None, str(ex)

-    os.remove(ssh_file_name)
+    os.remove(ssh_file_name)  # noqa: PTH107

     if mirror and proc.returncode == 0:
-        with open(f"{folder_to_clone_root}/.info.json", "w") as f:  # noqa: ASYNC230
+        with open(f"{folder_to_clone_root}/.info.json", "w") as f:  # noqa: ASYNC230,PTH123
             json.dump({"fluid_branch": branch, "repo": repo_url}, f)
     if proc.returncode == 0:
         return (folder_to_clone_root, None)
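The new PTH suppressions refer to Ruff's flake8-use-pathlib rules: PTH107 covers os.remove, PTH118 covers os.path.join, and PTH123 covers the open builtin. This release keeps the os-based calls and silences the rule instead; for reference, the pathlib spellings the rule points to look roughly like this (the path below is illustrative):

    from pathlib import Path

    # os.remove(ssh_file_name)          ->  Path(ssh_file_name).unlink()
    # os.path.join(repo_path, match)    ->  Path(repo_path) / match
    # open(f"{root}/.info.json", "w")   ->  (Path(root) / ".info.json").open("w")

    info_file = Path("/tmp/clone-root") / ".info.json"  # illustrative path
    with info_file.open("w", encoding="utf-8") as handle:
        handle.write("{}")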
fluidattacks_core/git/clone.py
@@ -92,7 +92,7 @@ async def ssh_clone(
     return (None, stderr.decode("utf-8"))


-async def https_clone(
+async def https_clone(  # noqa: PLR0913
     *,
     branch: str,
     repo_url: str,
@@ -149,12 +149,12 @@ async def https_clone(
         )
         _, stderr = await proc.communicate()
     except OSError as ex:
-        LOGGER.exception(ex, extra={"extra": {"branch": branch, "repo": repo_url}})
+        LOGGER.exception(MSG, extra={"extra": {"branch": branch, "repo": repo_url}})

         return None, str(ex)

     if mirror and proc.returncode == 0:
-        with open(f"{folder_to_clone_root}/.info.json", "w") as f:  # noqa: ASYNC230
+        with open(f"{folder_to_clone_root}/.info.json", "w") as f:  # noqa: ASYNC230,PTH123
             json.dump({"fluid_branch": branch, "repo": repo_url}, f)

     if proc.returncode == 0:
@@ -165,7 +165,7 @@ async def https_clone(
     return (None, stderr.decode("utf-8"))


-async def codecommit_clone(
+async def codecommit_clone(  # noqa: PLR0913
     *,
     env: dict[str, str],
     branch: str,
@@ -202,12 +202,12 @@ async def codecommit_clone(
         )
         _, stderr = await proc.communicate()
     except OSError as ex:
-        LOGGER.exception(ex, extra={"extra": {"branch": branch, "repo": repo_url}})
+        LOGGER.exception(MSG, extra={"extra": {"branch": branch, "repo": repo_url}})

         return None, str(ex)

     if mirror and proc.returncode == 0:
-        with open(f"{folder_to_clone_root}/.info.json", "w") as f:  # noqa: ASYNC230
+        with open(f"{folder_to_clone_root}/.info.json", "w") as f:  # noqa: ASYNC230, PTH123
             json.dump({"fluid_branch": branch, "repo": repo_url}, f)

     if proc.returncode == 0:
@@ -218,7 +218,7 @@ async def codecommit_clone(
     return (None, stderr.decode("utf-8"))


-async def call_codecommit_clone(
+async def call_codecommit_clone(  # noqa: PLR0913
     *,
     branch: str,
     repo_url: str,
@@ -252,7 +252,7 @@ async def call_codecommit_clone(
         )

     except ClientError as exc:
-        LOGGER.error(
+        LOGGER.exception(
             MSG,
             extra={
                 "extra": {
fluidattacks_core/git/delete_files.py
@@ -15,7 +15,7 @@ def delete_out_of_scope_files(git_ignore: list[str], repo_path: str) -> None:
         if match.startswith(".git/"):
             continue

-        file_path = os.path.join(repo_path, match)
+        file_path = os.path.join(repo_path, match)  # noqa: PTH118
         if Path(file_path).is_file():
             with suppress(FileNotFoundError):
                 Path(file_path).unlink()
@@ -23,10 +23,12 @@ def delete_out_of_scope_files(git_ignore: list[str], repo_path: str) -> None:
     # remove empty directories
     for root, dirs, _ in os.walk(repo_path, topdown=False):
         for dir_name in dirs:
-            dir_path = os.path.join(root, dir_name)
+            dir_path = os.path.join(root, dir_name)  # noqa: PTH118
             try:
                 if not os.listdir(dir_path):  # noqa: PTH208
                     Path(dir_path).rmdir()
-            except FileNotFoundError as exc:
-                LOGGER.exception(exc, extra={"extra": {"dir_path": dir_path}})
+            except FileNotFoundError:
+                LOGGER.exception(
+                    "Error removing empty directory", extra={"extra": {"dir_path": dir_path}}
+                )
                 continue
fluidattacks_core/git/download_file.py
@@ -8,7 +8,7 @@ LOGGER = logging.getLogger(__name__)


 async def download_file(url: str, destination_path: str) -> bool:
-    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=3600)) as session:
+    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=3600)) as session:  # noqa: SIM117
         async with session.get(url) as response:
             if response.status == 200:
                 async with aiofiles.open(destination_path, "wb") as file:
fluidattacks_core/git/download_repo.py
@@ -30,8 +30,8 @@ def _safe_extract_tar(tar_handler: tarfile.TarFile, file_path: Path) -> bool:
             continue
         try:
             tar_handler.extract(member, path=file_path, numeric_owner=True)
-        except tarfile.ExtractError as ex:
-            LOGGER.error("Error extracting %s: %s", member.name, ex)
+        except tarfile.ExtractError:
+            LOGGER.exception("Error extracting %s", member.name)

     return True

@@ -39,7 +39,7 @@ def _safe_extract_tar(tar_handler: tarfile.TarFile, file_path: Path) -> bool:
 def remove_symlinks_in_directory(directory: str) -> None:
     for root, _, files in os.walk(directory):
         for file in files:
-            file_path = os.path.join(root, file)
+            file_path = os.path.join(root, file)  # noqa: PTH118
             if Path(file_path).is_symlink():
                 Path(file_path).unlink(missing_ok=True)

@@ -47,10 +47,8 @@ def remove_symlinks_in_directory(directory: str) -> None:
 async def reset_repo(repo_path: str) -> bool:
     try:
         Path.cwd()
-    except OSError as exc:
-        LOGGER.error("Failed to get the working directory: %s", repo_path)
-        LOGGER.error(exc)
-        LOGGER.error("\n")
+    except OSError:
+        LOGGER.exception("Failed to get the working directory: %s", repo_path)
     os.chdir(repo_path)

     try:
@@ -64,19 +62,14 @@ async def reset_repo(repo_path: str) -> bool:
                 "*",
             ],
         )
-    except GitError as exc:
-        LOGGER.error("Failed to add safe directory %s", repo_path)
-        LOGGER.error(exc)
-        LOGGER.error("\n")
+    except GitError:
+        LOGGER.exception("Failed to add safe directory %s", repo_path)

     try:
         repo = Repo(repo_path)
         repo.git.reset("--hard", "HEAD")
-    except GitError as exc:
-        LOGGER.error("Expand repositories has failed:")
-        LOGGER.error("Repository: %s", repo_path)
-        LOGGER.error(exc)
-        LOGGER.error("\n")
+    except GitError:
+        LOGGER.exception("Expand repositories has failed for repository %s", repo_path)

     return False

@@ -121,8 +114,10 @@ async def download_repo_from_s3(

         shutil.move(extracted_dir, destination_path)

-    except OSError as ex:
-        LOGGER.exception(ex, extra={"extra": {"path": destination_path}})
+    except OSError:
+        LOGGER.exception(
+            "Error downloading repository", extra={"extra": {"path": destination_path}}
+        )
         return False

     if not await reset_repo(str(destination_path.absolute())):
fluidattacks_core/git/https_utils.py
@@ -90,7 +90,8 @@ async def _get_url(
         original = parse_url(original_url.removesuffix("/"))
         url = parse_url(redirect_url.removesuffix("/")) if redirect_url else original
     except LocationParseError as exc:
-        raise HTTPValidationError(f"Invalid URL {redirect_url}") from exc
+        msg = f"Invalid URL {redirect_url}"
+        raise HTTPValidationError(msg) from exc

     if max_retries < 1:
         return format_redirected_url(original, url)
@@ -174,7 +175,7 @@ async def _execute_git_command(
     return stdout, _stderr, proc.returncode


-async def https_ls_remote(
+async def https_ls_remote(  # noqa: PLR0913
     *,
     repo_url: str,
     branch: str,
@@ -210,7 +211,7 @@ async def https_ls_remote(
     return None, stderr.decode("utf-8")


-async def call_https_ls_remote(
+async def call_https_ls_remote(  # noqa: PLR0913
     *,
     repo_url: str,
     user: str | None,
fluidattacks_core/git/remote.py
@@ -3,7 +3,7 @@ from .https_utils import call_https_ls_remote
 from .ssh_utils import call_ssh_ls_remote


-async def ls_remote(
+async def ls_remote(  # noqa: PLR0913
     repo_url: str,
     repo_branch: str,
     *,
fluidattacks_core/git/ssh_utils.py
@@ -10,8 +10,7 @@ def _add_ssh_scheme_to_url(url: str) -> str:
     scheme: str = "ssh://"
     if url.startswith(scheme):
         return url
-    url_with_scheme = scheme + url
-    return url_with_scheme
+    return scheme + url


 def _url_has_port(url: str) -> bool:
@@ -65,8 +64,7 @@ def _set_default_ssh_port(url_with_scheme: str) -> str:
     if len(url_parts) < 3:
         return url_with_scheme

-    url_with_default_port = f"{url_parts[0]}:{url_parts[1]}:/{url_parts[2]}"
-    return url_with_default_port
+    return f"{url_parts[0]}:{url_parts[1]}:/{url_parts[2]}"


 def parse_ssh_url(url: str) -> str:
@@ -76,13 +74,12 @@ def parse_ssh_url(url: str) -> str:
     url_with_scheme = _add_ssh_scheme_to_url(url)

     # url misses an explicit ssh port
-    url_with_default_ssh_port = _set_default_ssh_port(url_with_scheme)
-    return url_with_default_ssh_port
+    return _set_default_ssh_port(url_with_scheme)


 def _create_ssh_file(temp_dir: str, credential_key: str) -> str:
-    ssh_file_name: str = os.path.join(temp_dir, str(uuid.uuid4()))
-    with open(
+    ssh_file_name: str = os.path.join(temp_dir, str(uuid.uuid4()))  # noqa: PTH118
+    with open(  # noqa: PTH123
         os.open(ssh_file_name, os.O_CREAT | os.O_WRONLY, 0o400),
         "w",
         encoding="utf-8",
@@ -139,7 +136,7 @@ async def ssh_ls_remote(
         return None, "git ls-remote time out"

     finally:
-        os.remove(ssh_file_name)
+        os.remove(ssh_file_name)  # noqa: PTH107

     if return_code == 0:
         return stdout.decode().split("\t")[0], None
fluidattacks_core/git/utils.py
@@ -25,9 +25,7 @@ def _format_token(
     provider: str | None,
 ) -> str:
     formatted_token = _format_token_for_provider(provider, token, host)
-    url = _replace_netloc_in_url(parsed_url, formatted_token)
-
-    return url
+    return _replace_netloc_in_url(parsed_url, formatted_token)


 def _quote_if_not_none(value: str | None) -> str | None:
@@ -36,8 +34,7 @@ def _quote_if_not_none(value: str | None) -> str | None:

 def _quote_path_in_url(url: str) -> ParseResult:
     parsed_url = urlparse(url)
-    parsed_url = parsed_url._replace(path=quote(unquote(parsed_url.path)))
-    return parsed_url
+    return parsed_url._replace(path=quote(unquote(parsed_url.path)))


 def _get_host_from_url(parsed_url: ParseResult) -> str:
@@ -47,7 +44,7 @@ def _get_host_from_url(parsed_url: ParseResult) -> str:
     return host


-def _get_url_based_on_credentials(
+def _get_url_based_on_credentials(  # noqa: PLR0913
     *,
     parsed_url: ParseResult,
     token: str | None,
@@ -63,7 +60,7 @@ def _get_url_based_on_credentials(
     return parsed_url.geturl()


-def format_url(
+def format_url(  # noqa: PLR0913
     *,
     repo_url: str,
     user: str | None = None,
@@ -79,7 +76,7 @@ format_url(
     host = _get_host_from_url(parsed_url)
     user = _quote_if_not_none(user)
     password = _quote_if_not_none(password)
-    url = _get_url_based_on_credentials(
+    return _get_url_based_on_credentials(
         parsed_url=parsed_url,
         token=token,
         host=host,
@@ -87,5 +84,3 @@ format_url(
         user=user,
         password=password,
     )
-
-    return url
fluidattacks_core/git/warp.py
@@ -18,7 +18,7 @@ class WarpError(Exception):
 async def test_public_ip(expected_ip: str) -> bool:
     ip_service_url = "https://api.ipify.org?format=text"
     try:
-        async with aiohttp.ClientSession() as session:
+        async with aiohttp.ClientSession() as session:  # noqa: SIM117
             async with session.get(ip_service_url) as response:
                 if response.status == 200:
                     public_ip = await response.text()
@@ -28,8 +28,8 @@ async def test_public_ip(expected_ip: str) -> bool:
                 LOGGER.error("Failed to fetch public IP. Status code: %s", response.status)
                 return False

-    except aiohttp.ClientError as ex:
-        LOGGER.exception(ex)
+    except aiohttp.ClientError:
+        LOGGER.exception("Error fetching public IP")
         return False


@@ -86,10 +86,12 @@ async def warp_cli(*args: str) -> str:
     try:
         stdout, stderr = await asyncio.wait_for(proc.communicate(), 30)
     except (asyncio.exceptions.TimeoutError, OSError) as ex:
-        raise WarpError("Failed to run command") from ex
+        msg = "Failed to run command"
+        raise WarpError(msg) from ex

     if proc.returncode != 0:
-        raise WarpError(stderr.decode().strip())
+        msg = stderr.decode().strip()
+        raise WarpError(msg)

     return stdout.decode().strip()

@@ -103,7 +105,8 @@ async def warp_cli_connect() -> None:
     LOGGER.info("Connect: %s", response)
     await asyncio.sleep(CONFIG_DELAY)
     if not await is_dns_ready(host_to_test_dns=DOMAIN_TO_TEST_DNS):
-        raise WarpError("Failed to resolve DNS")
+        msg = "Failed to resolve DNS"
+        raise WarpError(msg)

     LOGGER.info("Connected. Status: %s", await warp_cli_status())

@@ -121,7 +124,8 @@ async def warp_cli_get_virtual_network_id(vnet_name: str) -> str:
         await warp_cli("vnet"),
     )
     if not vnet_id_match:
-        raise WarpError(f"Failed to find virtual network {vnet_name}")
+        msg = f"Failed to find virtual network {vnet_name}"
+        raise WarpError(msg)

     return vnet_id_match.groups()[0]

@@ -159,10 +163,12 @@ async def _ip_route_get(host: str) -> tuple[bytes, bytes]:
     try:
         stdout, stderr = await asyncio.wait_for(proc.communicate(), 5)
     except asyncio.exceptions.TimeoutError as ex:
-        raise WarpError("Timeout - Failed to retrieve route") from ex
+        msg = "Timeout - Failed to retrieve route"
+        raise WarpError(msg) from ex

     if proc.returncode != 0:
-        raise WarpError(stderr.decode())
+        msg = stderr.decode()
+        raise WarpError(msg)

     return stdout, stderr

@@ -171,8 +177,8 @@ async def is_using_split_tunnel(host: str) -> bool:
     try:
         stdout, _ = await _ip_route_get(host)
         LOGGER.info("Route command for '%s': %s", host, stdout.decode().replace("\n", " "))
-    except WarpError as ex:
-        LOGGER.exception(ex)
+    except WarpError:
+        LOGGER.exception("Error getting IP route in split tunnel")
         return False
     else:
         return b"CloudflareWARP" in stdout
fluidattacks_core/http/client.py
@@ -39,7 +39,7 @@ def get_secure_connector(
     return SecureTCPConnector


-async def request(
+async def request(  # noqa: PLR0913
     url: str,
     *,
     method: Literal["GET", "POST", "PUT", "PATCH", "DELETE"],
@@ -49,10 +49,10 @@ async def request(
     dns_rebind_protection: bool = True,
     enforce_sanitization: bool = False,
     headers: dict[str, str] | None = None,
-    json: Any | None = None,
+    json: Any | None = None,  # noqa: ANN401
     ports: list[int] | None = None,
     schemes: list[str] | None = None,
-    timeout: int = 10,
+    timeout: int = 10,  # noqa: ASYNC109
 ) -> aiohttp.ClientResponse:
     validate_url(
         url,
@@ -73,16 +73,18 @@ async def request(
         ssl=ssl.create_default_context(cafile=certifi.where()),
     )

-    async with aiohttp.ClientSession(
-        connector=connection,
-        headers=headers,
-    ) as session:
-        async with session.request(
+    async with (
+        aiohttp.ClientSession(
+            connector=connection,
+            headers=headers,
+        ) as session,
+        session.request(
             method,
             url,
             allow_redirects=not dns_rebind_protection,
             json=json,
             timeout=aiohttp.ClientTimeout(total=timeout),
-        ) as response:
-            await response.read()
-            return response
+        ) as response,
+    ):
+        await response.read()
+        return response
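The nested async with blocks in request() are collapsed into one statement with parenthesized context managers, a form Python supports since 3.10 and the same shape Ruff's SIM117 rule suggests (the two call sites that keep the nested form get a `# noqa: SIM117` instead). A minimal standalone sketch of the pattern, with an illustrative URL:

    import asyncio

    import aiohttp


    async def fetch_status(url: str) -> int:
        # One async with statement managing both the session and the request.
        async with (
            aiohttp.ClientSession() as session,
            session.get(url) as response,
        ):
            return response.status


    if __name__ == "__main__":
        print(asyncio.run(fetch_status("https://example.com")))  # illustrative URL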
fluidattacks_core/http/validations.py
@@ -11,14 +11,14 @@ class HTTPValidationError(Exception):

 def validate_scheme(scheme: str | None, schemes: list[str]) -> None:
     if scheme and scheme not in schemes:
-        raise HTTPValidationError(f"Only allowed schemes are {', '.join(schemes)}")
+        msg = f"Only allowed schemes are {', '.join(schemes)}"
+        raise HTTPValidationError(msg)


 def validate_port(port: int | None, ports: list[int]) -> None:
     if port and port < 1024 and port not in ports:
-        raise HTTPValidationError(
-            f"Only allowed ports are {', '.join(map(str, ports))}, and any over 1024",
-        )
+        msg = f"Only allowed ports are {', '.join(map(str, ports))}, and any over 1024"
+        raise HTTPValidationError(msg)


 def validate_host(host: str | None) -> None:
@@ -31,20 +31,23 @@ def validate_host(host: str | None) -> None:
            return
    else:
        return
-    raise HTTPValidationError("Hostname or IP address invalid")
+    msg = "Hostname or IP address invalid"
+    raise HTTPValidationError(msg)


 def validate_unicode_restriction(uri: Url) -> None:
     uri_str = str(uri)
     if not uri_str.isascii():
-        raise HTTPValidationError(f"URI must be ascii only {uri_str}")
+        msg = f"URI must be ascii only {uri_str}"
+        raise HTTPValidationError(msg)


 def validate_html_tags(uri: Url) -> None:
     uri_str = str(uri)
     sanitized_uri = html.escape(uri_str)
     if sanitized_uri != uri_str:
-        raise HTTPValidationError("HTML/CSS/JS tags are not allowed")
+        msg = "HTML/CSS/JS tags are not allowed"
+        raise HTTPValidationError(msg)


 def validate_url(
@@ -58,7 +61,8 @@ def validate_url(
     try:
         uri = parse_url(url)
     except LocationParseError as exc:
-        raise HTTPValidationError(f"Invalid URL {url}") from exc
+        msg = f"Invalid URL {url}"
+        raise HTTPValidationError(msg) from exc
     validate_host(uri.host)

     if ascii_only:
@@ -75,21 +79,24 @@ def validate_loopback(
     ips: list[ipaddress.IPv4Address | ipaddress.IPv6Address],
 ) -> None:
     if any(ip.is_loopback for ip in ips):
-        raise HTTPValidationError("Requests to loopback addresses are not allowed")
+        msg = "Requests to loopback addresses are not allowed"
+        raise HTTPValidationError(msg)


 def validate_local_network(
     ips: list[ipaddress.IPv4Address | ipaddress.IPv6Address],
 ) -> None:
     if any(ip.is_private for ip in ips):
-        raise HTTPValidationError("Requests to the local network are not allowed")
+        msg = "Requests to the local network are not allowed"
+        raise HTTPValidationError(msg)


 def validate_link_local(
     ips: list[ipaddress.IPv4Address | ipaddress.IPv6Address],
 ) -> None:
     if any(ip.is_link_local for ip in ips):
-        raise HTTPValidationError("Requests to the link local network are not allowed")
+        msg = "Requests to the link local network are not allowed"
+        raise HTTPValidationError(msg)


 def validate_shared_address(
@@ -97,7 +104,8 @@
 ) -> None:
     shared_address_space = ipaddress.ip_network("100.64.0.0/10")
     if any(ip in shared_address_space for ip in ips):
-        raise HTTPValidationError("Requests to the shared address space are not allowed")
+        msg = "Requests to the shared address space are not allowed"
+        raise HTTPValidationError(msg)


 def validate_limited_broadcast_address(
@@ -105,7 +113,8 @@
 ) -> None:
     limited_broadcast_address = ipaddress.ip_address("255.255.255.255")
     if any(ip == limited_broadcast_address for ip in ips):
-        raise HTTPValidationError("Requests to the limited broadcast address are not allowed")
+        msg = "Requests to the limited broadcast address are not allowed"
+        raise HTTPValidationError(msg)


 def validate_local_request(
fluidattacks_core/logging/formatters.py
@@ -48,8 +48,8 @@ class ColorfulFormatter(logging.Formatter):


 class CustomJsonFormatter(JsonFormatter):
-    def __init__(self, *args: Any, **kwargs: Any) -> None:
-        def json_default(object_: object) -> Any:
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # noqa: ANN401
+        def json_default(object_: object) -> Any:  # noqa: ANN401
             if isinstance(object_, set):
                 return list(object_)
             if isinstance(object_, datetime):