fluidattacks-core 5.0.1__py3-none-any.whl → 6.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  import asyncio
  import logging
- from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Coroutine, Iterable
+ from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Coroutine, Iterable, Iterator
  from contextlib import suppress
  from typing import Any, Literal, TypeVar, cast, overload

@@ -134,6 +134,23 @@ async def _consume_generator(
  await queue.put(_GENERATOR_DONE_SENTINEL)


+ def _start_initial_generator_tasks(
+ gen_iter: Iterator[AsyncGenerator[T, None]],
+ limit: int,
+ queue: asyncio.Queue[T | object],
+ active_generators: list[int],
+ ) -> list[asyncio.Task[None]]:
+ tasks: list[asyncio.Task[None]] = []
+ # Start initial tasks up to the limit
+ for _ in range(limit):
+ try:
+ gen = next(gen_iter)
+ tasks.append(asyncio.create_task(_consume_generator(gen, queue, active_generators)))
+ except StopIteration:
+ break
+ return tasks
+
+
  async def merge_async_generators(
  generators: Iterable[AsyncGenerator[T, None]],
  limit: int,
@@ -170,18 +187,8 @@ async def merge_async_generators(

  queue: asyncio.Queue[T | object] = asyncio.Queue()
  active_generators = [0] # Use list to allow modification in nested function
- tasks: list[asyncio.Task[None]] = []
-
  gen_iter = iter(generators)
-
- # Start initial tasks up to the limit
- for _ in range(limit):
- try:
- gen = next(gen_iter)
- tasks.append(asyncio.create_task(_consume_generator(gen, queue, active_generators)))
- except StopIteration:
- break
-
+ tasks = _start_initial_generator_tasks(gen_iter, limit, queue, active_generators)
  # Keep track of how many generators we expect to finish
  expected_done_signals = len(tasks)
  done_signals_received = 0
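For context, a minimal usage sketch of the merging helper touched above: it assumes merge_async_generators is exported from fluidattacks_core.aio.tasks and can be iterated with `async for`, yielding items from the supplied sources while running at most `limit` of them concurrently. The sample generators below are hypothetical.

# Illustrative sketch only: assumes merge_async_generators behaves as an async
# generator that yields merged items, with at most `limit` sources consumed at once.
import asyncio

from fluidattacks_core.aio.tasks import merge_async_generators


async def numbers(start: int):
    # Toy async generator standing in for real async sources.
    for value in range(start, start + 3):
        await asyncio.sleep(0)
        yield value


async def main() -> None:
    sources = (numbers(start) for start in (0, 10, 20))
    async for item in merge_async_generators(sources, limit=2):
        print(item)


asyncio.run(main())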
@@ -3,6 +3,7 @@ import hashlib
  from contextlib import suppress
  from pathlib import Path

+ import anyio
  from aioboto3 import Session
  from platformdirs import user_cache_dir

@@ -83,7 +84,7 @@ async def generate_cpg(
  exclude: list[Path] | None = None,
  ) -> Path | None:
  cache_dir = Path(user_cache_dir("sifts"))
- cache_dir.mkdir(parents=True, exist_ok=True)
+ await anyio.Path(cache_dir).mkdir(parents=True, exist_ok=True)
  try:
  relative_path = working_dir.relative_to(await _get_repo_top_level(working_dir))
  except ValueError:
@@ -134,7 +135,7 @@ async def get_cpg(
  repo_nickname: str,
  ) -> Path | None:
  cache_dir = Path(user_cache_dir("sifts"))
- cache_dir.mkdir(parents=True, exist_ok=True)
+ await anyio.Path(cache_dir).mkdir(parents=True, exist_ok=True)
  try:
  relative_path = working_dir.relative_to(await _get_repo_top_level(working_dir))
  except ValueError:
@@ -17,6 +17,61 @@ if TYPE_CHECKING:
  from collections.abc import Iterable


+ # Language priority mapping (higher number = higher priority)
+ # This helps resolve conflicts between similar languages
+ _LANGUAGE_PRIORITIES = {
+ Language.TypeScript: 10, # TypeScript has priority over JavaScript
+ Language.JavaScript: 5,
+ Language.Kotlin: 8, # Kotlin has priority over Java
+ Language.Java: 6,
+ Language.Rust: 9, # Rust has priority over C
+ Language.C: 4,
+ Language.Scala: 7, # Scala has priority over Java
+ Language.Dart: 8, # Dart has priority over JavaScript
+ Language.Python: 7,
+ Language.Go: 8,
+ Language.PHP: 6,
+ Language.Ruby: 6,
+ Language.CSharp: 7,
+ Language.Swift: 8,
+ }
+
+
+ def _calculate_language_score(
+ names: set[str],
+ markers: dict[str, set[str] | list[str]],
+ ) -> int:
+ exacts: set[str] = {n.casefold() for n in markers["names"]}
+ patterns: list[str] = list(markers["globs"])
+ # Check exact name matches (higher confidence)
+ score = len([n for n in exacts if n in names]) * 10
+ # Check glob pattern matches (lower confidence)
+ glob_matches = sum(1 for f in names if any(fnmatch(f, pat) for pat in patterns))
+ return score + glob_matches * 5
+
+
+ def _resolve_language_conflicts(
+ hits: list[Language],
+ language_scores: dict[Language, int],
+ ) -> list[Language]:
+ # Resolve conflicts using priority system.
+ if len(hits) <= 1:
+ return hits
+ # Sort by priority (descending) and then by confidence score (descending)
+ hits.sort(
+ key=lambda lang: (_LANGUAGE_PRIORITIES.get(lang, 0), language_scores.get(lang, 0)),
+ reverse=True,
+ )
+ # Remove lower priority languages that conflict with higher priority ones
+ resolved_hits: list[Language] = []
+ for lang in hits:
+ # Check if this language conflicts with any already resolved language
+ conflicts = _get_language_conflicts(lang)
+ if not any(conflict in resolved_hits for conflict in conflicts):
+ resolved_hits.append(lang)
+ return resolved_hits
+
+
  def _detect_languages_in_dir(files: Iterable[str]) -> list[Language]:
  """Return programming languages detected in a directory via exact filenames or glob patterns.

@@ -24,74 +79,15 @@ def _detect_languages_in_dir(files: Iterable[str]) -> list[Language]:
  """
  names = {f.casefold() for f in files}
  hits: list[Language] = []
-
- # Language priority mapping (higher number = higher priority)
- # This helps resolve conflicts between similar languages
- language_priorities = {
- Language.TypeScript: 10, # TypeScript has priority over JavaScript
- Language.JavaScript: 5,
- Language.Kotlin: 8, # Kotlin has priority over Java
- Language.Java: 6,
- Language.Rust: 9, # Rust has priority over C
- Language.C: 4,
- Language.Scala: 7, # Scala has priority over Java
- Language.Dart: 8, # Dart has priority over JavaScript
- Language.Python: 7,
- Language.Go: 8,
- Language.PHP: 6,
- Language.Ruby: 6,
- Language.CSharp: 7,
- Language.Swift: 8,
- }
-
- # Track confidence scores for each language
  language_scores: dict[Language, int] = {}

  for lang, markers in CONFIG_MARKERS.items():
- exacts: set[str] = {n.casefold() for n in markers["names"]}
- patterns: list[str] = list(markers["globs"])
-
- score = 0
-
- # Check exact name matches (higher confidence)
- exact_matches = [n for n in exacts if n in names]
- if exact_matches:
- score += len(exact_matches) * 10 # Each exact match adds 10 points
-
- # Check glob pattern matches (lower confidence)
- glob_matches = 0
- for f in names:
- if any(fnmatch(f, pat) for pat in patterns):
- glob_matches += 1
- if glob_matches:
- score += glob_matches * 5 # Each glob match adds 5 points
-
+ score = _calculate_language_score(names, markers)
  if score > 0:
  language_scores[lang] = score
  hits.append(lang)

- # Resolve conflicts using priority system
- if len(hits) > 1:
- # Sort by priority (descending) and then by confidence score (descending)
- hits.sort(
- key=lambda lang: (
- language_priorities.get(lang, 0),
- language_scores.get(lang, 0),
- ),
- reverse=True,
- )
-
- # Remove lower priority languages that conflict with higher priority ones
- resolved_hits = []
- for lang in hits:
- # Check if this language conflicts with any already resolved language
- conflicts = _get_language_conflicts(lang)
- if not any(conflict in resolved_hits for conflict in conflicts):
- resolved_hits.append(lang)
-
- hits = resolved_hits
-
- return hits
+ return _resolve_language_conflicts(hits, language_scores)


  def _get_language_conflicts(lang: Language) -> set[Language]:
@@ -181,8 +177,9 @@ def _scan_dir(

  if langs:
  language = langs[0] if len(langs) == 1 else Language.Unknown
- exclusions = [sp[0].relative_to(dir_path).as_posix() for sp in subprojects]
- exclusions = _optimize_exclusions(exclusions)
+ exclusions = _optimize_exclusions(
+ [sp[0].relative_to(dir_path).as_posix() for sp in subprojects]
+ )
  return [(dir_path, language, exclusions), *subprojects]

  # if there is no project here, simply propagate the subprojects
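The refactor above keeps the same detection rules (each exact filename match counts 10, each glob match counts 5, and priorities break ties between similar languages). A standalone sketch of that idea follows; the marker table and priority values are toy examples, not the library's CONFIG_MARKERS data.

# Standalone sketch of the scoring and priority resolution shown above;
# the marker table here is illustrative only.
from fnmatch import fnmatch

MARKERS = {
    "typescript": {"names": {"tsconfig.json"}, "globs": ["*.ts"]},
    "javascript": {"names": {"package.json"}, "globs": ["*.js"]},
}
PRIORITIES = {"typescript": 10, "javascript": 5}


def detect(files: list[str]) -> list[str]:
    names = {f.casefold() for f in files}
    scores: dict[str, int] = {}
    for lang, markers in MARKERS.items():
        # Exact filename matches weigh 10, glob matches weigh 5.
        exact = len(markers["names"] & names) * 10
        globbed = sum(1 for f in names if any(fnmatch(f, p) for p in markers["globs"])) * 5
        if exact + globbed:
            scores[lang] = exact + globbed
    # Higher priority first, confidence score as the tie-breaker.
    return sorted(scores, key=lambda lang: (PRIORITIES[lang], scores[lang]), reverse=True)


print(detect(["package.json", "tsconfig.json", "index.ts"]))
# ['typescript', 'javascript']: TypeScript ranks first on priority even though both score.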
@@ -1,14 +1,32 @@
  import logging
- from pathlib import Path

  import aiofiles
  import aiohttp
+ import anyio

  from .constants import DEFAULT_DOWNLOAD_BUFFER_SIZE

  LOGGER = logging.getLogger(__name__)


+ async def _write_response_to_file(
+ response: aiohttp.ClientResponse,
+ destination_path: str,
+ download_buffer_size: int,
+ ) -> bool:
+ async with aiofiles.open(destination_path, "wb") as file:
+ while True:
+ try:
+ chunk = await response.content.read(download_buffer_size)
+ except TimeoutError:
+ LOGGER.exception("Read timeout for path %s", destination_path)
+ return False
+ if not chunk:
+ break
+ await file.write(chunk)
+ return await anyio.Path(destination_path).exists()
+
+
  async def download_file(
  *,
  url: str,
@@ -26,15 +44,4 @@ async def download_file(
  )
  return False

- async with aiofiles.open(destination_path, "wb") as file:
- while True:
- try:
- chunk = await response.content.read(download_buffer_size)
- except TimeoutError:
- LOGGER.exception("Read timeout for path %s", destination_path)
- return False
- if not chunk:
- break
- await file.write(chunk)
-
- return Path(destination_path).exists()
+ return await _write_response_to_file(response, destination_path, download_buffer_size)
@@ -5,6 +5,7 @@ import tarfile
  import tempfile
  from pathlib import Path

+ import anyio
  from git import GitError
  from git.cmd import Git
  from git.repo import Repo
@@ -87,7 +88,7 @@ async def download_repo_from_s3(
  *,
  download_buffer_size: int = DEFAULT_DOWNLOAD_BUFFER_SIZE,
  ) -> bool:
- destination_path.parent.mkdir(parents=True, exist_ok=True)
+ await anyio.Path(destination_path.parent).mkdir(parents=True, exist_ok=True)
  with tempfile.TemporaryDirectory(prefix="fluidattacks_", ignore_cleanup_errors=True) as tmpdir:
  tmp_path = Path(tmpdir)
  file_path = tmp_path / "repo.tar.gz"
@@ -104,7 +105,7 @@ async def download_repo_from_s3(
  with tarfile.open(file_path, "r:gz") as tar_handler:
  _safe_extract_tar(tar_handler, tmp_path)

- extracted_dirs = [d for d in tmp_path.iterdir() if d.is_dir()]
+ extracted_dirs = [d async for d in anyio.Path(tmp_path).iterdir() if await d.is_dir()]
  if not extracted_dirs:
  LOGGER.error("No directory found in the extracted archive: %s", destination_path)
  return False
@@ -116,7 +117,7 @@ async def download_repo_from_s3(
  )
  extracted_dir = extracted_dirs[0]

- if destination_path.exists():
+ if await anyio.Path(destination_path).exists():
  shutil.rmtree(destination_path)

  shutil.move(extracted_dir, destination_path)
@@ -127,10 +128,10 @@ async def download_repo_from_s3(
  )
  return False

- if not await reset_repo(str(destination_path.absolute())):
+ if not await reset_repo(str(destination_path)):
  shutil.rmtree(destination_path, ignore_errors=True)
  return False

- delete_out_of_scope_files(git_ignore or [], str(destination_path.absolute()))
+ delete_out_of_scope_files(git_ignore or [], str(destination_path))

  return True
@@ -1,6 +1,6 @@
  import logging

- from fluidattacks_core.logging.sources.utils import get_environment
+ from fluidattacks_core.logging.sources.utils import get_env_var, get_environment


  class NoProductionFilter(logging.Filter):
@@ -11,3 +11,13 @@ class NoProductionFilter(logging.Filter):
  class ProductionOnlyFilter(logging.Filter):
  def filter(self, _record: logging.LogRecord) -> bool:
  return get_environment() == "production"
+
+
+ class ErrorOnlyFilter(logging.Filter):
+ def filter(self, _record: logging.LogRecord) -> bool:
+ return _record.levelno >= logging.ERROR
+
+
+ class EnabledTelemetryFilter(logging.Filter):
+ def filter(self, _record: logging.LogRecord) -> bool:
+ return str(get_env_var("TELEMETRY_OPT_OUT")).lower() != "true"
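The two filters added above gate telemetry: ErrorOnlyFilter passes only records at ERROR or higher, and EnabledTelemetryFilter passes records unless TELEMETRY_OPT_OUT is set to "true". A hedged sketch, attaching them to a plain stdlib handler; it assumes they are importable from fluidattacks_core.logging.filters, as in the handlers.py import further below.

# Sketch only: shows the gating behaviour of the new filters on a stdlib handler.
import logging

from fluidattacks_core.logging.filters import EnabledTelemetryFilter, ErrorOnlyFilter

handler = logging.StreamHandler()
handler.addFilter(ErrorOnlyFilter())         # drops anything below ERROR
handler.addFilter(EnabledTelemetryFilter())  # drops everything when TELEMETRY_OPT_OUT=true

logger = logging.getLogger("example")
logger.addHandler(handler)
logger.warning("filtered out")  # below ERROR, suppressed by ErrorOnlyFilter
logger.error("forwarded")       # passes both filters unless telemetry is opted out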
@@ -7,7 +7,12 @@ from typing import TextIO

  import simplejson as json

- from fluidattacks_core.logging.filters import NoProductionFilter, ProductionOnlyFilter
+ from fluidattacks_core.logging.filters import (
+ EnabledTelemetryFilter,
+ ErrorOnlyFilter,
+ NoProductionFilter,
+ ProductionOnlyFilter,
+ )
  from fluidattacks_core.logging.formatters import ColorfulFormatter, CustomJsonFormatter


@@ -114,7 +119,7 @@ class DatadogLogsHandler(HTTPHandler):


  class TelemetryAsyncHandler(CustomQueueHandler):
- """Logging handler for sending logs to telemetry services asynchronously."""
+ """Logging handler for sending logs to telemetry services asynchronously.
+
+ To enable telemetry, set the `TELEMETRY_OPT_OUT` environment variable different from `true`.
+
+ Includes:
+ - Filters: `ErrorOnlyFilter`, `EnabledTelemetryFilter`
+ - Formatter: `CustomJsonFormatter`
+ """

  def __init__(self, service: str, source: str, dd_client_token: str) -> None:
  """Initialize the TelemetryAsyncHandler.
@@ -125,6 +137,9 @@ class TelemetryAsyncHandler(CustomQueueHandler):
  dd_client_token: The Datadog Client Token.

  """
+ self.addFilter(ErrorOnlyFilter())
+ self.addFilter(EnabledTelemetryFilter())
+
  handler = DatadogLogsHandler(service, source, dd_client_token)
  handler.setFormatter(CustomJsonFormatter())

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: fluidattacks-core
- Version: 5.0.1
+ Version: 6.0.0
  Summary: Fluid Attacks Core Library
  Author-email: Development <development@fluidattacks.com>
  License: MPL-2.0
@@ -14,10 +14,10 @@ Requires-Python: >=3.11
  Description-Content-Type: text/markdown
  Provides-Extra: aio
  Requires-Dist: uvloop>=0.21.0; extra == "aio"
- Provides-Extra: authz
  Provides-Extra: git
  Requires-Dist: aiohttp>=3.10.0; extra == "git"
  Requires-Dist: aiofiles>=23.2.1; extra == "git"
+ Requires-Dist: anyio>=4.7.0; extra == "git"
  Requires-Dist: boto3>=1.34; extra == "git"
  Requires-Dist: botocore>=1.40.18; extra == "git"
  Requires-Dist: GitPython>=3.1.41; extra == "git"
@@ -48,6 +48,7 @@ Requires-Dist: tree-sitter>=0.25.2; extra == "serializers"
  Requires-Dist: tree-sitter-dart-orchard==0.3.2; extra == "serializers"
  Provides-Extra: cpg
  Requires-Dist: aioboto3>=13.3.0; extra == "cpg"
+ Requires-Dist: anyio>=4.7.0; extra == "cpg"
  Requires-Dist: platformdirs>=4.3.8; extra == "cpg"
  Provides-Extra: filesystem
  Provides-Extra: sarif
@@ -55,7 +56,6 @@ Requires-Dist: pydantic>=2.12.3; extra == "sarif"
  Provides-Extra: semver
  Provides-Extra: all
  Requires-Dist: fluidattacks-core[aio]; extra == "all"
- Requires-Dist: fluidattacks-core[authz]; extra == "all"
  Requires-Dist: fluidattacks-core[cpg]; extra == "all"
  Requires-Dist: fluidattacks-core[filesystem]; extra == "all"
  Requires-Dist: fluidattacks-core[git]; extra == "all"
@@ -3,14 +3,11 @@ fluidattacks_core/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fluidattacks_core/aio/__init__.py,sha256=Xn4sNbAYtqLPV-em8vWVQD5eRH3IigDpAVzHnky0onY,399
  fluidattacks_core/aio/processes.py,sha256=NXsQrlj2BZNqhKO6mRHqNWQEUI2TpzHvfoNUOIH0wsU,833
  fluidattacks_core/aio/runners.py,sha256=rJYtrshcWawhWWWagRn6Rsre_BsmAECLWG0FYOheLJQ,250
- fluidattacks_core/aio/tasks.py,sha256=0NaD0Kg15d1bjwhnIbAEe_pzcSRpFTdEw1nAolXniW8,6171
+ fluidattacks_core/aio/tasks.py,sha256=bZ-d0BP4ptCK9seQiXY4nFnFGGK3ZhIDQTM52k-yMDc,6487
  fluidattacks_core/aio/threads.py,sha256=JrnAv7_jAlDDj2AI5MviWvxU8EHQVpW-c7uCtst-2yY,556
- fluidattacks_core/authz/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- fluidattacks_core/authz/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- fluidattacks_core/authz/types.py,sha256=tngC-eq23EJlp6q5eW-uE3Xst5dBhsNcu3wfc5gsfF8,1558
- fluidattacks_core/cpg/__init__.py,sha256=W75X2p3ANMYYQC2l8qpb845kOdBhWrZLHEGgxj2xnLk,4961
+ fluidattacks_core/cpg/__init__.py,sha256=De48KbslOzXGJC2CYrPwyIO9gBQJJRb1I7mVgSL8PwU,5010
  fluidattacks_core/cpg/joern.py,sha256=mjFrtWkIwE2-_uDWisWXwOEwXUaB0t24eKkMmbN45wE,2703
- fluidattacks_core/filesystem/__init__.py,sha256=PkM5NvhfCrNk5cvum26-26CKUTz1yBqy7ZBz2Ljmw8w,8346
+ fluidattacks_core/filesystem/__init__.py,sha256=nLYLoIDAP38cevEWY56_spSM-gQ0k4RmfecW6gLQ2cE,8231
  fluidattacks_core/filesystem/defaults.py,sha256=tGGl_lXIOcU7N5GkiYyVaC23xRzEtzMR1E9hhEiXEn0,10596
  fluidattacks_core/git/__init__.py,sha256=GkFW3YIY-SOIUKW3cJE8r-m8qMat1kZIzDy_jGggyV4,8465
  fluidattacks_core/git/classes.py,sha256=vgCVOUF6tqeW0lKtD9giCNFQtzRit44bnu5qOAx7qCI,579
@@ -18,8 +15,8 @@ fluidattacks_core/git/clone.py,sha256=alvidqUITrtTkvv4Ur9djI4Ch37QdhVWtHupMmV1eM
  fluidattacks_core/git/codecommit_utils.py,sha256=Ec1Ymk9F1DTTyRTdqrni77UUktGQgQB_jSq5n3wWy7Q,3422
  fluidattacks_core/git/constants.py,sha256=dTFn5bLkJ-VG-954MVJVHXxa4UCnvaurSM7GY73BiWk,49
  fluidattacks_core/git/delete_files.py,sha256=_EfPFl61tRK4CyQHL2QtvqCQQkQ38RTXVP0Db_d6rWg,1189
- fluidattacks_core/git/download_file.py,sha256=0W0jhUiA6V7LRbAlbwUU3LbQiI1-UPqno7A70s2eo8s,1296
- fluidattacks_core/git/download_repo.py,sha256=GiZT0-kgqLTAg7uqV09P6V0AXyyPrxCe5QN3Fhp2iaE,4114
+ fluidattacks_core/git/download_file.py,sha256=hPuZhDQaYoG0ROF__CSGVV0UUWOK3771AnU1xXtu-4o,1450
+ fluidattacks_core/git/download_repo.py,sha256=F7oDjacBUtysCdPvih8T28RbQq4ZEzQ8nrCdVXk03zk,4165
  fluidattacks_core/git/https_utils.py,sha256=V2Z9ClFq9F3sUvTqc_h6uf2PRdEzD-6MuC9zZJHy7_0,7036
  fluidattacks_core/git/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fluidattacks_core/git/remote.py,sha256=cPuyBMHeGrzRkEjroB6zlRLMA-QH2gIyIkGJNyf8wZc,1255
@@ -30,9 +27,9 @@ fluidattacks_core/http/__init__.py,sha256=3Zz90L6e3_z-M-8Bvk_53rv-CFhPThkRGXnxCi
  fluidattacks_core/http/client.py,sha256=jIhtGU2cKi5GZbxHq8WJOPgnk0beScRtxlz9tBSaKuw,2454
  fluidattacks_core/http/validations.py,sha256=h10Hr906KJqda1rJJb8eOqk1Xyyz81lAJ1glXeae4kM,3766
  fluidattacks_core/logging/__init__.py,sha256=y6D12LrvrsMwaveQn5C4Em3RyeS6mP6E9fRpq7gqS4o,1546
- fluidattacks_core/logging/filters.py,sha256=v03EWIbCGLKc6sdSQnO7ealxMdPzcJhd20rGr1PBZrE,388
+ fluidattacks_core/logging/filters.py,sha256=jEJkrvFeoQxCJ9vI8rd4N1FWXTbPdiTlv-7kriB8dQ0,725
  fluidattacks_core/logging/formatters.py,sha256=SCA4k9XvHJknmkTv63uiCBU31VrWOsgA7llEXYYj1uQ,6063
- fluidattacks_core/logging/handlers.py,sha256=7fjCG0AqwlF0PRmH6MwkYRO1qdVoRjyYAAwML3A5lXY,4658
+ fluidattacks_core/logging/handlers.py,sha256=X-brVAc-QOqSFiEKqbBmxirkLLAaZDi5ID6cYzKdoHg,5028
  fluidattacks_core/logging/presets.py,sha256=KU6d6PI61kklJ_o7NgAzU1DahEPM0KwwjTYHo2naHv8,939
  fluidattacks_core/logging/utils.py,sha256=jbAcwr0L6iPsId3dYEp-vAbtFex2UUU2l2iIk1F60BE,1115
  fluidattacks_core/logging/sources/__init__.py,sha256=bUOsNhmBq9WHLvPxCW3G0Az4D8Wunp0mCCqEYlCsjsI,5937
@@ -44,7 +41,7 @@ fluidattacks_core/semver/match_versions.py,sha256=10Vc3aS5Q2WVeM6UYUNiAo8C4E7zFT
  fluidattacks_core/serializers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fluidattacks_core/serializers/snippet.py,sha256=e520pZHC-fsNuYVNY30A7TcSugvUlFL6xdr74j5aCDM,12780
  fluidattacks_core/serializers/syntax.py,sha256=eSpoAmWLaVbd8tQ8ghHIvjJlCtieEf_IimonqLpIe3Y,16020
- fluidattacks_core-5.0.1.dist-info/METADATA,sha256=E52TwLj71JPrMUmgZ24JrHHaDPhOH_uTbWAd9dyTPfw,3453
- fluidattacks_core-5.0.1.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
- fluidattacks_core-5.0.1.dist-info/top_level.txt,sha256=m49ZyZ2zPQmDBxkSpjb20wr-ZbGVXdOMFBZrDiP5Lb8,18
- fluidattacks_core-5.0.1.dist-info/RECORD,,
+ fluidattacks_core-6.0.0.dist-info/METADATA,sha256=bnEqOVSvM_A5S7EEOdzKCFsgeMVREbFHuXfCCRaIhV8,3463
+ fluidattacks_core-6.0.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ fluidattacks_core-6.0.0.dist-info/top_level.txt,sha256=m49ZyZ2zPQmDBxkSpjb20wr-ZbGVXdOMFBZrDiP5Lb8,18
+ fluidattacks_core-6.0.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.10.1)
+ Generator: setuptools (80.10.2)
  Root-Is-Purelib: true
  Tag: py3-none-any

File without changes
File without changes
@@ -1,75 +0,0 @@
- from __future__ import (
- annotations,
- )
-
- from dataclasses import (
- dataclass,
- )
- from typing import (
- Literal,
- TypedDict,
- )
-
- AuthzType = Literal[
- "admin",
- "user",
- "organization",
- "group",
- "root",
- ]
-
- AuthzRelation = Literal[
- "admin",
- "parent",
- "customer_manage",
- "customer_write",
- "customer_read",
- "fluid_manage",
- "fluid_write",
- "fluid_read",
- "manage",
- "write",
- "read",
- ]
-
-
- @dataclass(frozen=True, kw_only=True)
- class AuthzConditionFluidEmail:
- email: str
-
- class _ReturnTypeContext(TypedDict):
- email: str
-
- class _ReturnType(TypedDict):
- name: Literal["fluid_email"]
- context: AuthzConditionFluidEmail._ReturnTypeContext
-
- def __call__(self) -> _ReturnType:
- return {
- "name": "fluid_email",
- "context": {"email": self.email},
- }
-
-
- @dataclass(frozen=True, kw_only=True)
- class AuthzTuple:
- user_type: AuthzType
- user: str
- relation: AuthzRelation
- object_type: AuthzType
- object: str
- condition: AuthzConditionFluidEmail | None = None
-
- class _ReturnType(TypedDict):
- user: str
- relation: AuthzRelation
- object: str
- condition: AuthzConditionFluidEmail._ReturnType | None
-
- def __call__(self) -> _ReturnType:
- return {
- "user": f"{self.user_type}:{self.user}",
- "relation": self.relation,
- "object": f"{self.object_type}:{self.object}",
- "condition": self.condition() if self.condition else None,
- }