txt2detection 1.0.7__tar.gz → 1.0.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of txt2detection might be problematic. See the registry's advisory page for more details.

Files changed (55)
  1. {txt2detection-1.0.7 → txt2detection-1.0.8}/PKG-INFO +2 -1
  2. {txt2detection-1.0.7 → txt2detection-1.0.8}/pyproject.toml +2 -1
  3. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/src/test_bundler.py +49 -3
  4. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/bundler.py +19 -6
  5. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/models.py +76 -50
  6. {txt2detection-1.0.7 → txt2detection-1.0.8}/.env.example +0 -0
  7. {txt2detection-1.0.7 → txt2detection-1.0.8}/.env.markdown +0 -0
  8. {txt2detection-1.0.7 → txt2detection-1.0.8}/.github/workflows/create-release.yml +0 -0
  9. {txt2detection-1.0.7 → txt2detection-1.0.8}/.github/workflows/run-tests.yml +0 -0
  10. {txt2detection-1.0.7 → txt2detection-1.0.8}/.gitignore +0 -0
  11. {txt2detection-1.0.7 → txt2detection-1.0.8}/LICENSE +0 -0
  12. {txt2detection-1.0.7 → txt2detection-1.0.8}/README.md +0 -0
  13. {txt2detection-1.0.7 → txt2detection-1.0.8}/config/detection_languages.yaml +0 -0
  14. {txt2detection-1.0.7 → txt2detection-1.0.8}/docs/README.md +0 -0
  15. {txt2detection-1.0.7 → txt2detection-1.0.8}/docs/txt2detection.png +0 -0
  16. {txt2detection-1.0.7 → txt2detection-1.0.8}/requirements.txt +0 -0
  17. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/CVE-2024-56520.txt +0 -0
  18. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/EC2-exfil.txt +0 -0
  19. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/observables.txt +0 -0
  20. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/sigma-rule-custom-tags.yml +0 -0
  21. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/sigma-rule-existing-related.yml +0 -0
  22. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/sigma-rule-master.yml +0 -0
  23. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/sigma-rule-no-author.yml +0 -0
  24. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/sigma-rule-no-date.yml +0 -0
  25. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/sigma-rule-no-description.yml +0 -0
  26. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/sigma-rule-no-level.yml +0 -0
  27. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/sigma-rule-no-license.yml +0 -0
  28. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/sigma-rule-no-status.yml +0 -0
  29. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/sigma-rule-no-tags.yml +0 -0
  30. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/sigma-rule-no-title.yml +0 -0
  31. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/sigma-rule-observables.yml +0 -0
  32. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/files/sigma-rule-one-date.yml +0 -0
  33. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/manual-tests/README.md +0 -0
  34. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/src/__init__.py +0 -0
  35. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/src/requirements.txt +0 -0
  36. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/src/test_main.py +0 -0
  37. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/src/test_main_run_txt2detction.py +0 -0
  38. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/src/test_models.py +0 -0
  39. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/src/test_observables.py +0 -0
  40. {txt2detection-1.0.7 → txt2detection-1.0.8}/tests/src/test_utils.py +0 -0
  41. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/__init__.py +0 -0
  42. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/__main__.py +0 -0
  43. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/ai_extractor/__init__.py +0 -0
  44. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/ai_extractor/anthropic.py +0 -0
  45. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/ai_extractor/base.py +0 -0
  46. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/ai_extractor/deepseek.py +0 -0
  47. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/ai_extractor/gemini.py +0 -0
  48. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/ai_extractor/openai.py +0 -0
  49. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/ai_extractor/openrouter.py +0 -0
  50. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/ai_extractor/prompts.py +0 -0
  51. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/ai_extractor/utils.py +0 -0
  52. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/credential_checker.py +0 -0
  53. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/observables.py +0 -0
  54. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection/utils.py +0 -0
  55. {txt2detection-1.0.7 → txt2detection-1.0.8}/txt2detection.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: txt2detection
3
- Version: 1.0.7
3
+ Version: 1.0.8
4
4
  Summary: A command line tool that takes a txt file containing threat intelligence and turns it into a detection rule.
5
5
  Project-URL: Homepage, https://github.com/muchdogesec/txt2detection
6
6
  Project-URL: Issues, https://github.com/muchdogesec/txt2detection/issues
@@ -21,6 +21,7 @@ Requires-Dist: python-slugify
21
21
  Requires-Dist: pyyaml
22
22
  Requires-Dist: requests>=2.31.0; python_version >= '3.7'
23
23
  Requires-Dist: stix2
24
+ Requires-Dist: stix2extensions
24
25
  Requires-Dist: tqdm>=4.66.4; python_version >= '3.7'
25
26
  Requires-Dist: validators>=0.34.0
26
27
  Provides-Extra: anthropic
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
4
4
 
5
5
  [project]
6
6
  name = "txt2detection"
7
- version = "1.0.7"
7
+ version = "1.0.8"
8
8
  authors = [
9
9
  { name = "dogesec" }
10
10
  ]
@@ -31,6 +31,7 @@ dependencies = [
31
31
  "validators>=0.34.0",
32
32
  "llama-index-core>=0.12.42",
33
33
  'llama-index-llms-openai>=0.4.5',
34
+ 'stix2extensions',
34
35
  ]
35
36
  [project.urls]
36
37
  Homepage = "https://github.com/muchdogesec/txt2detection"
@@ -16,9 +16,9 @@ def dummy_detection():
16
16
  detection = SigmaRuleDetection(
17
17
  title="Test Detection",
18
18
  description="Detects something suspicious.",
19
- detection=dict(condition="selection1", selection1=dict(ip='1.1.1.1')),
19
+ detection=dict(condition="selection1", selection1=dict(ip="1.1.1.1")),
20
20
  tags=["tlp.red", "sigma.execution"],
21
- id=str(uuid.uuid4()),
21
+ id="cd7ff0b1-fbf3-4c2d-ba70-5d127eb8b4be",
22
22
  external_references=[],
23
23
  logsource=dict(
24
24
  category="network-connection",
@@ -36,6 +36,8 @@ def bundler_instance():
36
36
  tlp_level="red",
37
37
  description="This is a test report.",
38
38
  labels=["tlp.red", "test.test-var"],
39
+ created=datetime(2025, 1, 1),
40
+ report_id="74e36652-00f5-4dca-bf10-9f02fc996dcc",
39
41
  )
40
42
 
41
43
 
@@ -229,7 +231,7 @@ def test_bundler_generates_valid_bundle(dummy_detection):
229
231
 
230
232
  def test_bundle_detections(dummy_detection, bundler_instance):
231
233
  container = DetectionContainer(success=False, detections=[])
232
- with patch.object(Bundler, 'add_rule_indicator') as mock_add_rule_indicator:
234
+ with patch.object(Bundler, "add_rule_indicator") as mock_add_rule_indicator:
233
235
  bundler_instance.bundle_detections(container)
234
236
  mock_add_rule_indicator.assert_not_called()
235
237
  mock_add_rule_indicator.reset_mock()
@@ -238,3 +240,47 @@ def test_bundle_detections(dummy_detection, bundler_instance):
238
240
  container.success = True
239
241
  bundler_instance.bundle_detections(container)
240
242
  mock_add_rule_indicator.assert_called_once_with(detection)
243
+
244
+
245
+ def test_bundle_detections__creates_log_source(dummy_detection, bundler_instance):
246
+ dummy_detection.detection_id = "d73e1632-c541-4b09-8281-95dc7f9c5782"
247
+ bundler_instance.add_rule_indicator(dummy_detection)
248
+ objects = [
249
+ obj
250
+ for obj in bundler_instance.bundle_dict["objects"]
251
+ if obj["id"]
252
+ in (
253
+ "data-source--f078a18f-0f04-5fde-b6cd-a5af90b6346b",
254
+ "relationship--fe0a3715-6a21-5472-840f-39ea9c61ee83",
255
+ )
256
+ ]
257
+ assert objects == [
258
+ {
259
+ "type": "data-source",
260
+ "spec_version": "2.1",
261
+ "id": "data-source--f078a18f-0f04-5fde-b6cd-a5af90b6346b",
262
+ "category": "network-connection",
263
+ "product": "firewall",
264
+ "extensions": {
265
+ "extension-definition--afeeb724-bce2-575e-af3d-d705842ea84b": {
266
+ "extension_type": "new-sco"
267
+ }
268
+ },
269
+ },
270
+ {
271
+ "type": "relationship",
272
+ "spec_version": "2.1",
273
+ "id": "relationship--fe0a3715-6a21-5472-840f-39ea9c61ee83",
274
+ "created_by_ref": "identity--a4d70b75-6f4a-5d19-9137-da863edd33d7",
275
+ "created": "2025-01-01T00:00:00.000Z",
276
+ "modified": "2025-01-01T00:00:00.000Z",
277
+ "relationship_type": "related-to",
278
+ "description": "Test Detection is created from log-source {category=network-connection, product=firewall}",
279
+ "source_ref": "indicator--d73e1632-c541-4b09-8281-95dc7f9c5782",
280
+ "target_ref": "data-source--f078a18f-0f04-5fde-b6cd-a5af90b6346b",
281
+ "object_marking_refs": [
282
+ "marking-definition--e828b379-4e03-4974-9ac4-e53a884c97c1",
283
+ "marking-definition--a4d70b75-6f4a-5d19-9137-da863edd33d7",
284
+ ],
285
+ },
286
+ ]
@@ -105,7 +105,7 @@ class Bundler:
105
105
 
106
106
  self.job_id = f"report--{self.uuid}"
107
107
  self.external_refs = (external_refs or []) + [dict(source_name='txt2detection', url=url, description='txt2detection-reference') for url in self.reference_urls]
108
-
108
+
109
109
  self.report = Report(
110
110
  created_by_ref=self.identity.id,
111
111
  name=name,
@@ -177,6 +177,7 @@ class Bundler:
177
177
  "external_id": hashlib.md5(indicator['pattern'].encode()).hexdigest()
178
178
  }
179
179
  )
180
+ logsource = detection.make_data_source()
180
181
 
181
182
  logger.debug(f"===== rule {detection.detection_id} =====")
182
183
  logger.debug("```yaml\n"+indicator['pattern']+"\n```")
@@ -191,24 +192,30 @@ class Bundler:
191
192
  self.add_relation(indicator, obj)
192
193
 
193
194
  self.add_ref(parse_stix(indicator, allow_custom=True), append_report=True)
195
+ print('everywhere')
196
+ self.add_ref(logsource, append_report=True)
197
+ print('here')
198
+ self.add_relation(indicator, logsource, description=f'{indicator["name"]} is created from {make_logsouce_string(logsource)}')
199
+ print('there')
194
200
 
195
201
  for ob_type, ob_value in set(observables.find_stix_observables(detection.detection)):
196
202
  try:
197
203
  obj = observables.to_stix_object(ob_type, ob_value)
198
204
  self.add_ref(obj)
199
- self.add_relation(indicator, obj, 'detects', target_name=ob_value)
205
+ self.add_relation(indicator, obj, 'related-to', target_name=ob_value)
200
206
  except:
201
207
  logger.exception(f"failed to process observable {ob_type}/{ob_value}")
202
208
 
203
-
204
- def add_relation(self, indicator, target_object, relationship_type='detects', target_name=None):
209
+ def add_relation(self, indicator, target_object, relationship_type='related-to', target_name=None, description=None):
205
210
  ext_refs = []
206
211
 
207
212
  with contextlib.suppress(Exception):
208
213
  indicator['external_references'].append(target_object['external_references'][0])
209
214
  ext_refs = [target_object['external_references'][0]]
210
215
 
211
- target_name = target_name or f"{target_object['external_references'][0]['external_id']} ({target_object['name']})"
216
+ if not description:
217
+ target_name = target_name or f"{target_object['external_references'][0]['external_id']} ({target_object['name']})"
218
+ description = f"{indicator['name']} {relationship_type} {target_name}"
212
219
 
213
220
  rel = Relationship(
214
221
  id="relationship--" + str(
@@ -220,7 +227,7 @@ class Bundler:
220
227
  target_ref=target_object['id'],
221
228
  relationship_type=relationship_type,
222
229
  created_by_ref=self.report.created_by_ref,
223
- description=f"{indicator['name']} {relationship_type} {target_name}",
230
+ description=description,
224
231
  created=self.report.created,
225
232
  modified=self.report.modified,
226
233
  object_marking_refs=self.report.object_marking_refs,
@@ -281,3 +288,9 @@ class Bundler:
281
288
  return
282
289
  for d in container.detections:
283
290
  self.add_rule_indicator(d)
291
+
292
+ def make_logsouce_string(source: dict):
293
+ d = [f'{k}={v}' for k, v in source.items()
294
+ if k in ['product', 'service', 'category']]
295
+ d_str = ', '.join(d)
296
+ return 'log-source {'+d_str+'}'
@@ -8,6 +8,7 @@ from slugify import slugify
8
8
  from datetime import date as dt_date
9
9
  from typing import Any, ClassVar, List, Literal, Optional, Union
10
10
  from uuid import UUID
11
+ from stix2extensions.data_source import DataSource
11
12
 
12
13
  import jsonschema
13
14
  from pydantic import BaseModel, Field, computed_field, field_validator
@@ -124,11 +125,11 @@ class TLP_LEVEL(enum.Enum):
124
125
  ]
125
126
 
126
127
  @classmethod
127
- def get(cls, level: 'str|TLP_LEVEL'):
128
+ def get(cls, level: "str|TLP_LEVEL"):
128
129
  if isinstance(level, cls):
129
130
  return level
130
131
  level = level.lower()
131
- level = level.replace('+', '_').replace('-', '_')
132
+ level = level.replace("+", "_").replace("-", "_")
132
133
  if level not in cls.levels():
133
134
  raise Exception(f"unsupported tlp level: `{level}`")
134
135
  return cls.levels()[level]
@@ -137,6 +138,7 @@ class TLP_LEVEL(enum.Enum):
137
138
  def name(self):
138
139
  return super().name.lower()
139
140
 
141
+
140
142
  class Statuses(enum.StrEnum):
141
143
  stable = enum.auto()
142
144
  test = enum.auto()
@@ -144,6 +146,7 @@ class Statuses(enum.StrEnum):
144
146
  deprecated = enum.auto()
145
147
  unsupported = enum.auto()
146
148
 
149
+
147
150
  class Level(enum.StrEnum):
148
151
  informational = enum.auto()
149
152
  low = enum.auto()
@@ -151,6 +154,7 @@ class Level(enum.StrEnum):
151
154
  high = enum.auto()
152
155
  critical = enum.auto()
153
156
 
157
+
154
158
  class SigmaTag(str):
155
159
  @classmethod
156
160
  def __get_pydantic_core_schema__(
@@ -158,31 +162,35 @@ class SigmaTag(str):
158
162
  _source: type[Any],
159
163
  _handler,
160
164
  ) -> core_schema.CoreSchema:
161
- return core_schema.no_info_after_validator_function(cls._validate, core_schema.str_schema())
165
+ return core_schema.no_info_after_validator_function(
166
+ cls._validate, core_schema.str_schema()
167
+ )
162
168
 
163
169
  @classmethod
164
- def __get_pydantic_json_schema__(
165
- cls, core_schema: core_schema.CoreSchema, handler
166
- ):
170
+ def __get_pydantic_json_schema__(cls, core_schema: core_schema.CoreSchema, handler):
167
171
  field_schema = handler(core_schema)
168
- field_schema.update(type='string', pattern=TAG_PATTERN.pattern, format='sigma-tag')
172
+ field_schema.update(
173
+ type="string", pattern=TAG_PATTERN.pattern, format="sigma-tag"
174
+ )
169
175
  return field_schema
170
176
 
171
177
  @classmethod
172
178
  def _validate(cls, input_value: str, /) -> str:
173
179
  if not TAG_PATTERN.match(input_value):
174
180
  raise PydanticCustomError(
175
- 'value_error',
176
- 'value is not a valid SIGMA tag: {reason}',
177
- {'reason': f'Must be in format namespace.value and match pattern {TAG_PATTERN.pattern}'},
178
- )
181
+ "value_error",
182
+ "value is not a valid SIGMA tag: {reason}",
183
+ {
184
+ "reason": f"Must be in format namespace.value and match pattern {TAG_PATTERN.pattern}"
185
+ },
186
+ )
179
187
  return input_value
180
-
188
+
189
+
181
190
  class RelatedRule(BaseModel):
182
191
  id: UUID
183
- type: Literal[
184
- "derived", "obsolete", "merged", "renamed", "similar"
185
- ]
192
+ type: Literal["derived", "obsolete", "merged", "renamed", "similar"]
193
+
186
194
 
187
195
  class BaseDetection(BaseModel):
188
196
  title: str
@@ -195,7 +203,9 @@ class BaseDetection(BaseModel):
195
203
  level: Level
196
204
  _custom_id = None
197
205
  _extra_data: dict
198
- sigma_json_schema: ClassVar = requests.get("https://github.com/SigmaHQ/sigma-specification/raw/refs/heads/main/json-schema/sigma-detection-rule-schema.json").json()
206
+ sigma_json_schema: ClassVar = requests.get(
207
+ "https://github.com/SigmaHQ/sigma-specification/raw/refs/heads/main/json-schema/sigma-detection-rule-schema.json"
208
+ ).json()
199
209
 
200
210
  def model_post_init(self, __context):
201
211
  self.tags = self.tags or []
@@ -213,17 +223,16 @@ class BaseDetection(BaseModel):
213
223
  @property
214
224
  def tlp_level(self):
215
225
  return tlp_from_tags(self.tags)
216
-
226
+
217
227
  @tlp_level.setter
218
228
  def tlp_level(self, level):
219
229
  set_tlp_level_in_tags(self.tags, level)
220
-
230
+
221
231
  def set_labels(self, labels):
222
232
  self.tags.extend(labels)
223
233
 
224
234
  def set_extra_data_from_bundler(self, bundler: "Bundler"):
225
- raise NotImplementedError('this class should no longer be in use')
226
-
235
+ raise NotImplementedError("this class should no longer be in use")
227
236
 
228
237
  def make_rule(self, bundler: "Bundler"):
229
238
  self.set_extra_data_from_bundler(bundler)
@@ -232,19 +241,17 @@ class BaseDetection(BaseModel):
232
241
  rule = dict(
233
242
  id=self.detection_id,
234
243
  **self.model_dump(
235
- exclude=["indicator_types", "id"],
236
- mode="json",
237
- by_alias=True
244
+ exclude=["indicator_types", "id"], mode="json", by_alias=True
238
245
  ),
239
246
  )
240
247
  for k, v in list(rule.items()):
241
248
  if not v:
242
249
  rule.pop(k, None)
243
-
250
+
244
251
  self.validate_rule_with_json_schema(rule)
245
- if getattr(self, 'date', 0):
252
+ if getattr(self, "date", 0):
246
253
  rule.update(date=self.date)
247
- if getattr(self, 'modified', 0):
254
+ if getattr(self, "modified", 0):
248
255
  rule.update(modified=self.modified)
249
256
  return yaml.dump(rule, sort_keys=False, indent=4)
250
257
 
@@ -253,13 +260,13 @@ class BaseDetection(BaseModel):
253
260
  rule,
254
261
  self.sigma_json_schema,
255
262
  )
256
-
263
+
257
264
  @property
258
265
  def external_references(self):
259
266
  refs = []
260
- for attr in ['level', 'status', 'license']:
267
+ for attr in ["level", "status", "license"]:
261
268
  if attr_val := getattr(self, attr, None):
262
- refs.append(dict(source_name=f'sigma-{attr}', description=attr_val))
269
+ refs.append(dict(source_name=f"sigma-{attr}", description=attr_val))
263
270
  return refs
264
271
 
265
272
  @property
@@ -280,19 +287,34 @@ class BaseDetection(BaseModel):
280
287
  retval.append(namespace.upper() + "-" + label_id)
281
288
  return retval
282
289
 
290
+ def make_data_source(self):
291
+ return DataSource(
292
+ category=self.logsource.get("category"),
293
+ product=self.logsource.get("product"),
294
+ service=self.logsource.get("service"),
295
+ definition=self.logsource.get("definition"),
296
+ )
297
+
283
298
 
284
299
  class AIDetection(BaseDetection):
285
300
  indicator_types: list[str] = Field(default_factory=list)
286
-
301
+
287
302
  def to_sigma_rule_detection(self, bundler):
288
303
  rule_dict = {
289
- **self.model_dump(exclude=['indicator_types']),
290
- **dict(date=bundler.report.created.date(), modified=bundler.report.modified.date(), id=uuid.uuid4())
304
+ **self.model_dump(exclude=["indicator_types"]),
305
+ **dict(
306
+ date=bundler.report.created.date(),
307
+ modified=bundler.report.modified.date(),
308
+ id=uuid.uuid4(),
309
+ ),
291
310
  }
292
311
  try:
293
312
  return SigmaRuleDetection.model_validate(rule_dict)
294
313
  except Exception as e:
295
- raise ValueError(dict(message='validate ai output failed', error=e, content=rule_dict))
314
+ raise ValueError(
315
+ dict(message="validate ai output failed", error=e, content=rule_dict)
316
+ )
317
+
296
318
 
297
319
  class SigmaRuleDetection(BaseDetection):
298
320
  title: str
@@ -319,58 +341,61 @@ class SigmaRuleDetection(BaseDetection):
319
341
  @property
320
342
  def detection_id(self):
321
343
  return str(self.id)
322
-
344
+
323
345
  @property
324
346
  def indicator_types(self):
325
347
  return self._indicator_types
326
-
348
+
327
349
  @indicator_types.setter
328
350
  def indicator_types(self, types):
329
351
  self._indicator_types = types
330
-
352
+
331
353
  @detection_id.setter
332
354
  def detection_id(self, new_id):
333
355
  if self.id and str(self.id) != str(new_id):
334
356
  self.related = self.related or []
335
357
  self.related.append(RelatedRule(id=self.id, type="renamed"))
336
358
  self.id = new_id
337
-
338
- @field_validator('tags', mode='after')
359
+
360
+ @field_validator("tags", mode="after")
339
361
  @classmethod
340
362
  def validate_tlp(cls, tags: list[str]):
341
363
  tlps = []
342
364
  for tag in tags:
343
- if tag.startswith('tlp.'):
365
+ if tag.startswith("tlp."):
344
366
  tlps.append(tag)
345
367
  if len(tlps) > 1:
346
- raise ValueError(f'tag must not contain more than one tag in tlp namespace. Got {tlps}')
368
+ raise ValueError(
369
+ f"tag must not contain more than one tag in tlp namespace. Got {tlps}"
370
+ )
347
371
  return tags
348
-
349
- @field_validator('modified', mode='after')
372
+
373
+ @field_validator("modified", mode="after")
350
374
  @classmethod
351
375
  def validate_modified(cls, modified, info):
352
- if info.data.get('date') == modified:
376
+ if info.data.get("date") == modified:
353
377
  return None
354
378
  return modified
355
-
379
+
356
380
  def set_extra_data_from_bundler(self, bundler: "Bundler"):
357
381
  if not bundler:
358
382
  return
359
-
383
+
360
384
  if not self.date:
361
385
  from .utils import as_date
386
+
362
387
  self.date = as_date(bundler.created)
363
-
388
+
364
389
  self.set_labels(bundler.labels)
365
390
  self.tlp_level = bundler.tlp_level.name
366
391
  self.author = bundler.report.created_by_ref
367
392
  self.license = bundler.license
368
393
  self.references = bundler.reference_urls
369
394
 
395
+
370
396
  class DetectionContainer(BaseModel):
371
397
  success: bool
372
- detections: list[Union[BaseDetection , AIDetection, SigmaRuleDetection]]
373
-
398
+ detections: list[Union[BaseDetection, AIDetection, SigmaRuleDetection]]
374
399
 
375
400
 
376
401
  def tlp_from_tags(tags: list[SigmaTag]):
@@ -382,10 +407,11 @@ def tlp_from_tags(tags: list[SigmaTag]):
382
407
  return tlp_level
383
408
  return None
384
409
 
410
+
385
411
  def set_tlp_level_in_tags(tags: list[SigmaTag], level):
386
412
  level = str(level)
387
413
  for i, tag in enumerate(tags):
388
- if tag.startswith('tlp.'):
414
+ if tag.startswith("tlp."):
389
415
  tags.remove(tag)
390
- tags.append('tlp.'+level.replace("_", "-"))
416
+ tags.append("tlp." + level.replace("_", "-"))
391
417
  return tags
File without changes
File without changes
File without changes