txt2detection 1.0.3__py3-none-any.whl → 1.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of txt2detection might be problematic; see the package registry's advisory page for more details.

txt2detection/models.py CHANGED
@@ -3,9 +3,10 @@ import json
3
3
  import re
4
4
  import typing
5
5
  import uuid
6
+ import requests
6
7
  from slugify import slugify
7
8
  from datetime import date as dt_date
8
- from typing import Any, List, Literal, Optional, Union
9
+ from typing import Any, ClassVar, List, Literal, Optional, Union
9
10
  from uuid import UUID
10
11
 
11
12
  import jsonschema
@@ -194,6 +195,7 @@ class BaseDetection(BaseModel):
194
195
  level: Level
195
196
  _custom_id = None
196
197
  _extra_data: dict
198
+ sigma_json_schema: ClassVar = requests.get("https://github.com/SigmaHQ/sigma-specification/raw/refs/heads/main/json-schema/sigma-detection-rule-schema.json").json()
197
199
 
198
200
  def model_post_init(self, __context):
199
201
  self.tags = self.tags or []
@@ -249,9 +251,7 @@ class BaseDetection(BaseModel):
249
251
  def validate_rule_with_json_schema(self, rule):
250
252
  jsonschema.validate(
251
253
  rule,
252
- {
253
- "$ref": "https://github.com/SigmaHQ/sigma-specification/raw/refs/heads/main/json-schema/sigma-detection-rule-schema.json"
254
- },
254
+ self.sigma_json_schema,
255
255
  )
256
256
 
257
257
  @property
txt2detection/utils.py CHANGED
@@ -55,11 +55,13 @@ def validate_token_count(max_tokens, input, extractor: BaseAIExtractor):
55
55
  if token_count > max_tokens:
56
56
  raise Exception(f"{extractor.extractor_name}: input_file token count ({token_count}) exceeds INPUT_TOKEN_LIMIT ({max_tokens})")
57
57
 
58
+
59
+ @lru_cache(maxsize=5)
60
+ def get_licenses(date):
61
+ resp = requests.get("https://github.com/spdx/license-list-data/raw/refs/heads/main/json/licenses.json")
62
+ return {l['licenseId']: l['name'] for l in resp.json()['licenses']}
63
+
58
64
  def valid_licenses():
59
- @lru_cache(maxsize=5)
60
- def get_licenses(date):
61
- resp = requests.get("https://github.com/spdx/license-list-data/raw/refs/heads/main/json/licenses.json")
62
- return {l['licenseId']: l['name'] for l in resp.json()['licenses']}
63
65
  return get_licenses(datetime.now().date().isoformat())
64
66
 
65
67
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: txt2detection
3
- Version: 1.0.3
3
+ Version: 1.0.5
4
4
  Summary: A command line tool that takes a txt file containing threat intelligence and turns it into a detection rule.
5
5
  Project-URL: Homepage, https://github.com/muchdogesec/txt2detection
6
6
  Project-URL: Issues, https://github.com/muchdogesec/txt2detection/issues
@@ -2,9 +2,9 @@ txt2detection/__init__.py,sha256=Fc460P0q_eb2u3Xc89z-fwl-4ai3jrPqPNVwJQYNkNQ,89
2
2
  txt2detection/__main__.py,sha256=R9TgWWGzA8rxF39rZr2MNOrQubhItdRAgP2nd8Tfb78,9337
3
3
  txt2detection/bundler.py,sha256=rIvVTlLEHu9SMPqy8AyLbiJ3Cg0WNq7uWvGIXGoaPsg,10822
4
4
  txt2detection/credential_checker.py,sha256=YoOe1ABjNfAJIcNE6PRAZtvznTybUKHNBB57DPQhZsU,2564
5
- txt2detection/models.py,sha256=wDRL-Thme8XXuX1b850v_egTjL3mNYpdVmDvYwfzKI8,12000
5
+ txt2detection/models.py,sha256=AKxqHsjnMQZFX5tWPCsXE6-OQLMbDhQbdu97zWJdNb0,12064
6
6
  txt2detection/observables.py,sha256=NNnwF_gOsPmAbfgk5fj1rcluMsShZOHssAGy2VJgvmo,6763
7
- txt2detection/utils.py,sha256=wCol8_DBlvzJOV8VvjqvVR6FWWDQjmz8ItbxG8qbvnw,2755
7
+ txt2detection/utils.py,sha256=rLBFzpSepksXkONnqWkRqiMr8R4LTp4j8OrashFVUPc,2741
8
8
  txt2detection/ai_extractor/__init__.py,sha256=itcwTF0-S80mx-SuSvfrKazvcwsojR-QsBN-UvnSDwE,418
9
9
  txt2detection/ai_extractor/anthropic.py,sha256=YOi2rHUeeoRMS4CFG6mX7xUU4q4rw9qNl72R74UN6ZM,420
10
10
  txt2detection/ai_extractor/base.py,sha256=urZe_kpYu3BwXyKJsQ0GQIEtTasUQYp4dFzuz34Hai8,2336
@@ -15,8 +15,8 @@ txt2detection/ai_extractor/openrouter.py,sha256=-KcdcyKPpaeiGfvqJB4L7vMmcXTDhml3
15
15
  txt2detection/ai_extractor/prompts.py,sha256=ACYFWUafdHXHBXz7fq_RSooA4PJ-mBdaBzqsOOSFpVg,5918
16
16
  txt2detection/ai_extractor/utils.py,sha256=SUxyPhkGp5yDbX_H_E018i93R8IbyLsQ00PIBDecfuc,540
17
17
  txt2detection/config/detection_languages.yaml,sha256=dgQUJPxhDRJ_IiFEFOiH0yhEer3SkFSIhY4pS3BsX2c,287
18
- txt2detection-1.0.3.dist-info/METADATA,sha256=uObp1AT1iejmjazeIxZdB-EmY3tk087qnHxv-x1kj88,14213
19
- txt2detection-1.0.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
20
- txt2detection-1.0.3.dist-info/entry_points.txt,sha256=ep_rLlS2r1-kKE7S3iKf3SVwbCU9-FZhU9zUebitw7A,62
21
- txt2detection-1.0.3.dist-info/licenses/LICENSE,sha256=BK8Ppqlc4pdgnNzIxnxde0taoQ1BgicdyqmBvMiNYgY,11364
22
- txt2detection-1.0.3.dist-info/RECORD,,
18
+ txt2detection-1.0.5.dist-info/METADATA,sha256=VQqAGPgS4ZZC_3QoKI-kdC_myPZaqgPCWxqCT5IaGQY,14213
19
+ txt2detection-1.0.5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
20
+ txt2detection-1.0.5.dist-info/entry_points.txt,sha256=ep_rLlS2r1-kKE7S3iKf3SVwbCU9-FZhU9zUebitw7A,62
21
+ txt2detection-1.0.5.dist-info/licenses/LICENSE,sha256=BK8Ppqlc4pdgnNzIxnxde0taoQ1BgicdyqmBvMiNYgY,11364
22
+ txt2detection-1.0.5.dist-info/RECORD,,