txt2stix 1.0.9__py3-none-any.whl → 1.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- txt2stix/ai_extractor/openai.py +6 -1
- txt2stix/txt2stix.py +3 -3
- {txt2stix-1.0.9.dist-info → txt2stix-1.0.11.dist-info}/METADATA +1 -1
- {txt2stix-1.0.9.dist-info → txt2stix-1.0.11.dist-info}/RECORD +7 -7
- {txt2stix-1.0.9.dist-info → txt2stix-1.0.11.dist-info}/WHEEL +0 -0
- {txt2stix-1.0.9.dist-info → txt2stix-1.0.11.dist-info}/entry_points.txt +0 -0
- {txt2stix-1.0.9.dist-info → txt2stix-1.0.11.dist-info}/licenses/LICENSE +0 -0
txt2stix/ai_extractor/openai.py
CHANGED

@@ -1,4 +1,5 @@
 
+import logging
 import os
 from txt2stix.ai_extractor.base import BaseAIExtractor
 from llama_index.llms.openai import OpenAI
@@ -11,5 +12,9 @@ class OpenAIExtractor(BaseAIExtractor, provider="openai"):
         super().__init__()
 
     def count_tokens(self, text):
-
+        try:
+            return len(self.llm._tokenizer.encode(text))
+        except Exception as e:
+            logging.warning(e)
+            return super().count_tokens(text)
 
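The change in openai.py makes token counting defensive: OpenAIExtractor.count_tokens now tries the llama-index OpenAI tokenizer first and, if it is unavailable or raises, logs a warning and falls back to BaseAIExtractor.count_tokens. The following is a minimal self-contained sketch of that fallback pattern; the class names and the whitespace-based base estimate are illustrative assumptions, not txt2stix's actual base implementation.

import logging


class BaseExtractorSketch:
    # Hypothetical stand-in for BaseAIExtractor; the real base-class
    # count_tokens in txt2stix may be implemented differently.
    def count_tokens(self, text):
        # Crude whitespace approximation, used only for this illustration.
        return len(text.split())


class OpenAIExtractorSketch(BaseExtractorSketch):
    def __init__(self, llm=None):
        # In the real package, self.llm is a llama_index OpenAI instance.
        self.llm = llm

    def count_tokens(self, text):
        try:
            # Prefer the provider tokenizer when it is available.
            return len(self.llm._tokenizer.encode(text))
        except Exception as e:
            # Otherwise warn and degrade to the base-class estimate,
            # mirroring the 1.0.11 change above.
            logging.warning(e)
            return super().count_tokens(text)


if __name__ == "__main__":
    # With llm=None the tokenizer lookup raises AttributeError, so the
    # fallback path runs and prints 5.
    print(OpenAIExtractorSketch().count_tokens("one two three four five"))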
txt2stix/txt2stix.py
CHANGED

@@ -439,18 +439,18 @@ def main():
 
         ## write outputs
         out = bundler.to_json()
-        output_dir = Path("./output")/str(
+        output_dir = Path("./output")/str(bundler.uuid)
         with contextlib.suppress(BaseException):
             shutil.rmtree(output_dir)
         output_dir.mkdir(exist_ok=True, parents=True)
         output_path = output_dir/f"{bundler.bundle.id}.json"
         output_path.write_text(out)
         logger.info(f"Wrote bundle output to `{output_path}`")
-        data_path = output_dir/f"data--{
+        data_path = output_dir/f"data--{bundler.uuid}.json"
         data_path.write_text(data.model_dump_json(indent=4))
         logger.info(f"Wrote data output to `{data_path}`")
         for nav_layer in data.navigator_layer or []:
-            nav_path = output_dir/f"navigator-{nav_layer['domain']}----{
+            nav_path = output_dir/f"navigator-{nav_layer['domain']}----{bundler.uuid}.json"
             nav_path.write_text(json.dumps(nav_layer, indent=4))
             logger.info(f"Wrote navigator output to `{nav_path}`")
     except argparse.ArgumentError as e:
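In 1.0.11 every output path written by main() is keyed on bundler.uuid: the per-run output directory, the data--<uuid>.json file, and the navigator-<domain>----<uuid>.json layers. A rough sketch of the resulting layout, using placeholder values for bundler.uuid, bundler.bundle.id, and the navigator domain:

from pathlib import Path

# Placeholder values standing in for bundler.uuid, bundler.bundle.id and
# the navigator layer domain; the real values are generated at run time.
bundle_uuid = "3fa85f64-5717-4562-b3fc-2c963f66afa6"
bundle_id = f"bundle--{bundle_uuid}"
nav_domain = "enterprise-attack"

output_dir = Path("./output") / bundle_uuid
paths = [
    output_dir / f"{bundle_id}.json",                              # STIX 2.1 bundle
    output_dir / f"data--{bundle_uuid}.json",                      # extraction data
    output_dir / f"navigator-{nav_domain}----{bundle_uuid}.json",  # ATT&CK Navigator layer
]
for path in paths:
    print(path)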
{txt2stix-1.0.9.dist-info → txt2stix-1.0.11.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: txt2stix
-Version: 1.0.9
+Version: 1.0.11
 Summary: txt2stix is a Python script that is designed to identify and extract IoCs and TTPs from text files, identify the relationships between them, convert them to STIX 2.1 objects, and output as a STIX 2.1 bundle.
 Project-URL: Homepage, https://github.com/muchdogesec/txt2stix
 Project-URL: Issues, https://github.com/muchdogesec/txt2stix/issues
{txt2stix-1.0.9.dist-info → txt2stix-1.0.11.dist-info}/RECORD
CHANGED

@@ -8,14 +8,14 @@ txt2stix/indicator.py,sha256=c6S0xx0K8JM-PT_Qd1PlN_ZlDXdnEwiRS8529iUp3yg,30774
 txt2stix/lookups.py,sha256=h42YVtYUkWZm6ZPv2h5hHDHDzDs3yBqrT_T7pj2MDZI,2301
 txt2stix/retriever.py,sha256=biRSRwYsZoSvR758y4OFONjfrEMcxgj1PLHFLFydoSU,5729
 txt2stix/stix.py,sha256=9nXD9a2dCY4uaatl-mlIA1k3srwQBhGW-tUSho3iYe0,30
-txt2stix/txt2stix.py,sha256=
+txt2stix/txt2stix.py,sha256=Y7vr4zzh8PvFCD-pX8-qm8kxuintjkhnqQ-OYfq7CRs,18589
 txt2stix/utils.py,sha256=n6mh4t9ZRJ7iT4Jvp9ai_dfCXjgXNcRtF_zXO7nkpnk,3304
 txt2stix/ai_extractor/__init__.py,sha256=5Tf6Co9THzytBdFEVhD-7vvT05TT3nSpltnAV1sfdoM,349
 txt2stix/ai_extractor/anthropic.py,sha256=mdz-8CB-BSCEqnK5l35DRZURVPUf508ef2b48XMxmuk,441
 txt2stix/ai_extractor/base.py,sha256=w8FFceCtOZ4_uAaVMTZCzUdKnC3_3nDBafBzHlfHCn0,3959
 txt2stix/ai_extractor/deepseek.py,sha256=2XehIYbWXG6Odq68nQX4CNtl5GdmBlAmjLP_lG2eEFo,660
 txt2stix/ai_extractor/gemini.py,sha256=yJC7knYzl-TScyCBd-MTpUf-NT6znC25E7vXxNMqjLU,578
-txt2stix/ai_extractor/openai.py,sha256=
+txt2stix/ai_extractor/openai.py,sha256=FK3UlKozwoBVoBYS_CDGa9lSOae5AC3rMcOH_v0y5_Q,629
 txt2stix/ai_extractor/openrouter.py,sha256=hAA6mTOMcpA28XYsOCvuJH7WMJqXCxfqZGJf_VrDsIk,628
 txt2stix/ai_extractor/prompts.py,sha256=NtqtVyPPtShPlVZ5SrFmo-LCkfpANIIi4H9rjqaxqDo,10559
 txt2stix/ai_extractor/utils.py,sha256=xPVtp_lI7254MvkXPt9YY_Vter0uiPLKMGcv5poXVKs,4763
@@ -113,8 +113,8 @@ txt2stix/includes/lookups/threat_actor.txt,sha256=QfDO9maQuqKBgW_Sdd7VGv1SHZ9Ra-
 txt2stix/includes/lookups/tld.txt,sha256=-MEgJea2NMG_KDsnc4BVvI8eRk5Dm93L-t8SGYx5wMo,8598
 txt2stix/includes/lookups/tool.txt,sha256=HGKG6JpUE26w6ezzSxOjBkp15UpSaB7N-mZ_NU_3G7A,6
 txt2stix/includes/tests/test_cases.yaml,sha256=QD1FdIunpPkOpsn6wJRqs2vil_hv8OSVaqUp4a96aZg,22247
-txt2stix-1.0.
-txt2stix-1.0.
-txt2stix-1.0.
-txt2stix-1.0.
-txt2stix-1.0.
+txt2stix-1.0.11.dist-info/METADATA,sha256=RMfOHLhIbQI650ut4rDZlAreolrdjk6YZ5VPpTA6rbw,15483
+txt2stix-1.0.11.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+txt2stix-1.0.11.dist-info/entry_points.txt,sha256=x6QPtt65hWeomw4IpJ_wQUesBl1M4WOLODbhOKyWMFg,55
+txt2stix-1.0.11.dist-info/licenses/LICENSE,sha256=BK8Ppqlc4pdgnNzIxnxde0taoQ1BgicdyqmBvMiNYgY,11364
+txt2stix-1.0.11.dist-info/RECORD,,
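Each RECORD entry pairs a file path with sha256= followed by the unpadded URL-safe base64 of the file's SHA-256 digest, plus its size in bytes (the RECORD file itself is listed with empty hash and size). A small sketch of how such an entry could be checked against a file on disk; the file path and expected value below are taken from the 1.0.11 entries above, and the path would normally point into site-packages:

import base64
import hashlib
from pathlib import Path


def record_digest(path):
    # Build the sha256=<urlsafe-b64, unpadded> form used in wheel RECORD files.
    digest = hashlib.sha256(Path(path).read_bytes()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")


if __name__ == "__main__":
    # Compare against the RECORD entry for txt2stix/txt2stix.py in 1.0.11.
    expected = "sha256=Y7vr4zzh8PvFCD-pX8-qm8kxuintjkhnqQ-OYfq7CRs"
    print(record_digest("txt2stix/txt2stix.py") == expected)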
{txt2stix-1.0.9.dist-info → txt2stix-1.0.11.dist-info}/WHEEL
File without changes

{txt2stix-1.0.9.dist-info → txt2stix-1.0.11.dist-info}/entry_points.txt
File without changes

{txt2stix-1.0.9.dist-info → txt2stix-1.0.11.dist-info}/licenses/LICENSE
File without changes