llm-to-json 0.0.0__py3-none-any.whl → 0.0.43__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,24 @@
+ # packages/llm_to_json/llm_to_json/__init__.py
+ try:
+     from prompture import extract_and_jsonify as _extract_and_jsonify
+     from prompture.drivers import OllamaDriver
+ except Exception:
+     from prompture.core import extract_and_jsonify as _extract_and_jsonify
+     from prompture.drivers import OllamaDriver
+
+
+ def from_llm_text(text: str, schema: dict, driver: dict | None = None):
+     """Extract JSON from text using LLM.
+
+     Args:
+         text: Text to extract JSON from
+         schema: JSON schema to validate against
+         driver: Optional LLM driver (defaults to local Ollama if not provided)
+
+     Returns:
+         Extracted and validated JSON object
+     """
+     if driver is None:
+         driver = OllamaDriver(endpoint="http://localhost:11434", model="gemma:latest")
+     result = _extract_and_jsonify(driver, text, schema)
+     return result["json_object"]
@@ -1,27 +1,29 @@
- Metadata-Version: 2.4
- Name: llm-to-json
- Version: 0.0.0
- Summary: Tiny wrapper exposing Prompture helpers to convert LLM output into JSON.
- Author-email: Juan Denis <juan@vene.co>
- License: MIT
- Keywords: llm,json,prompt,structured-output
- Requires-Python: >=3.9
- Description-Content-Type: text/markdown
- Requires-Dist: prompture>=0.0.1
-
- # llm-to-json
-
- Tiny wrapper around `prompture` with a minimal, easy-to-use API for converting LLM output (or raw text) into JSON according to a schema.
-
- Install:
- ```bash
- pip install llm-to-json
- ```
-
- Usage:
-
- ```python
- from llm_to_json import from_llm_text
- schema = {"name": "string", "age": "int"}
- print(from_llm_text("Name: Juan Age: 30", schema))
- ```
+ Metadata-Version: 2.4
+ Name: llm-to-json
+ Version: 0.0.43
+ Summary: Tiny wrapper exposing Prompture helpers to convert LLM output into JSON.
+ Author-email: Juan Denis <juan@vene.co>
+ License: MIT
+ Keywords: llm,json,prompt,structured-output
+ Requires-Python: >=3.10
+ Description-Content-Type: text/markdown
+ Requires-Dist: prompture>=0.0.43
+
+ # llm-to-json
+
+ Tiny wrapper around `prompture` with a minimal, easy-to-use API for converting LLM output (or raw text) into JSON according to a schema.
+
+ Install:
+ ```bash
+ pip install llm-to-json
+ ```
+
+ Usage:
+
+ ```python
+ from llm_to_json import from_llm_text
+ schema = {"name": "string", "age": "int"}
+ print(from_llm_text("Name: Juan Age: 30", schema))
+ ```
+
+ For full docs and advanced features, see the main project: Prompture — https://github.com/jhd3197/prompture
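
The metadata bump raises Requires-Python from >=3.9 to >=3.10 and pins prompture>=0.0.43 to match the wrapper version. A quick, illustrative sanity check of what actually landed in an environment (standard library only; the distribution and function names are taken from the metadata and module above):

```python
# Verify installed versions and the exposed API; importlib.metadata is stdlib.
import importlib.metadata

import llm_to_json

print(importlib.metadata.version("llm-to-json"))          # expected: 0.0.43
print(importlib.metadata.version("prompture"))            # should satisfy >=0.0.43
print(llm_to_json.from_llm_text.__doc__.splitlines()[0])  # "Extract JSON from text using LLM."
```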
@@ -0,0 +1,5 @@
+ llm_to_json/__init__.py,sha256=GWm3JMH1oxrhGN5E7CyxaxkwR33eU0padgyPFOakgy8,867
+ llm_to_json-0.0.43.dist-info/METADATA,sha256=CKpJA6ug7KUSlIJ3z_deOiKljbGRm03ZuMuppsaq8c4,796
+ llm_to_json-0.0.43.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ llm_to_json-0.0.43.dist-info/top_level.txt,sha256=hL_ltOEZzqwDE-EMjHKgJMvpIHri6GCtk3ppiWvP168,12
+ llm_to_json-0.0.43.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.9.0)
+ Generator: setuptools (80.10.2)
  Root-Is-Purelib: true
  Tag: py3-none-any
 
@@ -0,0 +1 @@
+ llm_to_json
@@ -1,4 +0,0 @@
- llm_to_json-0.0.0.dist-info/METADATA,sha256=JEE7hGfLG1MXUg1J2tzhMGXX1R36vL8ZPjTb8CAnpgw,709
- llm_to_json-0.0.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- llm_to_json-0.0.0.dist-info/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
- llm_to_json-0.0.0.dist-info/RECORD,,
@@ -1 +0,0 @@
-