osbot-utils 2.33.0__py3-none-any.whl → 2.35.0__py3-none-any.whl
This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- osbot_utils/helpers/Local_Cache.py +5 -7
- osbot_utils/helpers/Local_Caches.py +5 -5
- osbot_utils/helpers/llms/__init__.py +0 -0
- osbot_utils/helpers/llms/actions/LLM_Request__Execute.py +31 -0
- osbot_utils/helpers/llms/actions/Type_Safe__Schema_For__LLMs.py +213 -0
- osbot_utils/helpers/llms/actions/__init__.py +0 -0
- osbot_utils/helpers/llms/builders/LLM_Request__Builder.py +41 -0
- osbot_utils/helpers/llms/builders/LLM_Request__Builder__Open_AI.py +54 -0
- osbot_utils/helpers/llms/builders/LLM_Request__Factory.py +95 -0
- osbot_utils/helpers/llms/builders/__init__.py +0 -0
- osbot_utils/helpers/llms/cache/LLM_Cache__Path_Generator.py +83 -0
- osbot_utils/helpers/llms/cache/LLM_Request__Cache.py +112 -0
- osbot_utils/helpers/llms/cache/LLM_Request__Cache__File_System.py +237 -0
- osbot_utils/helpers/llms/cache/LLM_Request__Cache__Storage.py +85 -0
- osbot_utils/helpers/llms/cache/Virtual_Storage__Local__Folder.py +64 -0
- osbot_utils/helpers/llms/cache/Virtual_Storage__Sqlite.py +72 -0
- osbot_utils/helpers/llms/cache/__init__.py +0 -0
- osbot_utils/helpers/llms/platforms/__init__.py +0 -0
- osbot_utils/helpers/llms/platforms/open_ai/API__LLM__Open_AI.py +55 -0
- osbot_utils/helpers/llms/platforms/open_ai/__init__.py +0 -0
- osbot_utils/helpers/llms/schemas/Schema__LLM_Cache__Index.py +9 -0
- osbot_utils/helpers/llms/schemas/Schema__LLM_Request.py +7 -0
- osbot_utils/helpers/llms/schemas/Schema__LLM_Request__Data.py +14 -0
- osbot_utils/helpers/llms/schemas/Schema__LLM_Request__Function_Call.py +8 -0
- osbot_utils/helpers/llms/schemas/Schema__LLM_Request__Message__Content.py +6 -0
- osbot_utils/helpers/llms/schemas/Schema__LLM_Request__Message__Role.py +9 -0
- osbot_utils/helpers/llms/schemas/Schema__LLM_Response.py +9 -0
- osbot_utils/helpers/llms/schemas/Schema__LLM_Response__Cache.py +13 -0
- osbot_utils/helpers/llms/schemas/__init__.py +0 -0
- osbot_utils/helpers/safe_str/Safe_Str.py +50 -0
- osbot_utils/helpers/safe_str/Safe_Str__File__Name.py +8 -0
- osbot_utils/helpers/safe_str/Safe_Str__File__Path.py +12 -0
- osbot_utils/helpers/safe_str/Safe_Str__Hash.py +14 -0
- osbot_utils/helpers/safe_str/Safe_Str__Text.py +9 -0
- osbot_utils/helpers/safe_str/Safe_Str__Text__Dangerous.py +9 -0
- osbot_utils/helpers/safe_str/__init__.py +0 -0
- osbot_utils/helpers/sqlite/Sqlite__Cursor.py +4 -6
- osbot_utils/helpers/sqlite/Sqlite__Database.py +1 -1
- osbot_utils/helpers/sqlite/Sqlite__Field.py +3 -8
- osbot_utils/helpers/sqlite/Sqlite__Table.py +1 -3
- osbot_utils/helpers/sqlite/domains/Sqlite__DB__Files.py +6 -2
- osbot_utils/helpers/sqlite/models/Sqlite__Field__Type.py +2 -2
- osbot_utils/helpers/sqlite/tables/Sqlite__Table__Files.py +5 -5
- osbot_utils/helpers/ssh/SSH__Execute.py +0 -1
- osbot_utils/helpers/ssh/SSH__Health_Check.py +4 -5
- osbot_utils/testing/performance/Performance_Measure__Session.py +1 -1
- osbot_utils/type_safe/Type_Safe__Base.py +38 -3
- osbot_utils/type_safe/Type_Safe__Dict.py +2 -8
- osbot_utils/type_safe/Type_Safe__List.py +8 -4
- osbot_utils/type_safe/Type_Safe__Method.py +46 -5
- osbot_utils/type_safe/Type_Safe__Tuple.py +1 -1
- osbot_utils/type_safe/shared/Type_Safe__Shared__Variables.py +3 -2
- osbot_utils/type_safe/shared/Type_Safe__Validation.py +6 -4
- osbot_utils/type_safe/steps/Type_Safe__Step__Default_Value.py +0 -2
- osbot_utils/type_safe/steps/Type_Safe__Step__From_Json.py +16 -0
- osbot_utils/type_safe/steps/Type_Safe__Step__Init.py +57 -1
- osbot_utils/utils/Files.py +8 -8
- osbot_utils/utils/Objects.py +0 -1
- osbot_utils/version +1 -1
- {osbot_utils-2.33.0.dist-info → osbot_utils-2.35.0.dist-info}/METADATA +2 -2
- {osbot_utils-2.33.0.dist-info → osbot_utils-2.35.0.dist-info}/RECORD +63 -30
- osbot_utils/helpers/cache_requests/flows/flow__Cache__Requests.py +0 -11
- {osbot_utils-2.33.0.dist-info → osbot_utils-2.35.0.dist-info}/LICENSE +0 -0
- {osbot_utils-2.33.0.dist-info → osbot_utils-2.35.0.dist-info}/WHEEL +0 -0
osbot_utils/helpers/Local_Cache.py
@@ -1,10 +1,8 @@
-from osbot_utils.utils.Misc
-from osbot_utils.utils.Dev
-from osbot_utils.decorators.methods.cache_on_self
-from osbot_utils.utils.Files
-
-from osbot_utils.utils.Json import json_save_file, json_load_file
-
+from osbot_utils.utils.Misc import list_set
+from osbot_utils.utils.Dev import pprint
+from osbot_utils.decorators.methods.cache_on_self import cache_on_self
+from osbot_utils.utils.Files import current_temp_folder, path_combine, create_folder, safe_file_name, file_exists, file_delete, file_size
+from osbot_utils.utils.Json import json_save_file, json_load_file
 
 class Local_Cache:
 
osbot_utils/helpers/Local_Caches.py
@@ -1,8 +1,8 @@
-from pathlib
-from typing
-from osbot_utils.helpers.Local_Cache
-from osbot_utils.utils.Files
-from osbot_utils.utils.Misc
+from pathlib import Path
+from typing import Dict, List
+from osbot_utils.helpers.Local_Cache import Local_Cache
+from osbot_utils.utils.Files import current_temp_folder, path_combine, folder_exists, folder_delete, file_extension
+from osbot_utils.utils.Misc import random_text
 
 
 class Local_Caches:
osbot_utils/helpers/llms/__init__.py: File without changes
osbot_utils/helpers/llms/actions/LLM_Request__Execute.py
@@ -0,0 +1,31 @@
+from osbot_utils.helpers.llms.cache.LLM_Request__Cache import LLM_Request__Cache
+from osbot_utils.helpers.llms.builders.LLM_Request__Builder import LLM_Request__Builder
+from osbot_utils.helpers.llms.platforms.open_ai.API__LLM__Open_AI import API__LLM__Open_AI
+from osbot_utils.helpers.llms.schemas.Schema__LLM_Request import Schema__LLM_Request
+from osbot_utils.helpers.llms.schemas.Schema__LLM_Response import Schema__LLM_Response
+from osbot_utils.type_safe.Type_Safe import Type_Safe
+from osbot_utils.type_safe.decorators.type_safe import type_safe
+
+class LLM_Request__Execute(Type_Safe):
+    llm_cache      : LLM_Request__Cache
+    llm_api        : API__LLM__Open_AI
+    use_cache      : bool = True
+    request_builder: LLM_Request__Builder
+
+    @type_safe
+    def execute(self, llm_request: Schema__LLM_Request) -> Schema__LLM_Response:
+
+        if self.use_cache:                                                              # Check cache if enabled
+            cached_response = self.llm_cache.get(llm_request)
+            if cached_response:
+                return cached_response
+
+        self.request_builder.llm_request_data = llm_request.request_data
+        llm_payload   = self.request_builder.build_request_payload()
+        response_data = self.llm_api.execute(llm_payload)                               # Make API call
+        llm_response  = Schema__LLM_Response(response_data=response_data)               # Create response object
+
+        if self.use_cache:                                                              # Cache the response if enabled
+            self.llm_cache.add(llm_request, llm_response)
+
+        return llm_response
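Note: the execute flow above is cache lookup, payload build, API call, cache write. The snippet below is a minimal wiring sketch, not code from the package; no-argument construction of the cache and API client is assumed (the usual Type_Safe default behaviour), and the OpenAI client will still need credentials configured separately.

from osbot_utils.helpers.llms.actions.LLM_Request__Execute import LLM_Request__Execute
from osbot_utils.helpers.llms.builders.LLM_Request__Builder__Open_AI import LLM_Request__Builder__Open_AI
from osbot_utils.helpers.llms.cache.LLM_Request__Cache import LLM_Request__Cache
from osbot_utils.helpers.llms.platforms.open_ai.API__LLM__Open_AI import API__LLM__Open_AI

llm_execute = LLM_Request__Execute(llm_cache       = LLM_Request__Cache()            ,  # assumed no-arg construction
                                   llm_api         = API__LLM__Open_AI()             ,  # needs OpenAI credentials set up separately
                                   request_builder = LLM_Request__Builder__Open_AI() )
# llm_response = llm_execute.execute(llm_request)    # llm_request: a Schema__LLM_Request built with the builders below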
osbot_utils/helpers/llms/actions/Type_Safe__Schema_For__LLMs.py
@@ -0,0 +1,213 @@
+import re
+from typing import Type, Any, List, Dict, Tuple, Set, Optional, Union, get_origin, get_args
+from osbot_utils.type_safe.Type_Safe import Type_Safe
+from osbot_utils.type_safe.decorators.type_safe import type_safe
+from osbot_utils.type_safe.shared.Type_Safe__Cache import type_safe_cache
+from osbot_utils.type_safe.validators.Type_Safe__Validator import Type_Safe__Validator
+from osbot_utils.type_safe.validators.Validator__Min import Validator__Min
+from osbot_utils.type_safe.validators.Validator__Max import Validator__Max
+from osbot_utils.type_safe.validators.Validator__Regex import Validator__Regex
+from osbot_utils.type_safe.validators.Validator__One_Of import Validator__One_Of
+from osbot_utils.helpers.python_compatibility.python_3_8 import Annotated
+import inspect
+
+
+class Type_Safe__Schema_For__LLMs(Type_Safe):
+
+    @type_safe
+    def export(self, target : Type[Type_Safe]) -> dict:                            # Target Type_Safe class to convert, Returns JSON Schema object
+        return self.export__type_safe(target)
+
+    def export__type_safe(self, target: Type[Type_Safe]) -> dict:                  # Target Type_Safe class to convert, Returns JSON Schema object
+        annotations = type_safe_cache.get_class_annotations(target)
+        properties  = {}
+        required    = []
+
+        schema = { "type"      : "object"   ,
+                   "properties": properties }
+
+        if target.__doc__:                                                          # Add description if available and requested
+            schema["description"] = self.clean_docstring(target.__doc__)
+
+        for var_name, var_type in annotations:                                      # Process all annotated fields
+
+            prop_schema          = self.get_property_schema(target, var_name, var_type)
+            properties[var_name] = prop_schema
+
+            if self.is_required_property(target, var_name, var_type):               # Check if property is required
+                required.append(var_name)
+
+        if required:                                                                # Add required fields if any
+            schema["required"] = required
+
+        return schema
+
+    def clean_docstring(self, docstring: str) -> str:                               # Cleans up a docstring
+        if not docstring:
+            return ""
+        lines = docstring.splitlines()
+
+        while lines and not lines[0].strip():                                       # Remove empty lines at the beginning
+            lines.pop(0)
+
+        while lines and not lines[-1].strip():                                      # Remove empty lines at the end
+            lines.pop()
+
+        indent = min((len(line) - len(line.lstrip())                                # Find minimum indentation
+                     for line in lines if line.strip()), default=0)
+
+        return '\n'.join(line[indent:] for line in lines)                           # Remove indentation and join lines
+
+    def get_property_schema(self, target_cls : Type ,                               # Class containing the property
+                                  var_name   : str ,                                # Name of the property
+                                  var_type   : Any ,                                # Type annotation of the property
+                            ) -> dict:                                              # Returns JSON Schema for property
+        validators = []
+        origin     = get_origin(var_type)
+
+        if origin is Annotated:                                                     # Handle Annotated types
+            args       = get_args(var_type)
+            base_type  = args[0]
+            validators = [arg for arg in args[1:] if isinstance(arg, Type_Safe__Validator)]
+            var_type   = base_type
+            origin     = get_origin(base_type)                                      # todo: see if we need this field (which is not being used at the moment)
+
+        schema = self.get_type_schema(var_type)                                     # Get basic type schema
+
+        #if target_cls.__doc__:                                                     # Add description if available
+        doc = self.get_attribute_doc(target_cls, var_name)
+        if doc:
+            schema["description"] = doc
+
+        for validator in validators:                                                # Apply validators
+            self._apply_validator(schema, validator)
+
+        return schema
+
+    def get_attribute_doc(self, cls      : Type ,                                   # Class to extract docs from
+                                attr_name: str
+                          ) -> Optional[str]:                                       # Returns attribute documentation
+        if cls.__doc__:                                                             # Try class docstring first
+            lines = cls.__doc__.splitlines()
+            for i, line in enumerate(lines):
+                if f"{attr_name}:" in line or f"{attr_name} :" in line:
+                    parts = line.split(":", 1)
+                    if len(parts) > 1:
+                        return parts[1].strip()
+                    if i + 1 < len(lines) and lines[i + 1].strip():
+                        return lines[i + 1].strip()
+
+        try:                                                                        # if nor available Try source code comments next
+            source = inspect.getsource(cls)
+            lines  = source.splitlines()
+            for line in lines:
+                normal_case  = f"{attr_name}:" in line or f"{attr_name} :" in line  # Handle the normal case and the aligned case with variable spacing
+                aligned_case = re.search(fr"\b{attr_name}\s*:", line)
+
+                if normal_case or aligned_case:
+                    if "#" in line:
+                        return line.split("#", 1)[1].strip()
+                    if f"{attr_name} :" in line or f"{attr_name}:" in line:
+                        if "#" in line:
+                            return line.split("#", 1)[1].strip()
+        except (TypeError, OSError):
+            pass
+
+        return None
+
+    def get_type_schema(self, type_hint: Any) -> dict:                              # Convert Python type to JSON Schema
+        origin = get_origin(type_hint)
+
+        if   type_hint is str       : return {"type": "string"  }                   # Handle primitive types
+        elif type_hint is int       : return {"type": "integer" }
+        elif type_hint is float     : return {"type": "number"  }
+        elif type_hint is bool      : return {"type": "boolean" }
+        elif type_hint is None      : return {"type": "null"    }
+        elif type_hint is type(None): return {"type": "null"    }
+
+
+        if origin in (list, List):                                                  # Handle container types
+            args      = get_args(type_hint)
+            item_type = args[0] if args else Any
+            return { "type" : "array",
+                     "items": self.get_type_schema(item_type) }
+        elif origin in (dict, Dict):
+            args = get_args(type_hint)
+            if len(args) >= 2:
+                key_type, value_type = args[0], args[1]
+                return { "type"                : "object",
+                         "additionalProperties": self.get_type_schema(value_type) }
+            else:
+                return {"type": "object"}
+        elif origin in (tuple, Tuple):
+            args = get_args(type_hint)
+            if args:
+                return { "type"    : "array" ,
+                         "items"   : [self.get_type_schema(arg) for arg in args],
+                         "minItems": len(args) ,
+                         "maxItems": len(args) }
+            else:
+                return {"type": "array"}
+        elif origin in (set, Set):
+            args      = get_args(type_hint)
+            item_type = args[0] if args else Any
+            return { "type"       : "array" ,
+                     "items"      : self.get_type_schema(item_type) ,
+                     "uniqueItems": True }
+
+        elif origin is Union:                                                       # Handle union types (Optional is Union[type, None])
+            args = get_args(type_hint)
+            if len(args) == 2 and type(None) in args:
+                non_none_type = next(arg for arg in args if arg is not type(None))
+                schema = self.get_type_schema(non_none_type)
+                if "type" in schema and isinstance(schema["type"], str):
+                    schema["type"] = [schema["type"], "null"]
+                return schema
+            else:
+                return { "anyOf": [self.get_type_schema(arg) for arg in args] }
+
+
+        elif inspect.isclass(type_hint) and issubclass(type_hint, Type_Safe):       # Handle custom Type_Safe classes (nested objects)
+            return self.export__type_safe(type_hint)                                # Recursive call for nested objects
+
+        return {"type": "object"}                                                   # Default fallback
+
+    def _apply_validator(self, schema   : dict ,                                    # Schema to modify
+                               validator: Type_Safe__Validator
+                         ) -> None:                                                 # Applies validator constraints
+        if isinstance(validator, Validator__Min):
+            if   schema.get("type") in ["integer", "number"]: schema["minimum"  ] = validator.min_value
+            elif schema.get("type") == "string"             : schema["minLength"] = validator.min_value
+            elif schema.get("type") == "array"              : schema["minItems" ] = validator.min_value
+
+        elif isinstance(validator, Validator__Max):
+            if   schema.get("type") in ["integer", "number"]: schema["maximum"  ] = validator.max_value
+            elif schema.get("type") == "string"             : schema["maxLength"] = validator.max_value
+            elif schema.get("type") == "array"              : schema["maxItems" ] = validator.max_value
+
+        elif isinstance(validator, Validator__Regex):
+            if schema.get("type") == "string":
+                schema["pattern"] = validator.pattern
+                if validator.description:
+                    if "description" not in schema:
+                        schema["description"] = validator.description
+                    else:
+                        schema["description"] += f" (Pattern: {validator.description})"
+
+        elif isinstance(validator, Validator__One_Of):
+            schema["enum"] = validator.allowed
+
+    def is_required_property(self, target_cls: Type ,                               # Class containing the property
+                                   var_name  : str ,                                # Name of the property
+                                   var_type  : Any
+                             ) -> bool:                                             # Returns True if property is required
+        origin = get_origin(var_type)                                               # Check if the type is Optional
+        if origin is Union:
+            args = get_args(var_type)
+            if type(None) in args:
+                return False
+
+        if hasattr(target_cls, var_name):                                           # Check if there's a default value in the class
+            return getattr(target_cls, var_name) is None
+
+        return True                                                                 # By default, consider it required
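Note: to illustrate what Type_Safe__Schema_For__LLMs.export() produces, here is a small sketch. Ticket is a hypothetical Type_Safe class, not part of the package, and the expected output is paraphrased from the export logic above rather than captured from a run.

from typing import Optional
from osbot_utils.type_safe.Type_Safe import Type_Safe
from osbot_utils.helpers.llms.actions.Type_Safe__Schema_For__LLMs import Type_Safe__Schema_For__LLMs

class Ticket(Type_Safe):                 # hypothetical example class
    title    : str                       # Short summary of the issue
    priority : Optional[int] = None      # 1 (highest) to 5 (lowest)

schema = Type_Safe__Schema_For__LLMs().export(Ticket)
# roughly: {'type': 'object',
#           'properties': {'title'   : {'type': 'string', 'description': 'Short summary of the issue'},
#                          'priority': {'type': ['integer', 'null'], 'description': '1 (highest) to 5 (lowest)'}},
#           'required': ['title']}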
osbot_utils/helpers/llms/actions/__init__.py: File without changes
osbot_utils/helpers/llms/builders/LLM_Request__Builder.py
@@ -0,0 +1,41 @@
+from typing import Dict, Any, Type
+from osbot_utils.helpers.llms.actions.Type_Safe__Schema_For__LLMs import Type_Safe__Schema_For__LLMs
+from osbot_utils.helpers.llms.schemas.Schema__LLM_Request__Data import Schema__LLM_Request__Data
+from osbot_utils.helpers.llms.schemas.Schema__LLM_Request__Function_Call import Schema__LLM_Request__Function_Call
+from osbot_utils.helpers.llms.schemas.Schema__LLM_Request__Message__Content import Schema__LLM_Request__Message__Content
+from osbot_utils.helpers.llms.schemas.Schema__LLM_Request__Message__Role import Schema__LLM_Request__Message__Role
+from osbot_utils.type_safe.Type_Safe import Type_Safe
+from osbot_utils.type_safe.decorators.type_safe import type_safe
+
+class LLM_Request__Builder(Type_Safe):
+    schema_generator : Type_Safe__Schema_For__LLMs
+    llm_request_data : Schema__LLM_Request__Data
+
+    @type_safe
+    def add_message(self, role    : Schema__LLM_Request__Message__Role,
+                          content : str = None
+                    ) -> Schema__LLM_Request__Message__Content:
+        if content:
+            message = Schema__LLM_Request__Message__Content(role=role, content=content)
+            self.llm_request_data.messages.append(message)
+        return self
+
+    def add_message__assistant(self, content : str = None): return self.add_message(role=Schema__LLM_Request__Message__Role.ASSISTANT, content=content)
+    def add_message__system   (self, content : str = None): return self.add_message(role=Schema__LLM_Request__Message__Role.SYSTEM   , content=content)
+    def add_message__user     (self, content : str = None): return self.add_message(role=Schema__LLM_Request__Message__Role.USER     , content=content)
+
+    @type_safe
+    def set_function_call(self, parameters    : Type[Type_Safe],
+                                function_name : str,
+                                description   : str = ''
+                          ) -> Schema__LLM_Request__Function_Call:
+        function_call = Schema__LLM_Request__Function_Call(parameters    = parameters,
+                                                           function_name = function_name,
+                                                           description   = description)
+        self.llm_request_data.function_call = function_call
+        return self
+
+
+    @type_safe
+    def build_request_payload(self) -> Dict[str, Any]:
+        raise NotImplementedError("Subclasses must implement this method")
osbot_utils/helpers/llms/builders/LLM_Request__Builder__Open_AI.py
@@ -0,0 +1,54 @@
+from typing import Dict, Any
+from osbot_utils.helpers.llms.builders.LLM_Request__Builder import LLM_Request__Builder
+from osbot_utils.type_safe.decorators.type_safe import type_safe
+
+class LLM_Request__Builder__Open_AI(LLM_Request__Builder):
+
+    @type_safe
+    def build_request_payload(self) -> Dict[str, Any]:
+        payload = { "model"    : self.llm_request_data.model ,
+                    "messages" : [{"role" : msg.role.value, "content": msg.content} for msg in self.llm_request_data.messages]}
+        if self.llm_request_data.function_call:
+            schema = self.schema_generator.export(self.llm_request_data.function_call.parameters)
+            schema["additionalProperties"] = False                                 # needs to be False when using structured outputs
+            payload["response_format" ] = { "type"       : "json_schema",
+                                            "json_schema": { "name"  : self.llm_request_data.function_call.function_name,
+                                                             "schema": schema ,
+                                                             'strict': True }}
+
+        if self.llm_request_data.temperature is not None: payload["temperature"] = self.llm_request_data.temperature
+        if self.llm_request_data.top_p       is not None: payload["top_p"      ] = self.llm_request_data.top_p
+        if self.llm_request_data.max_tokens  is not None: payload["max_tokens" ] = self.llm_request_data.max_tokens
+
+        return payload
+
+    # @type_safe
+    # def build_request_with_json_mode(self, request: Schema__LLM_Request
+    #                                  ) -> Dict[str, Any]:
+    #     """
+    #     Builds request using OpenAI's JSON mode rather than function calling.
+    #     This is an alternative approach for structured outputs that doesn't use the tools API.
+    #     """
+    #     payload = {
+    #         "model": request.model,
+    #         "messages": [
+    #             {"role": msg.role, "content": msg.content}
+    #             for msg in request.messages
+    #         ],
+    #         "response_format": {"type": "json_object"}
+    #     }
+    #
+    #     if request.temperature is not None:
+    #         payload["temperature"] = request.temperature
+    #     if request.top_p is not None:
+    #         payload["top_p"] = request.top_p
+    #     if request.max_tokens is not None:
+    #         payload["max_tokens"] = request.max_tokens
+    #
+    #     return payload
+
+    # @type_safe
+    # def build_request_json(self, request: Schema__LLM_Request
+    #                       ) -> str:
+    #     payload = self.build_request_payload(request)
+    #     return json_dumps(payload)
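Note: a short sketch of the payload this builder produces for a plain chat request. The model id is illustrative, and the role values shown assume the Schema__LLM_Request__Message__Role enum maps to the lowercase role names; the expected payload is paraphrased, not captured from a run.

from osbot_utils.helpers.llms.builders.LLM_Request__Builder__Open_AI import LLM_Request__Builder__Open_AI

builder = LLM_Request__Builder__Open_AI()
builder.llm_request_data.model = 'gpt-4o-mini'                       # illustrative model id, not a package default
builder.add_message__system(content='You are a terse assistant.')
builder.add_message__user  (content='Say hello.')

payload = builder.build_request_payload()
# roughly: {'model'   : 'gpt-4o-mini',
#           'messages': [{'role': 'system', 'content': 'You are a terse assistant.'},
#                        {'role': 'user'  , 'content': 'Say hello.'}]}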
osbot_utils/helpers/llms/builders/LLM_Request__Factory.py
@@ -0,0 +1,95 @@
+from typing import Type, Optional, Dict, Any
+from osbot_utils.helpers.llms.builders.LLM_Request__Builder import LLM_Request__Builder
+from osbot_utils.helpers.llms.schemas.Schema__LLM_Request__Data import Schema__LLM_Request__Data
+from osbot_utils.type_safe.Type_Safe import Type_Safe
+from osbot_utils.type_safe.decorators.type_safe import type_safe
+
+
+class LLM_Request__Factory(Type_Safe):                                              # Factory class for creating common LLM request patterns.
+    request_builder : LLM_Request__Builder
+
+    @type_safe
+    def create_simple_chat_request(self, model        : str                   ,     # Model identifier
+                                         provider     : str                   ,     # Provider name (openai, anthropic)
+                                         platform     : str                   ,     # Platform name
+                                         user_message : str                   ,     # User message content
+                                         system_prompt: Optional[str ] = None ,     # Optional system prompt
+                                         temperature  : Optional[float] = None      # Temperature
+                                   ) -> Schema__LLM_Request__Data:                  # Create a simple chat request with optional system prompt.
+
+        with self.request_builder as _:
+            _.llm_request_data.model       = model
+            _.llm_request_data.provider    = provider
+            _.llm_request_data.platform    = platform
+            _.llm_request_data.temperature = temperature
+
+            _.add_message__system(content=system_prompt)                            # Add system prompt
+            _.add_message__user  (content=user_message )                            # Add user message
+            return self
+
+    @type_safe
+    def create_function_calling_request(self, model         : str                    ,  # Model identifier
+                                              provider      : str                    ,  # Provider name (openai, anthropic)
+                                              platform      : str                    ,  # Platform name
+                                              parameters    : Type[Type_Safe]        ,  # Parameters schema class
+                                              function_name : str                    ,  # Function name
+                                              function_desc : str                    ,  # Function description
+                                              user_message  : str                    ,  # User message
+                                              system_prompt : Optional[str]   = None ,  # Optional system prompt
+                                              temperature   : Optional[float] = None    # Temperature
+                                        ) -> Schema__LLM_Request__Data:                 # Create a request that uses function calling with the specified schema.
+
+        with self.request_builder as _:
+            _.set_function_call(parameters    = parameters   ,                      # Create the function call
+                                function_name = function_name,
+                                description   = function_desc)
+
+            _.add_message__system(content=system_prompt)
+            _.add_message__user  (content=user_message )
+            _.llm_request_data.model       = model
+            _.llm_request_data.provider    = provider
+            _.llm_request_data.platform    = platform
+            _.llm_request_data.temperature = temperature
+            return self
+
+    @type_safe
+    def create_entity_extraction_request(self, model             : str                      ,  # Model identifier
+                                               provider          : str                      ,  # Provider name
+                                               platform          : str                      ,  # Platform name
+                                               entity_class      : Type[Type_Safe]          ,  # Entity schema class
+                                               text_to_analyze   : str                      ,  # Text to extract entities from
+                                               system_instruction: Optional[str] = None     ,  # Optional system instructions
+                                               function_name     : str = "extract_entities",  # Function name
+                                               temperature       : Optional[float] = 0.2       # Low temperature for precision
+                                         ) -> Schema__LLM_Request__Data:
+        """Create a specialized request for entity extraction using the provided schema."""
+        # Default system instruction if none provided
+        if system_instruction is None:
+            system_instruction = (
+                "You are an expert at analyzing text and extracting structured information. "
+                "Extract entities mentioned in the text according to the specified schema. "
+                "Be precise and only include information explicitly mentioned in the text."
+            )
+
+        # User message prompting for extraction
+        user_message = f"Extract key entities from this text: {text_to_analyze}"
+
+        # Create the function calling request
+        return self.create_function_calling_request(
+            model=model,
+            provider=provider,
+            platform=platform,
+            parameters=entity_class,
+            function_name=function_name,
+            function_desc="Extract entities from text",
+            system_prompt=system_instruction,
+            user_message=user_message,
+            temperature=temperature
+        )
+
+    def request_data(self) -> Schema__LLM_Request__Data:
+        return self.request_builder.llm_request_data
+
+    @type_safe
+    def build_request_payload(self) -> Dict[str, Any]:                              # Build a provider-specific request payload from a Schema__LLM_Request.
+        return self.request_builder.build_request_payload()
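Note: the factory wraps the builder calls above into one-shot helpers. The snippet below is a hypothetical usage sketch; the model/provider/platform values are illustrative, and it relies on the create_* methods returning the factory itself (as their bodies do), with the populated request data available via request_data().

from osbot_utils.helpers.llms.builders.LLM_Request__Builder__Open_AI import LLM_Request__Builder__Open_AI
from osbot_utils.helpers.llms.builders.LLM_Request__Factory import LLM_Request__Factory

factory = LLM_Request__Factory(request_builder=LLM_Request__Builder__Open_AI())
factory.create_simple_chat_request(model        = 'gpt-4o-mini'      ,              # illustrative values
                                   provider     = 'openai'           ,
                                   platform     = 'OpenAI'           ,
                                   user_message = 'Summarise this diff in one line.')

request_data = factory.request_data()              # the populated Schema__LLM_Request__Data
payload      = factory.build_request_payload()     # delegated to the OpenAI builder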
osbot_utils/helpers/llms/builders/__init__.py: File without changes
osbot_utils/helpers/llms/cache/LLM_Cache__Path_Generator.py
@@ -0,0 +1,83 @@
+from datetime import datetime
+from typing import List, Optional
+from osbot_utils.helpers.Safe_Id import Safe_Id
+from osbot_utils.helpers.safe_str.Safe_Str__File__Path import Safe_Str__File__Path
+from osbot_utils.type_safe.Type_Safe import Type_Safe
+from osbot_utils.type_safe.decorators.type_safe import type_safe
+
+class LLM_Cache__Path_Generator(Type_Safe):
+    @type_safe
+    def generate_path(self, year     : Optional[int] = None,                        # Time components - all optional and independent
+                            month    : Optional[int] = None,
+                            day      : Optional[int] = None,
+                            hour     : Optional[int] = None,
+                            domains  : List[Safe_Id] = None,                        # Before time path
+                            areas    : List[Safe_Id] = None,                        # After time path
+                            file_id  : Safe_Id       = None,                        # File components
+                            extension: str           = None
+                      ) -> Safe_Str__File__Path:                                    # Generate a flexible path with optional time components and organizational elements."""
+
+        path = ""
+        if domains:                                                                 # Build domains part (if any)
+            path += '/'.join(str(domain) for domain in domains)
+
+
+        time_parts = []                                                             # Build time path with any available components - fully independent
+        if year is not None:
+            time_parts.append(f'{year:04}')
+        if month is not None:
+            time_parts.append(f'{month:02}')
+        if day is not None:
+            time_parts.append(f'{day:02}')
+        if hour is not None:
+            time_parts.append(f'{hour:02}')
+
+        if time_parts:                                                              # Add timeparts (if any)
+            if path:
+                path += '/' + '/'.join(time_parts)
+            else:
+                path = '/'.join(time_parts)
+
+        if areas:                                                                   # Add areas (if any)
+            if path:
+                path += '/' + '/'.join(str(area) for area in areas)
+            else:
+                path = '/'.join(str(area) for area in areas)
+
+        if file_id and extension:                                                   # Add file ID and extension (if any)
+            if path:
+                path += f'/{file_id}.{extension}'
+            else:
+                path = f'{file_id}.{extension}'
+
+        return Safe_Str__File__Path(path)
+
+    @type_safe
+    def from_date_time(self, date_time : datetime      ,
+                             domains   : List[Safe_Id] = None,
+                             areas     : List[Safe_Id] = None,
+                             file_id   : Safe_Id       = None,
+                             extension : str           = None
+                       ) -> Safe_Str__File__Path:                                   # Generate a path from a datetime object.
+        return self.generate_path(year      = date_time.year ,
+                                  month     = date_time.month,
+                                  day       = date_time.day  ,
+                                  hour      = date_time.hour ,
+                                  domains   = domains        ,
+                                  areas     = areas          ,
+                                  file_id   = file_id        ,
+                                  extension = extension      )
+
+    @type_safe
+    def now(self, domains   : List[Safe_Id] = None,
+                  areas     : List[Safe_Id] = None,
+                  file_id   : Safe_Id       = None,
+                  extension : str           = None,
+                  now       : datetime      = None
+            ) -> Safe_Str__File__Path:                                              # Generate a path using current time or provided timestamp.
+        date_time = now or datetime.now()
+        return self.from_date_time(date_time = date_time,
+                                   domains   = domains  ,
+                                   areas     = areas    ,
+                                   file_id   = file_id  ,
+                                   extension = extension)
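Note: a quick illustration of the paths this generator produces; the domain, area, and file id values are made up for the example, and the commented result follows from the generate_path logic above.

from datetime import datetime
from osbot_utils.helpers.Safe_Id import Safe_Id
from osbot_utils.helpers.llms.cache.LLM_Cache__Path_Generator import LLM_Cache__Path_Generator

path_generator = LLM_Cache__Path_Generator()
cache_path     = path_generator.from_date_time(date_time = datetime(2025, 3, 7, 14)  ,
                                               domains   = [Safe_Id('llm_cache')]    ,
                                               areas     = [Safe_Id('open_ai')]      ,
                                               file_id   = Safe_Id('request_abc123') ,
                                               extension = 'json'                    )
print(cache_path)        # llm_cache/2025/03/07/14/open_ai/request_abc123.json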