qtype 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49) hide show
  1. qtype/__init__.py +0 -0
  2. qtype/cli.py +73 -0
  3. qtype/commands/__init__.py +5 -0
  4. qtype/commands/convert.py +76 -0
  5. qtype/commands/generate.py +107 -0
  6. qtype/commands/run.py +200 -0
  7. qtype/commands/validate.py +83 -0
  8. qtype/commons/__init__.py +0 -0
  9. qtype/commons/generate.py +88 -0
  10. qtype/commons/tools.py +192 -0
  11. qtype/converters/__init__.py +0 -0
  12. qtype/converters/tools_from_api.py +24 -0
  13. qtype/converters/tools_from_module.py +326 -0
  14. qtype/converters/types.py +20 -0
  15. qtype/dsl/__init__.py +1 -0
  16. qtype/dsl/base_types.py +31 -0
  17. qtype/dsl/document.py +108 -0
  18. qtype/dsl/domain_types.py +56 -0
  19. qtype/dsl/model.py +685 -0
  20. qtype/dsl/validator.py +439 -0
  21. qtype/interpreter/__init__.py +1 -0
  22. qtype/interpreter/api.py +104 -0
  23. qtype/interpreter/conversions.py +148 -0
  24. qtype/interpreter/exceptions.py +10 -0
  25. qtype/interpreter/flow.py +37 -0
  26. qtype/interpreter/resource_cache.py +37 -0
  27. qtype/interpreter/step.py +67 -0
  28. qtype/interpreter/steps/__init__.py +0 -0
  29. qtype/interpreter/steps/agent.py +114 -0
  30. qtype/interpreter/steps/condition.py +36 -0
  31. qtype/interpreter/steps/decoder.py +84 -0
  32. qtype/interpreter/steps/llm_inference.py +127 -0
  33. qtype/interpreter/steps/prompt_template.py +54 -0
  34. qtype/interpreter/steps/search.py +24 -0
  35. qtype/interpreter/steps/tool.py +53 -0
  36. qtype/interpreter/telemetry.py +16 -0
  37. qtype/interpreter/typing.py +78 -0
  38. qtype/loader.py +341 -0
  39. qtype/semantic/__init__.py +0 -0
  40. qtype/semantic/errors.py +4 -0
  41. qtype/semantic/generate.py +383 -0
  42. qtype/semantic/model.py +354 -0
  43. qtype/semantic/resolver.py +97 -0
  44. qtype-0.0.1.dist-info/METADATA +120 -0
  45. qtype-0.0.1.dist-info/RECORD +49 -0
  46. qtype-0.0.1.dist-info/WHEEL +5 -0
  47. qtype-0.0.1.dist-info/entry_points.txt +2 -0
  48. qtype-0.0.1.dist-info/licenses/LICENSE +202 -0
  49. qtype-0.0.1.dist-info/top_level.txt +1 -0
qtype/dsl/validator.py ADDED
@@ -0,0 +1,439 @@
1
+ from typing import Any, Dict, Union, get_args, get_origin
2
+
3
+ import qtype.dsl.base_types as base_types
4
+ import qtype.dsl.domain_types
5
+ import qtype.dsl.model as dsl
6
+
7
+
8
class QTypeValidationError(Exception):
    """Base exception for all errors raised during QType validation."""
12
+
13
+
14
class DuplicateComponentError(QTypeValidationError):
    """Raised when there are duplicate components with the same ID.

    Two *distinct* objects sharing one ID is an error; callers treat
    re-registering the identical object (compared by identity) as a no-op.
    """

    def __init__(
        self,
        obj_id: str,
        found_obj: qtype.dsl.domain_types.StrictBaseModel,
        existing_obj: qtype.dsl.domain_types.StrictBaseModel,
    ):
        # Serialize both colliding objects so the user can see exactly
        # which two definitions share the ID.
        super().__init__(
            f"Duplicate component with ID '{obj_id}' found:\n{found_obj.model_dump_json()}\nAlready exists:\n{existing_obj.model_dump_json()}"
        )
26
+
27
+
28
class ComponentNotFoundError(QTypeValidationError):
    """Raised when a component is not found in the DSL Application."""

    def __init__(self, component_id: str):
        message = f"Component with ID '{component_id}' not found in the DSL Application."
        super().__init__(message)
35
+
36
+
37
class ReferenceNotFoundError(QTypeValidationError):
    """Raised when a reference is not found in the lookup map."""

    def __init__(self, reference: str, type_hint: str | None = None):
        # Mention the expected type when the caller supplied one.
        if type_hint is None:
            msg = f"Reference '{reference}' not found in lookup map."
        else:
            msg = f"Reference '{reference}' not found in lookup map for type '{type_hint}'."
        super().__init__(msg)
47
+
48
+
49
class FlowHasNoStepsError(QTypeValidationError):
    """Raised when a flow has no steps defined."""

    def __init__(self, flow_id: str):
        message = f"Flow {flow_id} has no steps defined."
        super().__init__(message)
54
+
55
+
56
# NOTE: Some types exist only in the DSL and are never converted to semantic
# types directly -- they are used purely for JSON schema generation. During
# generation each one is swapped for its semantic abstract class,
# e.g. `ToolType` becomes `Tool`.
60
def _update_map_with_unique_check(
    current_map: Dict[str, qtype.dsl.domain_types.StrictBaseModel],
    new_objects: list[qtype.dsl.domain_types.StrictBaseModel],
) -> None:
    """
    Add objects to an ``id -> object`` map in place, enforcing unique IDs.

    Entries that are ``None`` (unset optional fields) or plain strings
    (unresolved ID references) are skipped. Registering the *same* object
    (compared by identity) again under its ID is a harmless no-op.

    Args:
        current_map: The map of objects by ID; mutated in place.
        new_objects: New objects to add to the map.

    Raises:
        DuplicateComponentError: If a *different* object is already
            registered under the same ID.
    """
    for obj in new_objects:
        if obj is None or isinstance(obj, str):
            # None: an unset optional field. str: a bare ID reference that is
            # resolved later -- neither should be registered here.
            continue
        # Note: there is no shared abstraction for the `id` field, so we
        # assume every registrable object exposes one.
        obj_id = obj.id
        # Re-adding the identical object is fine; a different object with the
        # same ID is a duplicate-definition error.
        if obj_id in current_map and current_map[obj_id] is not obj:
            raise DuplicateComponentError(obj_id, obj, current_map[obj_id])
        current_map[obj_id] = obj
91
+
92
+
93
def _update_maps_with_embedded_objects(
    lookup_map: Dict[str, qtype.dsl.domain_types.StrictBaseModel],
    embedded_objects: list[qtype.dsl.domain_types.StrictBaseModel],
) -> None:
    """
    Update the lookup map with embedded objects.

    Embedded objects are when the user specifies the object and not just its ID.
    For example, a prompt template may have variables embedded:
    ```yaml
    steps:
      - id: my_prompt
        variables:
          - id: my_var
            type: text
        outputs:
          - id: my_output
            type: text
    ```

    Args:
        lookup_map: The current lookup map to update (mutated in place).
        embedded_objects: List of embedded objects to add to the map.

    Raises:
        DuplicateComponentError: If two different objects share an ID.
    """
    for obj in embedded_objects:
        if isinstance(obj, dsl.Step):
            # All steps carry inputs and outputs; register them plus the step itself.
            _update_map_with_unique_check(lookup_map, obj.inputs or [])  # type: ignore
            _update_map_with_unique_check(lookup_map, obj.outputs or [])  # type: ignore
            _update_map_with_unique_check(lookup_map, [obj])

        if isinstance(obj, dsl.Model):
            # Models may embed an auth provider.
            _update_map_with_unique_check(lookup_map, [obj.auth])  # type: ignore

        if isinstance(obj, dsl.Condition):
            # Conditions may embed their then/else steps and the comparison variable.
            _update_map_with_unique_check(lookup_map, [obj.then, obj.else_])  # type: ignore
            _update_map_with_unique_check(lookup_map, [obj.equals])  # type: ignore
            if obj.then and isinstance(obj.then, dsl.Step):
                _update_maps_with_embedded_objects(lookup_map, [obj.then])
            if obj.else_ and isinstance(obj.else_, dsl.Step):
                _update_maps_with_embedded_objects(lookup_map, [obj.else_])

        if isinstance(obj, dsl.APITool):
            # API tools may embed an auth provider.
            _update_map_with_unique_check(lookup_map, [obj.auth])  # type: ignore

        if isinstance(obj, dsl.LLMInference):
            # LLM inference steps may embed their model and memory.
            _update_map_with_unique_check(lookup_map, [obj.model])  # type: ignore
            _update_maps_with_embedded_objects(lookup_map, [obj.model])  # type: ignore
            _update_map_with_unique_check(lookup_map, [obj.memory])  # type: ignore

        if isinstance(obj, dsl.Agent):
            # Agents may embed tools.
            _update_map_with_unique_check(lookup_map, obj.tools or [])  # type: ignore
            _update_maps_with_embedded_objects(lookup_map, obj.tools or [])  # type: ignore

        if isinstance(obj, dsl.Flow):
            # Flows register themselves and recurse into their steps.
            _update_map_with_unique_check(lookup_map, [obj])
            _update_map_with_unique_check(lookup_map, obj.steps or [])  # type: ignore
            _update_maps_with_embedded_objects(lookup_map, obj.steps or [])  # type: ignore

        if isinstance(obj, dsl.TelemetrySink):
            # Telemetry sinks may embed an auth provider.
            _update_map_with_unique_check(lookup_map, [obj.auth])  # type: ignore

        if isinstance(obj, dsl.Index):
            # Indexes may embed an auth provider.
            _update_map_with_unique_check(lookup_map, [obj.auth])  # type: ignore

        if isinstance(obj, dsl.VectorIndex):
            # Vector indexes may embed their embedding model.
            if isinstance(obj.embedding_model, dsl.EmbeddingModel):
                _update_map_with_unique_check(
                    lookup_map, [obj.embedding_model]
                )
                _update_maps_with_embedded_objects(
                    lookup_map, [obj.embedding_model]
                )

        if isinstance(obj, dsl.Search):
            # Search steps may embed their index.
            if isinstance(obj.index, dsl.Index):
                _update_map_with_unique_check(lookup_map, [obj.index])
                _update_maps_with_embedded_objects(lookup_map, [obj.index])

        # RootModel list wrappers: register (and where needed recurse into)
        # each element of the wrapped list.
        if isinstance(obj, dsl.AuthorizationProviderList):
            _update_map_with_unique_check(lookup_map, obj.root)  # type: ignore
            _update_maps_with_embedded_objects(lookup_map, obj.root)  # type: ignore

        if isinstance(obj, dsl.IndexList):
            _update_map_with_unique_check(lookup_map, obj.root)  # type: ignore
            _update_maps_with_embedded_objects(lookup_map, obj.root)  # type: ignore

        if isinstance(obj, dsl.ModelList):
            _update_map_with_unique_check(lookup_map, obj.root)  # type: ignore
            _update_maps_with_embedded_objects(lookup_map, obj.root)  # type: ignore

        if isinstance(obj, dsl.ToolList):
            _update_map_with_unique_check(lookup_map, obj.root)  # type: ignore
            _update_maps_with_embedded_objects(lookup_map, obj.root)  # type: ignore

        if isinstance(obj, dsl.TypeList):
            # Types and variables have no embedded children; registration only.
            _update_map_with_unique_check(lookup_map, obj.root)  # type: ignore

        if isinstance(obj, dsl.VariableList):
            _update_map_with_unique_check(lookup_map, obj.root)  # type: ignore

        # BUG FIX: a second, identical `isinstance(obj, dsl.TelemetrySink)`
        # branch used to follow here; it re-registered obj.auth (a no-op, since
        # re-adding the identical object passes the uniqueness check) and has
        # been removed.
208
+
209
+
210
def _build_lookup_maps(
    dsl_application: dsl.Application,
    lookup_map: Dict[str, qtype.dsl.domain_types.StrictBaseModel]
    | None = None,
) -> Dict[str, qtype.dsl.domain_types.StrictBaseModel],
    """
    Build a lookup map for all objects in the DSL Application.

    Creates a dictionary of ``id -> component`` covering every component
    field of the application, all embedded objects, and (recursively) any
    referenced Applications. The application itself is registered under its
    own id.

    Args:
        dsl_application: The DSL Application to build the lookup map for.
        lookup_map: Existing map to extend (used for recursion into
            referenced Applications); a fresh map is created when ``None``.

    Returns:
        Dict[str, dsl.StrictBaseModel]: The id -> component lookup map.

    Raises:
        DuplicateComponentError: If two different components share an ID.
        ComponentNotFoundError: If an expected Application field is missing.
    """
    # Every Application field except `id` and `references` holds components.
    component_names = {
        f
        for f in dsl.Application.model_fields.keys()
        if f not in set(["id", "references"])
    }

    if lookup_map is None:
        lookup_map = {}

    for component_name in component_names:
        if not hasattr(dsl_application, component_name):
            raise ComponentNotFoundError(component_name)
        components = getattr(dsl_application, component_name) or []
        if not isinstance(components, list):
            components = [components]  # Ensure we have a list
        _update_map_with_unique_check(lookup_map, components)
        _update_maps_with_embedded_objects(lookup_map, components)

    # Now deal with the references: referenced Applications are indexed
    # recursively into the same map.
    for ref in dsl_application.references or []:
        ref = ref.root  # type: ignore
        if isinstance(ref, dsl.Application):
            _build_lookup_maps(ref, lookup_map)

    # Anything in the reference list that is not an Application is handled by the embedded object resolver.
    _update_maps_with_embedded_objects(
        lookup_map,
        [
            ref.root  # type: ignore
            for ref in dsl_application.references or []
            if not isinstance(ref.root, dsl.Application)
        ],  # type: ignore
    )

    lookup_map[dsl_application.id] = dsl_application

    return lookup_map
263
+
264
+
265
def _is_dsl_type(type_obj: Any) -> bool:
    """Check if a type is a DSL type that should be converted to semantic."""
    name = getattr(type_obj, "__name__", None)
    module = getattr(type_obj, "__module__", None)
    # Must be a named, public type declared in the DSL model or base-types module.
    if name is None or module is None or name.startswith("_"):
        return False
    return module in (dsl.__name__, base_types.__name__)
279
+
280
+
281
def _resolve_forward_ref(field_type: Any) -> Any:
    """
    Resolve a ForwardRef type to its actual type.

    This is used to handle cases where the type annotation is a string that
    refers to a class; non-ForwardRef inputs are returned unchanged.
    """
    if hasattr(field_type, "__forward_arg__"):
        # Extract the string from ForwardRef and process it
        forward_ref_str = field_type.__forward_arg__
        # Use eval to get the actual type from the string.
        # NOTE(review): eval here is only safe because the forward-ref strings
        # originate from this package's own model annotations, never from
        # user-supplied input. Do not feed untrusted strings through this path.
        return eval(forward_ref_str, dict(vars(dsl)))
    return field_type
292
+
293
+
294
+ def _is_union(type: Any) -> bool:
295
+ """
296
+ Indicates if the provided type is a Union type.
297
+ """
298
+ origin = get_origin(type)
299
+ return origin is Union or (
300
+ hasattr(type, "__class__") and type.__class__.__name__ == "UnionType"
301
+ )
302
+
303
+
304
def _is_reference_type(field_type: Any) -> bool:
    """
    Indicates if the provided type can be a reference -- i.e., a union between a dsl type and a string.
    """
    field_type = _resolve_forward_ref(field_type)

    if not _is_union(field_type):
        return False

    # A reference field admits both a raw ID string and the DSL object itself.
    members = get_args(field_type)
    accepts_str = any(member is str for member in members)
    accepts_dsl = any(_is_dsl_type(member) for member in members)
    return accepts_str and accepts_dsl
317
+
318
+
319
def _resolve_id_references(
    dslobj: qtype.dsl.domain_types.StrictBaseModel | str,
    lookup_map: Dict[str, qtype.dsl.domain_types.StrictBaseModel],
) -> Any:
    """
    Resolves ID references in a DSL object such that all references are replaced with the actual object.

    The object is mutated in place: every field whose annotation admits a
    string-or-DSL-type union has its string values swapped for the object
    registered under that ID in ``lookup_map``. ``None`` values of list/dict
    fields are normalized to empty containers.

    Raises:
        ReferenceNotFoundError: If a referenced ID is missing from the map.
    """

    if isinstance(dslobj, str):
        # If the object is a string, we assume it is an ID and look it up in the map.
        if dslobj in lookup_map:
            return lookup_map[dslobj]
        else:
            raise ReferenceNotFoundError(dslobj)

    # iterate over all fields in the object
    def lookup_reference(val: str, typ: Any) -> Any:
        # Only swap values that are strings, whose declared type is a
        # reference union, and which are not already DSL objects.
        if (
            isinstance(val, str)
            and _is_reference_type(typ)
            and not _is_dsl_type(type(val))
        ):
            if val in lookup_map:
                return lookup_map[val]
            else:
                raise ReferenceNotFoundError(val, str(typ))
        return val

    for field_name, field_value in dslobj:
        field_info = dslobj.__class__.model_fields[field_name]
        field_type = _resolve_forward_ref(field_info.annotation)

        if isinstance(field_value, list):
            # If the field value is a list, resolve each item in the list
            # Get the type of the items of the list
            field_type = field_type.__args__[0]  # type: ignore
            if (
                get_origin(field_type) is list
            ):  # handles case where we have list[Class] | None -- in this case field_type is Union and item_type is now the list...
                field_type = field_type.__args__[0]
            resolved_list = [
                lookup_reference(item, field_type)  # type: ignore
                for item in field_value
            ]
            setattr(dslobj, field_name, resolved_list)
        elif isinstance(field_value, dict):
            field_type = field_type.__args__[0]
            if (
                get_origin(field_type) is dict
            ):  # handles case where we have dict[Class] | None -- in this case field_type is Union and item_type is now the dict...
                field_type = field_type.__args__[1]
            # If the field value is a dict, resolve each value in the dict
            resolved_dict = {
                k: lookup_reference(v, field_type)  # type: ignore
                for k, v in field_value.items()
            }
            setattr(dslobj, field_name, resolved_dict)
        elif field_value is None:
            # Convert lst | None to an empty list
            # and dict | None to an empty dict
            if _is_union(field_type):
                args = field_type.__args__  # type: ignore
                if any(str(arg).startswith("list") for arg in args):
                    setattr(dslobj, field_name, [])
                elif any(str(arg).startswith("dict") for arg in args):
                    setattr(dslobj, field_name, {})
        else:
            # Scalar field: resolve it directly.
            setattr(
                dslobj, field_name, lookup_reference(field_value, field_type)
            )

    return dslobj
391
+
392
+
393
def validate(
    dsl_application: dsl.Application,
) -> dsl.Application:
    """
    Validates the semantics of a DSL Application and resolves all internal
    references to their actual objects.

    Note: the application is mutated in place (references are resolved on the
    original objects) and the same instance is returned.

    Args:
        dsl_application: The DSL Application to validate.
    Returns:
        dsl.Application: The application with all internal references resolved.
    Raises:
        DuplicateComponentError: If two different components share an ID.
        ComponentNotFoundError: If an expected Application field is missing.
        ReferenceNotFoundError: If a referenced ID cannot be resolved.
        FlowHasNoStepsError: If any flow has no steps defined.
    """

    # First, make a lookup map of all objects in the DSL Application.
    # This ensures that all object ids are unique.
    lookup_map = _build_lookup_maps(dsl_application)

    # If any flows have no steps, we raise an error.
    for flow in dsl_application.flows or []:
        if not flow.steps:
            raise FlowHasNoStepsError(flow.id)
        # If any flow doesn't have inputs, copy the inputs from the first step.
        if not flow.inputs:
            first_step = (
                lookup_map[flow.steps[0]]
                if isinstance(flow.steps[0], str)
                else flow.steps[0]
            )
            flow.inputs = first_step.inputs or []  # type: ignore

        # If any flow doesn't have outputs, copy them from the last step.
        if not flow.outputs:
            last_step = (
                lookup_map[flow.steps[-1]]
                if isinstance(flow.steps[-1], str)
                else flow.steps[-1]
            )
            flow.outputs = last_step.outputs or []  # type: ignore

    # Now we resolve all ID references in the DSL Application.
    lookup_map = {
        obj_id: _resolve_id_references(obj, lookup_map)
        for obj_id, obj in lookup_map.items()
    }

    return dsl_application
@@ -0,0 +1 @@
1
+ """qtype.runner package initialization."""
@@ -0,0 +1,104 @@
1
+ from typing import Optional
2
+
3
+ from fastapi import FastAPI, HTTPException
4
+
5
+ from qtype.interpreter.flow import execute_flow
6
+ from qtype.interpreter.typing import (
7
+ create_input_type_model,
8
+ create_output_type_model,
9
+ )
10
+ from qtype.semantic.model import Application, Flow
11
+
12
+
13
class APIExecutor:
    """API executor for QType definitions with dynamic endpoint generation."""

    def __init__(
        self,
        definition: Application,
        host: str = "localhost",
        port: int = 8000,
    ):
        self.definition = definition
        self.host = host
        self.port = port

    def create_app(self, name: Optional[str]) -> FastAPI:
        """Create a FastAPI app with one POST endpoint per flow.

        Args:
            name: Title for the API; defaults to "QType API" when None.

        Returns:
            The configured FastAPI application.
        """
        app = FastAPI(
            title=name or "QType API",
            docs_url="/docs",  # Swagger UI
            redoc_url="/redoc",
        )

        flows = self.definition.flows if self.definition.flows else []

        # Dynamically generate POST endpoints for each flow
        for flow in flows:
            self._create_flow_endpoint(app, flow)

        return app

    def _create_flow_endpoint(self, app: FastAPI, flow: Flow) -> None:
        """Create a dynamic POST endpoint for a specific flow."""
        flow_id = flow.id

        # Create dynamic request and response models for this flow
        RequestModel = create_input_type_model(flow)
        ResponseModel = create_output_type_model(flow)

        # Create the endpoint function with proper model binding
        def execute_flow_endpoint(request: RequestModel) -> ResponseModel:  # type: ignore
            """Execute the specific flow with provided inputs."""
            try:
                # Work on a deep copy so per-request input values never leak
                # into the shared flow definition.
                # TODO: Store memory and session info in a cache to enable
                # stateful communication across requests.
                flow_copy = flow.model_copy(deep=True)

                # Set input values on the flow variables
                if flow_copy.inputs:
                    # Hoisted out of the loop: one dump serves every lookup.
                    request_dict = request.model_dump()  # type: ignore
                    for var in flow_copy.inputs:
                        if var.id in request_dict:
                            var.value = getattr(request, var.id)
                        elif not var.is_set():
                            raise HTTPException(
                                status_code=400,
                                detail=f"Required input '{var.id}' not provided",
                            )

                # Execute the flow
                result_vars = execute_flow(flow_copy)

                # Extract output values
                outputs = {var.id: var.value for var in result_vars}

                response_data = {
                    "flow_id": flow_id,
                    "outputs": outputs,
                    "status": "success",
                }

                # Return the response using the dynamic model
                return ResponseModel(**response_data)  # type: ignore

            except HTTPException:
                # BUG FIX: deliberate HTTP errors (e.g. the 400 above) were
                # previously caught by the blanket handler below and rewrapped
                # as a 500. Let them propagate unchanged.
                raise
            except Exception as e:
                # Chain the cause so the original traceback is preserved.
                raise HTTPException(
                    status_code=500, detail=f"Flow execution failed: {str(e)}"
                ) from e

        # Set the function annotations properly for FastAPI
        execute_flow_endpoint.__annotations__ = {
            "request": RequestModel,
            "return": ResponseModel,
        }

        # Add the endpoint with explicit models
        app.post(
            f"/flows/{flow_id}",
            tags=["flow"],
            summary=f"Execute {flow_id} flow",
            description=f"Execute the '{flow_id}' flow with the provided input parameters.",
            response_model=ResponseModel,
        )(execute_flow_endpoint)
@@ -0,0 +1,148 @@
1
+ from __future__ import annotations
2
+
3
+ from llama_index.core.base.embeddings.base import BaseEmbedding
4
+ from llama_index.core.base.llms.base import BaseLLM
5
+ from llama_index.core.base.llms.types import AudioBlock
6
+ from llama_index.core.base.llms.types import ChatMessage as LlamaChatMessage
7
+ from llama_index.core.base.llms.types import (
8
+ ContentBlock,
9
+ DocumentBlock,
10
+ ImageBlock,
11
+ TextBlock,
12
+ )
13
+ from llama_index.core.memory import Memory as LlamaMemory
14
+
15
+ from qtype.dsl.base_types import PrimitiveTypeEnum
16
+ from qtype.dsl.domain_types import ChatContent, ChatMessage
17
+ from qtype.dsl.model import Memory
18
+ from qtype.interpreter.exceptions import InterpreterError
19
+ from qtype.semantic.model import Model
20
+
21
+ from .resource_cache import cached_resource
22
+
23
+
24
+ @cached_resource
25
+ def to_memory(session_id: str | None, memory: Memory) -> LlamaMemory:
26
+ return LlamaMemory.from_defaults(
27
+ session_id=session_id,
28
+ token_limit=memory.token_limit,
29
+ chat_history_token_ratio=memory.chat_history_token_ratio,
30
+ token_flush_size=memory.token_flush_size,
31
+ )
32
+
33
+
34
+ @cached_resource
35
+ def to_llm(model: Model, system_prompt: str | None) -> BaseLLM:
36
+ """Convert a qtype Model to a LlamaIndex Model."""
37
+
38
+ if model.provider in "aws-bedrock":
39
+ # BedrockConverse requires a model_id and system_prompt
40
+ # Inference params can be passed as additional kwargs
41
+ from llama_index.llms.bedrock_converse import BedrockConverse
42
+ brv: BaseLLM = BedrockConverse(
43
+ model=model.model_id if model.model_id else model.id,
44
+ system_prompt=system_prompt,
45
+ **(model.inference_params if model.inference_params else {}),
46
+ )
47
+ return brv
48
+ elif model.provider == "openai":
49
+ from llama_index.llms.openai import OpenAI
50
+ return OpenAI(
51
+ model=model.model_id if model.model_id else model.id,
52
+ system_prompt=system_prompt,
53
+ **(model.inference_params if model.inference_params else {}),
54
+ api_key=model.auth.api_key if model.auth and model.auth.api_key else None,
55
+ )
56
+ elif model.provider == "anthropic":
57
+ from llama_index.llms.anthropic import Anthropic
58
+ arv: BaseLLM = Anthropic(
59
+ model=model.model_id if model.model_id else model.id,
60
+ system_prompt=system_prompt,
61
+ **(model.inference_params if model.inference_params else {}),
62
+ )
63
+ return arv
64
+ else:
65
+ raise InterpreterError(
66
+ f"Unsupported model provider: {model.provider}."
67
+ )
68
+
69
+
70
+ @cached_resource
71
+ def to_embedding_model(model: Model) -> BaseEmbedding:
72
+ """Convert a qtype Model to a LlamaIndex embedding model."""
73
+
74
+ if model.provider in {"bedrock","aws", "aws-bedrock"}:
75
+ from llama_index.embeddings.bedrock import BedrockEmbedding
76
+
77
+ embedding: BaseEmbedding = BedrockEmbedding(
78
+ model_name=model.model_id if model.model_id else model.id
79
+ )
80
+ return embedding
81
+ elif model.provider == "openai":
82
+ from llama_index.embeddings.openai import OpenAIEmbedding
83
+
84
+ embedding: BaseEmbedding = OpenAIEmbedding(
85
+ model_name=model.model_id if model.model_id else model.id
86
+ )
87
+ return embedding
88
+ else:
89
+ raise InterpreterError(
90
+ f"Unsupported embedding model provider: {model.provider}."
91
+ )
92
+
93
+
94
+ def to_content_block(content: ChatContent) -> ContentBlock:
95
+ if content.type == PrimitiveTypeEnum.text:
96
+ if isinstance(content.content, str):
97
+ # If content is a string, return a TextBlock
98
+ return TextBlock(text=content.content)
99
+ else:
100
+ # If content is not a string, raise an error
101
+ raise InterpreterError(
102
+ f"Expected content to be a string, got {type(content.content)}"
103
+ )
104
+ elif isinstance(content.content, bytes):
105
+ if content.type == PrimitiveTypeEnum.image:
106
+ return ImageBlock(image=content.content)
107
+ elif content.type == PrimitiveTypeEnum.audio:
108
+ return AudioBlock(audio=content.content)
109
+ elif content.type == PrimitiveTypeEnum.file:
110
+ return DocumentBlock(data=content.content)
111
+
112
+ raise InterpreterError(
113
+ f"Unsupported content type: {content.type} with data of type {type(content.content)}"
114
+ )
115
+
116
+
117
+ def to_chat_message(message: ChatMessage) -> LlamaChatMessage:
118
+ """Convert a ChatMessage to a LlamaChatMessage."""
119
+ blocks = [to_content_block(content) for content in message.blocks]
120
+ return LlamaChatMessage(role=message.role, content=blocks)
121
+
122
+
123
+ def from_chat_message(message: LlamaChatMessage) -> ChatMessage:
124
+ """Convert a LlamaChatMessage to a ChatMessage."""
125
+ blocks = []
126
+ for block in message.blocks:
127
+ if isinstance(block, TextBlock):
128
+ blocks.append(
129
+ ChatContent(type=PrimitiveTypeEnum.text, content=block.text)
130
+ )
131
+ elif isinstance(block, ImageBlock):
132
+ blocks.append(
133
+ ChatContent(type=PrimitiveTypeEnum.image, content=block.image)
134
+ )
135
+ elif isinstance(block, AudioBlock):
136
+ blocks.append(
137
+ ChatContent(type=PrimitiveTypeEnum.audio, content=block.audio)
138
+ )
139
+ elif isinstance(block, DocumentBlock):
140
+ blocks.append(
141
+ ChatContent(type=PrimitiveTypeEnum.file, content=block.data)
142
+ )
143
+ else:
144
+ raise InterpreterError(
145
+ f"Unsupported content block type: {type(block)}"
146
+ )
147
+
148
+ return ChatMessage(role=message.role, blocks=blocks) # type: ignore