qtype 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qtype/__init__.py +0 -0
- qtype/cli.py +73 -0
- qtype/commands/__init__.py +5 -0
- qtype/commands/convert.py +76 -0
- qtype/commands/generate.py +107 -0
- qtype/commands/run.py +200 -0
- qtype/commands/validate.py +83 -0
- qtype/commons/__init__.py +0 -0
- qtype/commons/generate.py +88 -0
- qtype/commons/tools.py +192 -0
- qtype/converters/__init__.py +0 -0
- qtype/converters/tools_from_api.py +24 -0
- qtype/converters/tools_from_module.py +326 -0
- qtype/converters/types.py +20 -0
- qtype/dsl/__init__.py +1 -0
- qtype/dsl/base_types.py +31 -0
- qtype/dsl/document.py +108 -0
- qtype/dsl/domain_types.py +56 -0
- qtype/dsl/model.py +685 -0
- qtype/dsl/validator.py +439 -0
- qtype/interpreter/__init__.py +1 -0
- qtype/interpreter/api.py +104 -0
- qtype/interpreter/conversions.py +148 -0
- qtype/interpreter/exceptions.py +10 -0
- qtype/interpreter/flow.py +37 -0
- qtype/interpreter/resource_cache.py +37 -0
- qtype/interpreter/step.py +67 -0
- qtype/interpreter/steps/__init__.py +0 -0
- qtype/interpreter/steps/agent.py +114 -0
- qtype/interpreter/steps/condition.py +36 -0
- qtype/interpreter/steps/decoder.py +84 -0
- qtype/interpreter/steps/llm_inference.py +127 -0
- qtype/interpreter/steps/prompt_template.py +54 -0
- qtype/interpreter/steps/search.py +24 -0
- qtype/interpreter/steps/tool.py +53 -0
- qtype/interpreter/telemetry.py +16 -0
- qtype/interpreter/typing.py +78 -0
- qtype/loader.py +341 -0
- qtype/semantic/__init__.py +0 -0
- qtype/semantic/errors.py +4 -0
- qtype/semantic/generate.py +383 -0
- qtype/semantic/model.py +354 -0
- qtype/semantic/resolver.py +97 -0
- qtype-0.0.1.dist-info/METADATA +120 -0
- qtype-0.0.1.dist-info/RECORD +49 -0
- qtype-0.0.1.dist-info/WHEEL +5 -0
- qtype-0.0.1.dist-info/entry_points.txt +2 -0
- qtype-0.0.1.dist-info/licenses/LICENSE +202 -0
- qtype-0.0.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,383 @@
|
|
|
1
|
+
import argparse
import inspect
import subprocess
from pathlib import Path
from typing import Any, Union, get_args, get_origin

import networkx as nx

import qtype.dsl.model as dsl
from qtype.dsl.validator import _is_dsl_type

# Fully qualified "<ClassName>.<field>" names that should not be emitted on
# the generated semantic classes.
FIELDS_TO_IGNORE = {"Application.references"}

# DSL names that are NOT regenerated as semantic classes: they are either
# re-exported from the DSL as-is (enums, type definitions) or handled
# specially (e.g. Variable gets a hand-written semantic subclass).
TYPES_TO_IGNORE = {
    "ArrayTypeDefinition",
    "DecoderFormat",
    "Document",
    "ObjectTypeDefinition",
    "PrimitiveTypeEnum",
    "StrictBaseModel",
    "StructuralTypeEnum",
    "TypeDefinition",
    "Variable",
    "VariableType",
}

# Semantic classes generated as immutable (frozen) models so instances are
# hashable and safe to cache.
FROZEN_TYPES = {
    "AuthorizationProvider",
    "DocumentIndex",
    "EmbeddingModel",
    "Index",
    "Memory",
    "Model",
    "VectorIndex",
}
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def sort_classes_by_inheritance(
    classes: list[tuple[str, type]],
) -> list[tuple[str, type]]:
    """Sort classes so that every DSL base class precedes its subclasses.

    Builds a dependency graph from the DSL inheritance relationships and
    returns the classes in a topological order, so a generated subclass can
    reference its (already generated) semantic base.

    Args:
        classes: ``(name, class)`` pairs to order.

    Returns:
        The same pairs, reordered so bases come before derived classes.

    Raises:
        graphlib.CycleError: If the inheritance graph contains a cycle
            (impossible for real Python classes, but stated for completeness).
    """
    # Stdlib topological sort; avoids depending on networkx for this step.
    from graphlib import TopologicalSorter

    class_dict = dict(classes)
    sorter: TopologicalSorter = TopologicalSorter()

    for class_name, cls in classes:
        # Register the node even when it has no DSL bases at all.
        sorter.add(class_name)
        for base in cls.__bases__:
            if (
                hasattr(base, "__module__")
                and base.__module__ == dsl.__name__
                and base.__name__ not in TYPES_TO_IGNORE
                and not base.__name__.startswith("_")
            ):
                # `base` must be emitted before `class_name`.
                sorter.add(class_name, base.__name__)

    # A base can enter the graph without being in `classes` (the caller may
    # have filtered it out, e.g. names ending in "List"); skip such names
    # instead of raising KeyError on the lookup.
    return [
        (name, class_dict[name])
        for name in sorter.static_order()
        if name in class_dict
    ]
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def generate_semantic_model(args: argparse.Namespace) -> None:
    """Generate semantic model classes from DSL model classes.

    Inspects the DSL model module and emits a Python module in which every
    string ID reference has been replaced by an actual object reference.

    Args:
        args: Parsed CLI arguments; ``args.output`` is the destination path.
    """
    output_path = Path(args.output)

    # Collect the DSL classes that need a semantic counterpart.
    dsl_classes = [
        (name, cls)
        for name, cls in inspect.getmembers(dsl, inspect.isclass)
        if cls.__module__ == dsl.__name__
        and not name.startswith("_")
        and name not in TYPES_TO_IGNORE
        and not name.endswith("List")
    ]

    # Bases must be emitted before subclasses for the output to import.
    ordered = sort_classes_by_inheritance(dsl_classes)
    generated = [generate_semantic_class(name, cls) for name, cls in ordered]

    # Static preamble: module docstring, imports, and the two hand-written
    # classes (Variable with a runtime value, and the frozen base model).
    preamble = (
        '"""\n'
        "Semantic Intermediate Representation models.\n\n"
        "This module contains the semantic models that represent a resolved QType\n"
        "specification where all ID references have been replaced with actual object\n"
        "references.\n\n"
        "Generated automatically with command:\nqtype generate semantic-model\n"
        '"""\n\n'
        "from __future__ import annotations\n\n"
        "from typing import Any, Type\n\n"
        "from pydantic import BaseModel, ConfigDict, Field\n\n"
        "# Import enums and type aliases from DSL\n"
        "from qtype.dsl.model import VariableType # noqa: F401\n"
        "from qtype.dsl.model import ArrayTypeDefinition, DecoderFormat, PrimitiveTypeEnum, ObjectTypeDefinition, StructuralTypeEnum\n"
        "from qtype.dsl.model import Variable as DSLVariable # noqa: F401\n"
        "class Variable(DSLVariable, BaseModel):\n"
        '    """Semantic version of DSL Variable with ID references resolved."""\n'
        '    value: Any | None = Field(None, description="The value of the variable")\n'
        "    def is_set(self) -> bool:\n"
        "        return self.value is not None\n"
        "\n\nclass ImmutableModel(BaseModel):\n"
        '    """Base model that can\'t be mutated but can be cached."""\n'
        "    model_config = ConfigDict(frozen=True)\n\n"
    )

    with open(output_path, "w") as out:
        out.write(preamble)
        out.write("\n\n".join(generated))
        out.write("\n\n")

    # Normalize formatting of the generated file.
    format_with_ruff(str(output_path))
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def format_with_ruff(file_path: str) -> None:
    """Format and lint-fix the given file in place using Ruff.

    Formatting is a best-effort cosmetic step: the generated file is still
    valid Python without it, so failures are reported rather than raised.

    Args:
        file_path: Path of the file to format.
    """
    try:
        subprocess.run(["ruff", "check", "--fix", file_path], check=True)
        subprocess.run(["ruff", "format", file_path], check=True)
    except FileNotFoundError:
        # Ruff is not installed / not on PATH; leave the file unformatted.
        print("Ruff executable not found; skipping formatting.")
    except subprocess.CalledProcessError as e:
        print(f"Error while formatting with Ruff: {e}")
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
# Maps the member tuples of the DSL-only union aliases (ToolType, StepType,
# IndexType, ModelType) to the single semantic base-class name that replaces
# them in generated code.
# NOTE(review): keys are get_args(...) tuples, so matching against them is
# order-sensitive by construction.
DSL_ONLY_UNION_TYPES = {
    get_args(dsl.ToolType): "Tool",
    get_args(dsl.StepType): "Step",
    get_args(dsl.IndexType): "Index",
    get_args(dsl.ModelType): "Model",
}
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def _transform_union_type(args: tuple) -> str:
    """Transform Union types, handling string ID references.

    Args:
        args: The ``get_args(...)`` member tuple of the union being
            transformed.

    Returns:
        The semantic type expression as source text, e.g. ``"Tool | None"``.
    """

    # Union members with `str` and `NoneType` stripped out; compared against
    # the DSL-only union aliases below (tuple comparison is order-sensitive).
    args_without_str_none = tuple(
        arg for arg in args if arg is not str and arg is not type(None)
    )
    has_none = any(arg is type(None) for arg in args)
    has_str = any(arg is str for arg in args)

    # First see if this is a DSL-only union type
    # If so, just return the corresponding semantic type
    if args_without_str_none in DSL_ONLY_UNION_TYPES:
        if has_none:
            # If we have a DSL type and None, we return the DSL type with None
            return DSL_ONLY_UNION_TYPES[args_without_str_none] + " | None"
        else:
            # Note we don't handle the case where we have a DSL type and str,
            # because that would indicate a reference to an ID, which we handle separately.
            return DSL_ONLY_UNION_TYPES[args_without_str_none]

    # Handle the case where we have a list | None, which in the dsl is needed, but here we will just have an empty list.
    if len(args) == 2:
        list_elems = [
            arg for arg in args if get_origin(arg) in set([list, dict])
        ]
        if len(list_elems) > 0 and has_none:
            # If we have a list and None, we return the list type
            # This is to handle cases like List[SomeType] | None
            # which in the DSL is needed, but here we will just have an empty list.
            return dsl_to_semantic_type_name(list_elems[0])

    # If the union contains a DSL type and a str, we need to drop the str
    if any(_is_dsl_type(arg) for arg in args) and has_str:
        # There is a DSL type and a str, which indicates something that can reference an ID.
        # drop the str
        args = tuple(arg for arg in args if arg is not str)

    # Fallback: render each remaining member and join with the `|` syntax.
    return " | ".join(dsl_to_semantic_type_name(a) for a in args)
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def dsl_to_semantic_type_name(field_type: Any) -> str:
    """Render a DSL field annotation as semantic-model source text.

    Recursively walks ForwardRefs, unions, and generic containers and
    returns the type expression to emit in the generated module.

    Args:
        field_type: The annotation object taken from a DSL model field.

    Returns:
        The type expression as a string, e.g. ``"list[Tool]"``.
    """
    # ForwardRef: resolve the string against the DSL module's namespace and
    # process the real type. NOTE: eval here runs on the DSL module's own
    # annotations (trusted input), never on external data.
    if hasattr(field_type, "__forward_arg__"):
        resolved = eval(field_type.__forward_arg__, dict(vars(dsl)))
        return dsl_to_semantic_type_name(resolved)

    origin = get_origin(field_type)
    type_args = get_args(field_type)

    # Unions: covers both typing.Union[...] and the X | Y (types.UnionType)
    # syntax, which get_origin reports differently.
    is_pipe_union = (
        hasattr(field_type, "__class__")
        and field_type.__class__.__name__ == "UnionType"
    )
    if origin is Union or is_pipe_union:
        return _transform_union_type(type_args)

    # Parameterized (or bare) list.
    if origin is list:
        if not type_args:
            return "list"
        return f"list[{dsl_to_semantic_type_name(type_args[0])}]"

    # Parameterized (or bare) dict.
    if origin is dict:
        if len(type_args) != 2:
            return "dict"
        key_name = dsl_to_semantic_type_name(type_args[0])
        value_name = dsl_to_semantic_type_name(type_args[1])
        return f"dict[{key_name}, {value_name}]"

    # Plain classes: DSL types keep their own name, NoneType renders as None.
    if hasattr(field_type, "__name__"):
        type_name = field_type.__name__
        if _is_dsl_type(field_type) and type_name not in TYPES_TO_IGNORE:
            return type_name
        return "None" if type_name == "NoneType" else type_name

    # Last resort: the repr of whatever we were handed.
    return str(field_type)
|
|
249
|
+
|
|
250
|
+
|
|
251
|
+
def generate_semantic_class(class_name: str, cls: type) -> str:
    """Generate the source text of a semantic class from a DSL class.

    Args:
        class_name: Name of the DSL class (reused for the semantic class).
        cls: The DSL class object to mirror.

    Returns:
        The complete ``class ...:`` definition as source text.
    """
    semantic_name = f"{class_name}"
    docstring = cls.__doc__ or f"Semantic version of {class_name}."
    inheritance = _resolve_inheritance(class_name, cls)
    fields = _collect_field_definitions(class_name, cls)

    lines = [f"class {semantic_name}({inheritance}):"]
    lines.append(f'    """{docstring}"""')
    lines.append("")
    if fields:
        lines.extend(fields)
    else:
        # No directly declared fields: keep the class body syntactically valid.
        lines.append("    pass")
    return "\n".join(lines)


def _resolve_inheritance(class_name: str, cls: type) -> str:
    """Determine the base-class list for the generated semantic class."""
    # Default base: frozen resources become ImmutableModel, the rest BaseModel
    # (with ABC appended for abstract DSL classes).
    if class_name in FROZEN_TYPES:
        inheritance = "ImmutableModel"
    else:
        inheritance = "BaseModel"
        if inspect.isabstract(cls):
            inheritance += ", ABC"

    # If the DSL class inherits from another (non-ignored) DSL class, the
    # semantic class inherits from that class's semantic counterpart instead.
    for base in cls.__bases__:
        if (
            hasattr(base, "__module__")
            and base.__module__ == dsl.__name__
            and base.__name__ not in TYPES_TO_IGNORE
            and not base.__name__.startswith("_")
        ):
            semantic_base = f"{base.__name__}"
            if inspect.isabstract(cls):
                inheritance = f"ABC, {semantic_base}"
            else:
                inheritance = semantic_base
            break
    return inheritance


def _collect_field_definitions(class_name: str, cls: type) -> list[str]:
    """Build field-definition lines for fields declared directly on *cls*.

    Inherited fields are skipped: they are emitted by the (semantic) base
    class that declares them.
    """
    fields: list[str] = []
    if not (hasattr(cls, "__annotations__") and hasattr(cls, "model_fields")):
        return fields

    for field_name in cls.__annotations__:
        if (
            field_name not in cls.model_fields
            or f"{class_name}.{field_name}" in FIELDS_TO_IGNORE
        ):
            continue

        field_info = cls.model_fields[field_name]
        field_default = field_info.default
        field_description = getattr(field_info, "description", None)

        # Transform the DSL annotation into the semantic type expression.
        semantic_type = dsl_to_semantic_type_name(field_info.annotation)

        # In the DSL a missing collection is None; semantically it is just
        # an empty collection, so rewrite the default accordingly.
        if field_default is None and semantic_type.startswith("list["):
            field_default = []
        if field_default is None and semantic_type.startswith("dict["):
            field_default = {}

        fields.append(
            create_field_definition(
                field_name, semantic_type, field_default, field_description
            )
        )
    return fields
|
|
326
|
+
|
|
327
|
+
|
|
328
|
+
def create_field_definition(
|
|
329
|
+
field_name: str,
|
|
330
|
+
field_type: str,
|
|
331
|
+
field_default: Any,
|
|
332
|
+
field_description: str | None,
|
|
333
|
+
) -> str:
|
|
334
|
+
"""Create a field definition string."""
|
|
335
|
+
# Handle aliases
|
|
336
|
+
alias_part = ""
|
|
337
|
+
if field_name == "else_":
|
|
338
|
+
alias_part = ', alias="else"'
|
|
339
|
+
|
|
340
|
+
# Handle default values
|
|
341
|
+
# Check for PydanticUndefined (required field)
|
|
342
|
+
from enum import Enum
|
|
343
|
+
|
|
344
|
+
from pydantic_core import PydanticUndefined
|
|
345
|
+
|
|
346
|
+
if field_default is PydanticUndefined or field_default is ...:
|
|
347
|
+
default_part = "..."
|
|
348
|
+
elif field_default is None:
|
|
349
|
+
default_part = "None"
|
|
350
|
+
elif isinstance(field_default, Enum):
|
|
351
|
+
# Handle enum values (like DecoderFormat.json) - check this before str since some enums inherit from str
|
|
352
|
+
enum_class_name = field_default.__class__.__name__
|
|
353
|
+
enum_value_name = field_default.name
|
|
354
|
+
default_part = f"{enum_class_name}.{enum_value_name}"
|
|
355
|
+
elif isinstance(field_default, str):
|
|
356
|
+
default_part = f'"{field_default}"'
|
|
357
|
+
elif hasattr(
|
|
358
|
+
field_default, "__name__"
|
|
359
|
+
): # Callable or other objects with names
|
|
360
|
+
# Handle other defaults with names
|
|
361
|
+
if hasattr(field_default, "__module__") and hasattr(
|
|
362
|
+
field_default, "__qualname__"
|
|
363
|
+
):
|
|
364
|
+
default_part = f"{field_default.__qualname__}"
|
|
365
|
+
else:
|
|
366
|
+
default_part = str(field_default)
|
|
367
|
+
else:
|
|
368
|
+
default_part = str(field_default)
|
|
369
|
+
|
|
370
|
+
# Create Field definition
|
|
371
|
+
field_parts = [default_part]
|
|
372
|
+
if field_description:
|
|
373
|
+
# Escape quotes and handle multiline descriptions
|
|
374
|
+
escaped_desc = field_description.replace('"', '\\"').replace(
|
|
375
|
+
"\n", "\\n"
|
|
376
|
+
)
|
|
377
|
+
field_parts.append(f'description="{escaped_desc}"')
|
|
378
|
+
if alias_part:
|
|
379
|
+
field_parts.append(alias_part.lstrip(", "))
|
|
380
|
+
|
|
381
|
+
field_def = f"Field({', '.join(field_parts)})"
|
|
382
|
+
|
|
383
|
+
return f" {field_name}: {field_type} = {field_def}"
|