qtype: 0.0.10-py3-none-any.whl → 0.0.11-py3-none-any.whl

This diff compares the contents of publicly available package versions released to one of the supported registries, as they appear in that registry. It is provided for informational purposes only.
Files changed (60)
  1. qtype/application/__init__.py +12 -0
  2. qtype/application/commons/__init__.py +7 -0
  3. qtype/{converters → application/converters}/tools_from_module.py +2 -2
  4. qtype/{converters → application/converters}/types.py +0 -33
  5. qtype/{dsl/document.py → application/documentation.py} +2 -0
  6. qtype/application/facade.py +160 -0
  7. qtype/base/__init__.py +14 -0
  8. qtype/base/exceptions.py +49 -0
  9. qtype/base/logging.py +39 -0
  10. qtype/base/types.py +29 -0
  11. qtype/commands/convert.py +64 -49
  12. qtype/commands/generate.py +59 -4
  13. qtype/commands/run.py +109 -72
  14. qtype/commands/serve.py +42 -28
  15. qtype/commands/validate.py +25 -42
  16. qtype/commands/visualize.py +51 -37
  17. qtype/dsl/__init__.py +9 -0
  18. qtype/dsl/base_types.py +8 -0
  19. qtype/dsl/custom_types.py +6 -4
  20. qtype/dsl/model.py +185 -50
  21. qtype/dsl/validator.py +9 -4
  22. qtype/interpreter/api.py +96 -40
  23. qtype/interpreter/auth/__init__.py +3 -0
  24. qtype/interpreter/auth/aws.py +234 -0
  25. qtype/interpreter/auth/cache.py +67 -0
  26. qtype/interpreter/auth/generic.py +103 -0
  27. qtype/interpreter/batch/flow.py +95 -0
  28. qtype/interpreter/batch/sql_source.py +95 -0
  29. qtype/interpreter/batch/step.py +63 -0
  30. qtype/interpreter/batch/types.py +41 -0
  31. qtype/interpreter/batch/utils.py +179 -0
  32. qtype/interpreter/conversions.py +21 -10
  33. qtype/interpreter/resource_cache.py +4 -2
  34. qtype/interpreter/steps/decoder.py +13 -9
  35. qtype/interpreter/steps/llm_inference.py +7 -9
  36. qtype/interpreter/steps/prompt_template.py +1 -1
  37. qtype/interpreter/streaming_helpers.py +3 -3
  38. qtype/interpreter/typing.py +47 -11
  39. qtype/interpreter/ui/404/index.html +1 -1
  40. qtype/interpreter/ui/404.html +1 -1
  41. qtype/interpreter/ui/index.html +1 -1
  42. qtype/interpreter/ui/index.txt +1 -1
  43. qtype/loader.py +9 -15
  44. qtype/semantic/generate.py +91 -39
  45. qtype/semantic/model.py +183 -52
  46. qtype/semantic/resolver.py +4 -4
  47. {qtype-0.0.10.dist-info → qtype-0.0.11.dist-info}/METADATA +5 -1
  48. {qtype-0.0.10.dist-info → qtype-0.0.11.dist-info}/RECORD +58 -44
  49. qtype/commons/generate.py +0 -93
  50. qtype/semantic/errors.py +0 -4
  51. /qtype/{commons → application/commons}/tools.py +0 -0
  52. /qtype/{commons → application/converters}/__init__.py +0 -0
  53. /qtype/{converters → application/converters}/tools_from_api.py +0 -0
  54. /qtype/{converters → interpreter/batch}/__init__.py +0 -0
  55. /qtype/interpreter/ui/_next/static/{Jb2murBlt2XkN6punrQbE → OT8QJQW3J70VbDWWfrEMT}/_buildManifest.js +0 -0
  56. /qtype/interpreter/ui/_next/static/{Jb2murBlt2XkN6punrQbE → OT8QJQW3J70VbDWWfrEMT}/_ssgManifest.js +0 -0
  57. {qtype-0.0.10.dist-info → qtype-0.0.11.dist-info}/WHEEL +0 -0
  58. {qtype-0.0.10.dist-info → qtype-0.0.11.dist-info}/entry_points.txt +0 -0
  59. {qtype-0.0.10.dist-info → qtype-0.0.11.dist-info}/licenses/LICENSE +0 -0
  60. {qtype-0.0.10.dist-info → qtype-0.0.11.dist-info}/top_level.txt +0 -0
qtype/interpreter/ui/index.txt CHANGED
@@ -12,7 +12,7 @@ f:"$Sreact.suspense"
  :HL["./_next/static/media/569ce4b8f30dc480-s.p.woff2","font",{"crossOrigin":"","type":"font/woff2"}]
  :HL["./_next/static/media/93f479601ee12b01-s.p.woff2","font",{"crossOrigin":"","type":"font/woff2"}]
  :HL["./_next/static/css/a262c53826df929b.css","style"]
- 0:{"P":null,"b":"Jb2murBlt2XkN6punrQbE","p":".","c":["",""],"i":false,"f":[[["",{"children":["__PAGE__",{}]},"$undefined","$undefined",true],["",["$","$1","c",{"children":[[["$","link","0",{"rel":"stylesheet","href":"./_next/static/css/a262c53826df929b.css","precedence":"next","crossOrigin":"$undefined","nonce":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__variable_6de60c __variable_152ec0 antialiased","children":["$","$L2",null,{"parallelRouterKey":"children","error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L3",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":404}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],[]],"forbidden":"$undefined","unauthorized":"$undefined"}]}]}]]}],{"children":["__PAGE__",["$","$1","c",{"children":[["$","$L4",null,{"Component":"$5","searchParams":{},"params":{},"promises":["$@6","$@7"]}],null,["$","$L8",null,{"children":["$L9",["$","$La",null,{"promise":"$@b"}]]}]]}],{},null,false]},null,false],["$","$1","h",{"children":[null,[["$","$Lc",null,{"children":"$Ld"}],["$","meta",null,{"name":"next-size-adjust","content":""}]],["$","$Le",null,{"children":["$","div",null,{"hidden":true,"children":["$","$f",null,{"fallback":null,"children":"$L10"}]}]}]]}],false]],"m":"$undefined","G":["$11",[]],"s":false,"S":true}
+ 0:{"P":null,"b":"OT8QJQW3J70VbDWWfrEMT","p":".","c":["",""],"i":false,"f":[[["",{"children":["__PAGE__",{}]},"$undefined","$undefined",true],["",["$","$1","c",{"children":[[["$","link","0",{"rel":"stylesheet","href":"./_next/static/css/a262c53826df929b.css","precedence":"next","crossOrigin":"$undefined","nonce":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"__variable_6de60c __variable_152ec0 antialiased","children":["$","$L2",null,{"parallelRouterKey":"children","error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L3",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":404}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],[]],"forbidden":"$undefined","unauthorized":"$undefined"}]}]}]]}],{"children":["__PAGE__",["$","$1","c",{"children":[["$","$L4",null,{"Component":"$5","searchParams":{},"params":{},"promises":["$@6","$@7"]}],null,["$","$L8",null,{"children":["$L9",["$","$La",null,{"promise":"$@b"}]]}]]}],{},null,false]},null,false],["$","$1","h",{"children":[null,[["$","$Lc",null,{"children":"$Ld"}],["$","meta",null,{"name":"next-size-adjust","content":""}]],["$","$Le",null,{"children":["$","div",null,{"hidden":true,"children":["$","$f",null,{"fallback":null,"children":"$L10"}]}]}]]}],false]],"m":"$undefined","G":["$11",[]],"s":false,"S":true}
  6:{}
  7:"$0:f:0:1:2:children:1:props:children:0:props:params"
  d:[["$","meta","0",{"charSet":"utf-8"}],["$","meta","1",{"name":"viewport","content":"width=device-width, initial-scale=1"}]]
qtype/loader.py CHANGED
@@ -7,15 +7,15 @@ from __future__ import annotations
  import os
  import re
  from pathlib import Path
- from typing import Any, Type
+ from typing import Any
  from urllib.parse import urljoin, urlparse

- import fsspec
+ import fsspec # type: ignore[import-untyped]
  import yaml
  from dotenv import load_dotenv
- from fsspec.core import url_to_fs
- from pydantic import BaseModel
+ from fsspec.core import url_to_fs # type: ignore[import-untyped]

+ from qtype.base.types import CustomTypeRegistry, DocumentRootType
  from qtype.dsl import model as dsl
  from qtype.dsl.custom_types import build_dynamic_types
  from qtype.dsl.validator import validate
@@ -182,7 +182,7 @@ def _include_raw_constructor(loader: YamlLoader, node: yaml.ScalarNode) -> str:

  try:
  with fsspec.open(resolved_path, "r", encoding="utf-8") as f:
- return f.read() # type: ignore[misc]
+ return f.read() # type: ignore[no-any-return]
  except Exception as e:
  msg = f"Failed to load included file '{resolved_path}': {e}"
  raise FileNotFoundError(msg) from e
@@ -253,7 +253,7 @@ def load_yaml_from_string(
  # Use the string stream directly with the loader
  result = yaml.load(stream, Loader=YamlLoader)

- return result
+ return result # type: ignore[no-any-return]


  def load_yaml(content: str) -> dict[str, Any]:
@@ -310,10 +310,7 @@ def load_yaml(content: str) -> dict[str, Any]:
  return load_yaml_from_string(content)


- ResolveableType = dsl.Agent | dsl.Application | dsl.Flow | list
-
-
- def _resolve_root(doc: dsl.Document) -> ResolveableType:
+ def _resolve_root(doc: dsl.Document) -> DocumentRootType:
  root = doc.root
  # If the docroot is a type that ends in the name `List`, resolve it again
  types_to_resolve = set(
@@ -330,11 +327,8 @@ def _resolve_root(doc: dsl.Document) -> ResolveableType:
  return root # type: ignore[return-value]


- CustomTypeRegistry = dict[str, Type[BaseModel]]
-
-
  def _list_dynamic_types_from_document(
- loaded_yaml: dict[str, Any]
+ loaded_yaml: dict[str, Any],
  ) -> list[dict]:
  """
  Build dynamic types from the loaded YAML data.
@@ -367,7 +361,7 @@ def _list_dynamic_types_from_document(
  return rv


- def load_document(content: str) -> tuple[ResolveableType, CustomTypeRegistry]:
+ def load_document(content: str) -> tuple[DocumentRootType, CustomTypeRegistry]:
  """Load a QType YAML file, validate it, and return the resolved root."""
  yaml_data = load_yaml(content)
  dynamic_types_lists = _list_dynamic_types_from_document(yaml_data)
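
The loader changes above swap the module-local ResolveableType and CustomTypeRegistry aliases for the shared ones in qtype.base.types; load_document still returns a (root, custom types) pair. A minimal usage sketch, assuming the YAML source is already in memory as yaml_text and that its root happens to be an Application (both are placeholders, not part of this diff):

# Sketch only: yaml_text is a placeholder for YAML you have already read.
from qtype.dsl import model as dsl
from qtype.loader import load_document

root, custom_types = load_document(yaml_text)  # (DocumentRootType, CustomTypeRegistry)
if isinstance(root, dsl.Application):
    # custom_types maps type names to the dynamically built pydantic models.
    print(type(root).__name__, len(custom_types))
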
qtype/semantic/generate.py CHANGED
@@ -2,6 +2,7 @@ import argparse
  import inspect
  import subprocess
  from pathlib import Path
+ from textwrap import dedent
  from typing import Any, Literal, Union, get_args, get_origin

  import networkx as nx
@@ -14,6 +15,7 @@ TYPES_TO_IGNORE = {
  "CustomType",
  "DecoderFormat",
  "Document",
+ "Flow",
  "PrimitiveTypeEnum",
  "StrictBaseModel",
  "StructuralTypeEnum",
@@ -37,7 +39,7 @@ def sort_classes_by_inheritance(
  classes: list[tuple[str, type]],
  ) -> list[tuple[str, type]]:
  """Sort classes based on their inheritance hierarchy."""
- graph = nx.DiGraph()
+ graph: nx.DiGraph = nx.DiGraph()
  class_dict = dict(classes)

  # Build dependency graph
@@ -90,62 +92,109 @@ def generate_semantic_model(args: argparse.Namespace) -> None:
  # Write to output file
  with open(output_path, "w") as f:
  # Write header
- f.write('"""\n')
- f.write("Semantic Intermediate Representation models.\n\n")
  f.write(
- "This module contains the semantic models that represent a resolved QType\n"
- )
- f.write(
- "specification where all ID references have been replaced with actual object\n"
- )
- f.write("references.\n\n")
- f.write(
- "Generated automatically with command:\nqtype generate semantic-model\n"
+ dedent('''
+ """
+ Semantic Intermediate Representation models.
+
+ This module contains the semantic models that represent a resolved QType
+ specification where all ID references have been replaced with actual object
+ references.
+
+ Generated automatically with command:
+ qtype generate semantic-model
+
+ Types are ignored since they should reflect dsl directly, which is type checked.
+ """
+
+ ''').lstrip()
  )
- f.write('"""\n\n')

  # Write imports
- f.write("from __future__ import annotations\n\n")
- f.write("from typing import Any, Type, Literal\n\n")
- f.write("from pydantic import BaseModel, Field\n\n")
- f.write("# Import enums and type aliases from DSL\n")
- f.write(
- "from qtype.dsl.model import CustomType, VariableType # noqa: F401\n"
- )
  f.write(
- "from qtype.dsl.model import ArrayTypeDefinition, DecoderFormat, PrimitiveTypeEnum, ObjectTypeDefinition, StructuralTypeEnum\n"
+ dedent("""
+ from __future__ import annotations
+
+ from typing import Any, Literal
+
+ from pydantic import BaseModel, Field, model_validator
+
+ # Import enums and type aliases from DSL
+ from qtype.dsl.model import VariableType # noqa: F401
+ from qtype.dsl.model import ( # noqa: F401
+ CustomType,
+ DecoderFormat,
+ PrimitiveTypeEnum,
+ StepCardinality,
+ StructuralTypeEnum,
+ )
+ from qtype.dsl.model import Variable as DSLVariable # noqa: F401
+ from qtype.semantic.base_types import ImmutableModel
+
+ """).lstrip()
  )
- f.write(
- "from qtype.dsl.model import Variable as DSLVariable # noqa: F401\n"
- )
- f.write("from qtype.semantic.base_types import ImmutableModel\n")

  # Write the new variable class
- f.write("class Variable(DSLVariable, BaseModel):\n")
  f.write(
- ' """Semantic version of DSL Variable with ID references resolved."""\n'
+ dedent('''
+ class Variable(DSLVariable, BaseModel):
+ """Semantic version of DSL Variable with ID references resolved."""
+ value: Any | None = Field(None, description="The value of the variable")
+ def is_set(self) -> bool:
+ return self.value is not None
+
+ ''').lstrip()
  )
- f.write(
- ' value: Any | None = Field(None, description="The value of the variable")\n'
- )
- f.write(" def is_set(self) -> bool:\n")
- f.write(" return self.value is not None\n")

  # Write classes
  f.write("\n\n".join(generated))
+
+ # Write the Flow class which _could_ be generated but we want a validator to update it's carndiality
  f.write("\n\n")
+ f.write(
+ dedent('''
+ class Flow(Step):
+ """Defines a flow of steps that can be executed in sequence or parallel.
+ If input or output variables are not specified, they are inferred from
+ the first and last step, respectively.
+ """
+
+ description: str | None = Field(
+ None, description="Optional description of the flow."
+ )
+ cardinality: StepCardinality = Field(
+ StepCardinality.auto,
+ description="The cardinality of the flow, inferred from its steps when set to 'auto'.",
+ )
+ mode: Literal["Complete", "Chat"] = Field("Complete")
+ steps: list[Step] = Field(..., description="List of steps or step IDs.")
+
+ @model_validator(mode="after")
+ def infer_cardinality(self) -> "Flow":
+ if self.cardinality == StepCardinality.auto:
+ self.cardinality = StepCardinality.one
+ for step in self.steps:
+ if step.cardinality == StepCardinality.many:
+ self.cardinality = StepCardinality.many
+ break
+ return self
+
+ ''').lstrip()
+ )

  # Format the file with Ruff
  format_with_ruff(str(output_path))


  def format_with_ruff(file_path: str) -> None:
- """Format the given file using Ruff."""
- try:
- subprocess.run(["ruff", "check", "--fix", file_path], check=True)
- subprocess.run(["ruff", "format", file_path], check=True)
- except subprocess.CalledProcessError as e:
- print(f"Error while formatting with Ruff: {e}")
+ """Format the given file using Ruff and isort to match pre-commit configuration."""
+ # Apply the same formatting as pre-commit but only to the specific file
+ # Use --force-exclude to match pre-commit behavior exactly
+ subprocess.run(["ruff", "check", "--fix", file_path], check=True)
+ subprocess.run(
+ ["ruff", "format", "--force-exclude", file_path], check=True
+ )
+ subprocess.run(["isort", file_path], check=True)


  DSL_ONLY_UNION_TYPES = {
@@ -246,10 +295,10 @@ def dsl_to_semantic_type_name(field_type: Any) -> str:
  if hasattr(field_type, "__name__"):
  type_name = field_type.__name__
  if _is_dsl_type(field_type) and type_name not in TYPES_TO_IGNORE:
- return type_name
+ return str(type_name)
  if type_name == "NoneType":
  return "None"
- return type_name
+ return str(type_name)

  return str(field_type)

@@ -284,6 +333,9 @@ def generate_semantic_class(class_name: str, cls: type) -> str:
  inheritance = f"ABC, {semantic_base}"
  else:
  inheritance = semantic_base
+ if semantic_name == "Tool":
+ # Tools should inherit from Step and be immutable
+ inheritance = f"{semantic_base}, ImmutableModel"
  break

  # Get field information from the class - only fields defined on this class, not inherited
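
The generator now writes its fixed boilerplate as dedent(...).lstrip() blocks instead of long chains of f.write() calls, and format_with_ruff additionally runs ruff format --force-exclude and isort on the emitted file. A small standalone illustration of the dedent pattern (plain textwrap behavior, not qtype code):

from textwrap import dedent

header = dedent('''
    """
    Semantic Intermediate Representation models.
    """

''').lstrip()

# dedent() removes the indentation shared by the block's lines; lstrip()
# drops the newline that follows the opening quotes, so the generated file
# begins directly with its module docstring.
print(header, end="")
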
qtype/semantic/model.py CHANGED
@@ -7,17 +7,25 @@ references.

  Generated automatically with command:
  qtype generate semantic-model
+
+ Types are ignored since they should reflect dsl directly, which is type checked.
  """

  from __future__ import annotations

  from typing import Any, Literal

- from pydantic import BaseModel, Field
+ from pydantic import BaseModel, Field, model_validator

  # Import enums and type aliases from DSL
  from qtype.dsl.model import VariableType # noqa: F401
- from qtype.dsl.model import CustomType, DecoderFormat # noqa: F401
+ from qtype.dsl.model import ( # noqa: F401
+ CustomType,
+ DecoderFormat,
+ PrimitiveTypeEnum,
+ StepCardinality,
+ StructuralTypeEnum,
+ )
  from qtype.dsl.model import Variable as DSLVariable # noqa: F401
  from qtype.semantic.base_types import ImmutableModel

@@ -31,6 +39,15 @@ class Variable(DSLVariable, BaseModel):
  return self.value is not None


+ class AuthorizationProvider(ImmutableModel):
+ """Base class for authentication providers."""
+
+ id: str = Field(
+ ..., description="Unique ID of the authorization configuration."
+ )
+ type: str = Field(..., description="Authorization method type.")
+
+
  class Application(BaseModel):
  """Defines a complete QType application specification.

@@ -61,8 +78,11 @@ class Application(BaseModel):
  flows: list[Flow] = Field(
  [], description="List of flows defined in this application."
  )
- auths: list[AuthorizationProvider] = Field(
- [], description="List of authorization providers used for API access."
+ auths: list[APIKeyAuthProvider | AWSAuthProvider | OAuth2AuthProvider] = (
+ Field(
+ [],
+ description="List of authorization providers used for API access.",
+ )
  )
  tools: list[Tool] = Field(
  [], description="List of tools available in this application."
@@ -75,33 +95,14 @@ class Application(BaseModel):
  )


- class AuthorizationProvider(ImmutableModel):
- """Defines how tools or providers authenticate with APIs, such as OAuth2 or API keys."""
-
- id: str = Field(
- ..., description="Unique ID of the authorization configuration."
- )
- api_key: str | None = Field(
- None, description="API key if using token-based auth."
- )
- client_id: str | None = Field(None, description="OAuth2 client ID.")
- client_secret: str | None = Field(
- None, description="OAuth2 client secret."
- )
- host: str | None = Field(
- None, description="Base URL or domain of the provider."
- )
- scopes: list[str] = Field([], description="OAuth2 scopes required.")
- token_url: str | None = Field(None, description="Token endpoint URL.")
- type: str = Field(
- ..., description="Authorization method, e.g., 'oauth2' or 'api_key'."
- )
-
-
  class Step(BaseModel):
  """Base class for components that take inputs and produce outputs."""

  id: str = Field(..., description="Unique ID of this component.")
+ cardinality: StepCardinality = Field(
+ StepCardinality.one,
+ description="Does this step emit 1 (one) or 0...N (many) instances of the outputs?",
+ )
  inputs: list[Variable] = Field(
  [], description="Input variables required by this step."
  )
@@ -118,8 +119,10 @@ class Index(ImmutableModel):
  {},
  description="Index-specific configuration and connection parameters.",
  )
- auth: AuthorizationProvider | None = Field(
- None, description="AuthorizationProvider for accessing the index."
+ auth: APIKeyAuthProvider | AWSAuthProvider | OAuth2AuthProvider | None = (
+ Field(
+ None, description="AuthorizationProvider for accessing the index."
+ )
  )
  name: str = Field(..., description="Name of the index/collection/table.")

@@ -128,8 +131,8 @@ class Model(ImmutableModel):
  """Describes a generative model configuration, including provider and model ID."""

  id: str = Field(..., description="Unique ID for the model.")
- auth: AuthorizationProvider | None = Field(
- None, description="AuthorizationProvider used for model access."
+ auth: APIKeyAuthProvider | AWSAuthProvider | OAuth2AuthProvider | None = (
+ Field(None, description="AuthorizationProvider used for model access.")
  )
  inference_params: dict[str, Any] = Field(
  {},
@@ -166,15 +169,63 @@ class TelemetrySink(BaseModel):
  id: str = Field(
  ..., description="Unique ID of the telemetry sink configuration."
  )
- auth: AuthorizationProvider | None = Field(
- None,
- description="AuthorizationProvider used to authenticate telemetry data transmission.",
+ auth: APIKeyAuthProvider | AWSAuthProvider | OAuth2AuthProvider | None = (
+ Field(
+ None,
+ description="AuthorizationProvider used to authenticate telemetry data transmission.",
+ )
  )
  endpoint: str = Field(
  ..., description="URL endpoint where telemetry data will be sent."
  )


+ class APIKeyAuthProvider(AuthorizationProvider):
+ """API key-based authentication provider."""
+
+ type: Literal["api_key"] = Field("api_key")
+ api_key: str = Field(..., description="API key for authentication.")
+ host: str | None = Field(
+ None, description="Base URL or domain of the provider."
+ )
+
+
+ class AWSAuthProvider(AuthorizationProvider):
+ """AWS authentication provider supporting multiple credential methods."""
+
+ type: Literal["aws"] = Field("aws")
+ access_key_id: str | None = Field(None, description="AWS access key ID.")
+ secret_access_key: str | None = Field(
+ None, description="AWS secret access key."
+ )
+ session_token: str | None = Field(
+ None, description="AWS session token for temporary credentials."
+ )
+ profile_name: str | None = Field(
+ None, description="AWS profile name from credentials file."
+ )
+ role_arn: str | None = Field(
+ None, description="ARN of the role to assume."
+ )
+ role_session_name: str | None = Field(
+ None, description="Session name for role assumption."
+ )
+ external_id: str | None = Field(
+ None, description="External ID for role assumption."
+ )
+ region: str | None = Field(None, description="AWS region.")
+
+
+ class OAuth2AuthProvider(AuthorizationProvider):
+ """OAuth2 authentication provider."""
+
+ type: Literal["oauth2"] = Field("oauth2")
+ client_id: str = Field(..., description="OAuth2 client ID.")
+ client_secret: str = Field(..., description="OAuth2 client secret.")
+ token_url: str = Field(..., description="Token endpoint URL.")
+ scopes: list[str] = Field([], description="OAuth2 scopes required.")
+
+
  class Condition(Step):
  """Conditional logic gate within a flow. Supports branching logic for execution based on variable values."""

@@ -202,19 +253,6 @@ class Decoder(Step):
  )


- class Flow(Step):
- """Defines a flow of steps that can be executed in sequence or parallel.
- If input or output variables are not specified, they are inferred from
- the first and last step, respectively.
- """
-
- description: str | None = Field(
- None, description="Optional description of the flow."
- )
- mode: Literal["Complete", "Chat"] = Field("Complete")
- steps: list[Step] = Field(..., description="List of steps or step IDs.")
-
-
  class LLMInference(Step):
  """Defines a step that performs inference using a language model.
  It can take input variables and produce output variables based on the model's response."""
@@ -251,7 +289,27 @@ class Search(Step):
  )


- class Tool(Step):
+ class Sink(Step):
+ """Base class for data sinks"""
+
+ id: str = Field(..., description="Unique ID of the data sink.")
+ cardinality: Literal["one"] = Field(
+ StepCardinality.one,
+ description="Flows always emit exactly one instance of the outputs.",
+ )
+
+
+ class Source(Step):
+ """Base class for data sources"""
+
+ id: str = Field(..., description="Unique ID of the data source.")
+ cardinality: Literal["many"] = Field(
+ StepCardinality.many,
+ description="Sources always emit 0...N instances of the outputs.",
+ )
+
+
+ class Tool(Step, ImmutableModel):
  """
  Base class for callable functions or external operations available to the model or as a step in a flow.
  """
@@ -304,11 +362,55 @@ class VectorSearch(Search):
  """Performs vector similarity search against a vector index."""

  default_top_k: int | None = Field(
- ...,
+ 50,
  description="Number of top results to retrieve if not provided in the inputs.",
  )


+ class IndexUpsert(Sink):
+ """Semantic version of IndexUpsert."""
+
+ index: Index = Field(
+ ..., description="Index to upsert into (object or ID reference)."
+ )
+
+
+ class SQLSource(Source):
+ """SQL database source that executes queries and emits rows."""
+
+ query: str = Field(
+ ..., description="SQL query to execute. Inputs are injected as params."
+ )
+ connection: str = Field(
+ ...,
+ description="Database connection string or reference to auth provider. Typically in SQLAlchemy format.",
+ )
+ auth: APIKeyAuthProvider | AWSAuthProvider | OAuth2AuthProvider | None = (
+ Field(
+ None,
+ description="Optional AuthorizationProvider for database authentication.",
+ )
+ )
+
+
+ class SourceType(Source):
+ """SQL database source that executes queries and emits rows."""
+
+ query: str = Field(
+ ..., description="SQL query to execute. Inputs are injected as params."
+ )
+ connection: str = Field(
+ ...,
+ description="Database connection string or reference to auth provider. Typically in SQLAlchemy format.",
+ )
+ auth: APIKeyAuthProvider | AWSAuthProvider | OAuth2AuthProvider | None = (
+ Field(
+ None,
+ description="Optional AuthorizationProvider for database authentication.",
+ )
+ )
+
+
  class APITool(Tool):
  """Tool that invokes an API endpoint."""

@@ -316,9 +418,11 @@ class APITool(Tool):
  method: str = Field(
  "GET", description="HTTP method to use (GET, POST, PUT, DELETE, etc.)."
  )
- auth: AuthorizationProvider | None = Field(
- None,
- description="Optional AuthorizationProvider for API authentication.",
+ auth: APIKeyAuthProvider | AWSAuthProvider | OAuth2AuthProvider | None = (
+ Field(
+ None,
+ description="Optional AuthorizationProvider for API authentication.",
+ )
  )
  headers: dict[str, str] = Field(
  {}, description="Optional HTTP headers to include in the request."
@@ -334,3 +438,30 @@ class PythonFunctionTool(Tool):
  module_path: str = Field(
  ..., description="Optional module path where the function is defined."
  )
+
+
+ class Flow(Step):
+ """Defines a flow of steps that can be executed in sequence or parallel.
+ If input or output variables are not specified, they are inferred from
+ the first and last step, respectively.
+ """
+
+ description: str | None = Field(
+ None, description="Optional description of the flow."
+ )
+ cardinality: StepCardinality = Field(
+ StepCardinality.auto,
+ description="The cardinality of the flow, inferred from its steps when set to 'auto'.",
+ )
+ mode: Literal["Complete", "Chat"] = Field("Complete")
+ steps: list[Step] = Field(..., description="List of steps or step IDs.")
+
+ @model_validator(mode="after")
+ def infer_cardinality(self) -> "Flow":
+ if self.cardinality == StepCardinality.auto:
+ self.cardinality = StepCardinality.one
+ for step in self.steps:
+ if step.cardinality == StepCardinality.many:
+ self.cardinality = StepCardinality.many
+ break
+ return self
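
With the catch-all AuthorizationProvider reduced to a base class, the semantic model now exposes typed providers (APIKeyAuthProvider, AWSAuthProvider, OAuth2AuthProvider) that Application.auths, Model.auth, and the other auth fields accept, and Flow moves to the end of the module with the infer_cardinality validator shown above. A rough construction sketch, assuming these pydantic models can be instantiated directly with keyword arguments; the id values and secrets below are placeholders:

from qtype.semantic.model import APIKeyAuthProvider, OAuth2AuthProvider

# `type` defaults via the Literal fields, so only the provider-specific
# credentials need to be supplied.
api_auth = APIKeyAuthProvider(id="llm-key", api_key="placeholder-key")

oauth_auth = OAuth2AuthProvider(
    id="corp-oauth",
    client_id="placeholder-client",
    client_secret="placeholder-secret",
    token_url="https://auth.example.com/oauth/token",
)
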
qtype/semantic/resolver.py CHANGED
@@ -11,9 +11,9 @@ from typing import Any

  import qtype.dsl.domain_types
  import qtype.dsl.model as dsl
+ import qtype.semantic.model as ir
+ from qtype.base.exceptions import SemanticError
  from qtype.dsl.validator import _is_dsl_type, _resolve_forward_ref
- from qtype.semantic import model as ir
- from qtype.semantic.errors import SemanticResolutionError

  logger = logging.getLogger(__name__)

@@ -53,7 +53,7 @@ def to_semantic_ir(
  class_name = dslobj.__class__.__name__
  ir_class = getattr(ir, class_name, None)
  if not ir_class:
- raise SemanticResolutionError(
+ raise SemanticError(
  f"Could not find Semantic class for DSL type: {class_name}"
  )
  # iterate over the parameters of the DSL object and convert them to their semantic IR equivalents.
@@ -90,7 +90,7 @@ def resolve(application: dsl.Application) -> ir.Application:
  # references to actual objects.
  result = to_semantic_ir(application, {})
  if not isinstance(result, ir.Application):
- raise SemanticResolutionError(
+ raise SemanticError(
  "The root object must be an Application, but got: "
  f"{type(result).__name__}"
  )
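
The resolver now raises SemanticError from qtype.base.exceptions in place of the removed SemanticResolutionError, so callers should catch the new exception type. A hedged calling sketch, assuming dsl_app is a qtype.dsl.model.Application obtained elsewhere (placeholder name):

from qtype.base.exceptions import SemanticError
from qtype.semantic.resolver import resolve

try:
    ir_app = resolve(dsl_app)  # dsl_app: placeholder DSL Application
except SemanticError as exc:
    # Raised when a DSL type has no semantic counterpart or the resolved
    # root is not an Application.
    print(f"Semantic resolution failed: {exc}")
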