qtype-0.0.12-py3-none-any.whl → qtype-0.1.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qtype/application/commons/tools.py +1 -1
- qtype/application/converters/tools_from_api.py +476 -11
- qtype/application/converters/tools_from_module.py +38 -14
- qtype/application/converters/types.py +15 -30
- qtype/application/documentation.py +1 -1
- qtype/application/facade.py +102 -85
- qtype/base/types.py +227 -7
- qtype/cli.py +5 -1
- qtype/commands/convert.py +52 -6
- qtype/commands/generate.py +44 -4
- qtype/commands/run.py +78 -36
- qtype/commands/serve.py +74 -44
- qtype/commands/validate.py +37 -14
- qtype/commands/visualize.py +46 -25
- qtype/dsl/__init__.py +6 -5
- qtype/dsl/custom_types.py +1 -1
- qtype/dsl/domain_types.py +86 -5
- qtype/dsl/linker.py +384 -0
- qtype/dsl/loader.py +315 -0
- qtype/dsl/model.py +751 -263
- qtype/dsl/parser.py +200 -0
- qtype/dsl/types.py +50 -0
- qtype/interpreter/api.py +63 -136
- qtype/interpreter/auth/aws.py +19 -9
- qtype/interpreter/auth/generic.py +93 -16
- qtype/interpreter/base/base_step_executor.py +436 -0
- qtype/interpreter/base/batch_step_executor.py +171 -0
- qtype/interpreter/base/exceptions.py +50 -0
- qtype/interpreter/base/executor_context.py +91 -0
- qtype/interpreter/base/factory.py +84 -0
- qtype/interpreter/base/progress_tracker.py +110 -0
- qtype/interpreter/base/secrets.py +339 -0
- qtype/interpreter/base/step_cache.py +74 -0
- qtype/interpreter/base/stream_emitter.py +469 -0
- qtype/interpreter/conversions.py +471 -22
- qtype/interpreter/converters.py +79 -0
- qtype/interpreter/endpoints.py +355 -0
- qtype/interpreter/executors/agent_executor.py +242 -0
- qtype/interpreter/executors/aggregate_executor.py +93 -0
- qtype/interpreter/executors/bedrock_reranker_executor.py +195 -0
- qtype/interpreter/executors/decoder_executor.py +163 -0
- qtype/interpreter/executors/doc_to_text_executor.py +112 -0
- qtype/interpreter/executors/document_embedder_executor.py +107 -0
- qtype/interpreter/executors/document_search_executor.py +113 -0
- qtype/interpreter/executors/document_source_executor.py +118 -0
- qtype/interpreter/executors/document_splitter_executor.py +105 -0
- qtype/interpreter/executors/echo_executor.py +63 -0
- qtype/interpreter/executors/field_extractor_executor.py +165 -0
- qtype/interpreter/executors/file_source_executor.py +101 -0
- qtype/interpreter/executors/file_writer_executor.py +110 -0
- qtype/interpreter/executors/index_upsert_executor.py +232 -0
- qtype/interpreter/executors/invoke_embedding_executor.py +92 -0
- qtype/interpreter/executors/invoke_flow_executor.py +51 -0
- qtype/interpreter/executors/invoke_tool_executor.py +358 -0
- qtype/interpreter/executors/llm_inference_executor.py +272 -0
- qtype/interpreter/executors/prompt_template_executor.py +78 -0
- qtype/interpreter/executors/sql_source_executor.py +106 -0
- qtype/interpreter/executors/vector_search_executor.py +91 -0
- qtype/interpreter/flow.py +173 -22
- qtype/interpreter/logging_progress.py +61 -0
- qtype/interpreter/metadata_api.py +115 -0
- qtype/interpreter/resource_cache.py +5 -4
- qtype/interpreter/rich_progress.py +225 -0
- qtype/interpreter/stream/chat/__init__.py +15 -0
- qtype/interpreter/stream/chat/converter.py +391 -0
- qtype/interpreter/{chat → stream/chat}/file_conversions.py +2 -2
- qtype/interpreter/stream/chat/ui_request_to_domain_type.py +140 -0
- qtype/interpreter/stream/chat/vercel.py +609 -0
- qtype/interpreter/stream/utils/__init__.py +15 -0
- qtype/interpreter/stream/utils/build_vercel_ai_formatter.py +74 -0
- qtype/interpreter/stream/utils/callback_to_stream.py +66 -0
- qtype/interpreter/stream/utils/create_streaming_response.py +18 -0
- qtype/interpreter/stream/utils/default_chat_extract_text.py +20 -0
- qtype/interpreter/stream/utils/error_streaming_response.py +20 -0
- qtype/interpreter/telemetry.py +135 -8
- qtype/interpreter/tools/__init__.py +5 -0
- qtype/interpreter/tools/function_tool_helper.py +265 -0
- qtype/interpreter/types.py +330 -0
- qtype/interpreter/typing.py +83 -89
- qtype/interpreter/ui/404/index.html +1 -1
- qtype/interpreter/ui/404.html +1 -1
- qtype/interpreter/ui/_next/static/{OT8QJQW3J70VbDWWfrEMT → 20HoJN6otZ_LyHLHpCPE6}/_buildManifest.js +1 -1
- qtype/interpreter/ui/_next/static/chunks/434-b2112d19f25c44ff.js +36 -0
- qtype/interpreter/ui/_next/static/chunks/{964-ed4ab073db645007.js → 964-2b041321a01cbf56.js} +1 -1
- qtype/interpreter/ui/_next/static/chunks/app/{layout-5ccbc44fd528d089.js → layout-a05273ead5de2c41.js} +1 -1
- qtype/interpreter/ui/_next/static/chunks/app/page-8c67d16ac90d23cb.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/ba12c10f-546f2714ff8abc66.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/{main-6d261b6c5d6fb6c2.js → main-e26b9cb206da2cac.js} +1 -1
- qtype/interpreter/ui/_next/static/chunks/webpack-08642e441b39b6c2.js +1 -0
- qtype/interpreter/ui/_next/static/css/8a8d1269e362fef7.css +3 -0
- qtype/interpreter/ui/_next/static/media/4cf2300e9c8272f7-s.p.woff2 +0 -0
- qtype/interpreter/ui/icon.png +0 -0
- qtype/interpreter/ui/index.html +1 -1
- qtype/interpreter/ui/index.txt +5 -5
- qtype/semantic/checker.py +643 -0
- qtype/semantic/generate.py +268 -85
- qtype/semantic/loader.py +95 -0
- qtype/semantic/model.py +535 -163
- qtype/semantic/resolver.py +63 -19
- qtype/semantic/visualize.py +50 -35
- {qtype-0.0.12.dist-info → qtype-0.1.3.dist-info}/METADATA +21 -4
- qtype-0.1.3.dist-info/RECORD +137 -0
- qtype/dsl/base_types.py +0 -38
- qtype/dsl/validator.py +0 -464
- qtype/interpreter/batch/__init__.py +0 -0
- qtype/interpreter/batch/flow.py +0 -95
- qtype/interpreter/batch/sql_source.py +0 -95
- qtype/interpreter/batch/step.py +0 -63
- qtype/interpreter/batch/types.py +0 -41
- qtype/interpreter/batch/utils.py +0 -179
- qtype/interpreter/chat/chat_api.py +0 -237
- qtype/interpreter/chat/vercel.py +0 -314
- qtype/interpreter/exceptions.py +0 -10
- qtype/interpreter/step.py +0 -67
- qtype/interpreter/steps/__init__.py +0 -0
- qtype/interpreter/steps/agent.py +0 -114
- qtype/interpreter/steps/condition.py +0 -36
- qtype/interpreter/steps/decoder.py +0 -88
- qtype/interpreter/steps/llm_inference.py +0 -150
- qtype/interpreter/steps/prompt_template.py +0 -54
- qtype/interpreter/steps/search.py +0 -24
- qtype/interpreter/steps/tool.py +0 -53
- qtype/interpreter/streaming_helpers.py +0 -123
- qtype/interpreter/ui/_next/static/chunks/736-7fc606e244fedcb1.js +0 -36
- qtype/interpreter/ui/_next/static/chunks/app/page-c72e847e888e549d.js +0 -1
- qtype/interpreter/ui/_next/static/chunks/ba12c10f-22556063851a6df2.js +0 -1
- qtype/interpreter/ui/_next/static/chunks/webpack-8289c17c67827f22.js +0 -1
- qtype/interpreter/ui/_next/static/css/a262c53826df929b.css +0 -3
- qtype/interpreter/ui/_next/static/media/569ce4b8f30dc480-s.p.woff2 +0 -0
- qtype/interpreter/ui/favicon.ico +0 -0
- qtype/loader.py +0 -389
- qtype-0.0.12.dist-info/RECORD +0 -105
- /qtype/interpreter/ui/_next/static/{OT8QJQW3J70VbDWWfrEMT → 20HoJN6otZ_LyHLHpCPE6}/_ssgManifest.js +0 -0
- {qtype-0.0.12.dist-info → qtype-0.1.3.dist-info}/WHEEL +0 -0
- {qtype-0.0.12.dist-info → qtype-0.1.3.dist-info}/entry_points.txt +0 -0
- {qtype-0.0.12.dist-info → qtype-0.1.3.dist-info}/licenses/LICENSE +0 -0
- {qtype-0.0.12.dist-info → qtype-0.1.3.dist-info}/top_level.txt +0 -0

qtype/interpreter/stream/utils/callback_to_stream.py
ADDED

@@ -0,0 +1,66 @@
+"""Utilities for converting callbacks to async iterators."""
+
+from __future__ import annotations
+
+import asyncio
+from collections.abc import AsyncIterator, Awaitable, Callable
+from typing import TypeVar
+
+T = TypeVar("T")
+
+
+async def callback_to_async_iterator(
+    task_fn: Callable[[Callable[[T], Awaitable[None]]], Awaitable[None]],
+) -> AsyncIterator[T]:
+    """
+    Convert a callback-based async function to an async iterator.
+
+    This utility bridges callback-style APIs (where you provide a callback
+    that gets called with events) to async iterator style (where you yield
+    events).
+
+    Args:
+        task_fn: Async function that accepts a callback. The callback
+            will be called with each event. The task_fn should complete
+            when done producing events.
+
+    Yields:
+        Events that were passed to the callback
+
+    Example:
+        ```python
+        async def execute_with_callback(callback):
+            await callback("event1")
+            await callback("event2")
+            # Function completes when done
+
+        async for event in callback_to_async_iterator(execute_with_callback):
+            print(event)  # Prints "event1", "event2"
+        ```
+    """
+    queue: asyncio.Queue[T | None] = asyncio.Queue()
+
+    async def callback(item: T) -> None:
+        """Queue items as they arrive."""
+        await queue.put(item)
+
+    async def run_task() -> None:
+        """Execute the task and signal completion."""
+        try:
+            await task_fn(callback)
+        finally:
+            await queue.put(None)  # Signal completion
+
+    # Start task in background
+    task = asyncio.create_task(run_task())
+
+    try:
+        # Yield items from queue until None
+        while True:
+            item = await queue.get()
+            if item is None:
+                break
+            yield item
+    finally:
+        # Ensure task completes
+        await task
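
Taken on its own, the sentinel-queue pattern above is easy to exercise. A minimal usage sketch (the `produce` coroutine below is hypothetical; only `callback_to_async_iterator` comes from the new module):

```python
import asyncio

from qtype.interpreter.stream.utils.callback_to_stream import (
    callback_to_async_iterator,
)


async def produce(emit):
    # Hypothetical callback-style producer, e.g. a streaming handler that
    # invokes a callback once per chunk.
    for chunk in ("Hello", ", ", "world"):
        await emit(chunk)


async def main() -> None:
    async for chunk in callback_to_async_iterator(produce):
        print(chunk, end="")
    print()


asyncio.run(main())
```

The `None` sentinel pushed in the `finally` block is what ends the `async for` loop; the trailing `await task` then surfaces any exception the producer raised.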
qtype/interpreter/stream/utils/create_streaming_response.py
ADDED

@@ -0,0 +1,18 @@
+from __future__ import annotations
+
+from collections.abc import Generator
+
+from fastapi.responses import StreamingResponse
+
+
+def create_streaming_response(
+    formatter: Generator[str, None, None],
+) -> StreamingResponse:
+    """
+    Wrap a formatter generator into a StreamingResponse with proper headers.
+    """
+    response = StreamingResponse(
+        formatter, media_type="text/plain; charset=utf-8"
+    )
+    response.headers["x-vercel-ai-ui-message-stream"] = "v1"
+    return response
qtype/interpreter/stream/utils/default_chat_extract_text.py
ADDED

@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+from typing import Any
+
+from qtype.dsl.domain_types import ChatMessage
+
+
+def default_chat_extract_text(message: Any) -> str:
+    """
+    Default extractor for ChatMessage or generic objects.
+    """
+    if isinstance(message, ChatMessage):
+        return " ".join(
+            [
+                getattr(block, "content", "")
+                for block in message.blocks
+                if getattr(block, "content", "")
+            ]
+        )
+    return str(message)
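
For `ChatMessage` inputs the helper joins the non-empty `content` of each block with spaces; anything else falls through to `str()`. A quick check of the fallback path (assuming the package is installed):

```python
from qtype.interpreter.stream.utils.default_chat_extract_text import (
    default_chat_extract_text,
)

# Non-ChatMessage payloads are simply stringified.
print(default_chat_extract_text({"answer": 42}))  # {'answer': 42}
```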
qtype/interpreter/stream/utils/error_streaming_response.py
ADDED

@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+from fastapi.responses import StreamingResponse
+
+from qtype.interpreter.stream.chat.vercel import ErrorChunk
+
+
+def error_streaming_response(message: str) -> StreamingResponse:
+    """
+    Create a streaming response with a single ErrorChunk.
+    """
+    error_chunk = ErrorChunk(errorText=message)
+    response = StreamingResponse(
+        [
+            f"data: {error_chunk.model_dump_json(by_alias=True, exclude_none=True)}\n\n"
+        ],
+        media_type="text/plain; charset=utf-8",
+    )
+    response.headers["x-vercel-ai-ui-message-stream"] = "v1"
+    return response
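
Together these two helpers cover the happy path and the failure path of a streamed chat response. A minimal sketch of how they might be wired into a FastAPI route; the route and the trivial formatter are hypothetical stand-ins (the real endpoints are generated by the interpreter):

```python
from collections.abc import Generator

from fastapi import FastAPI

from qtype.interpreter.stream.utils.create_streaming_response import (
    create_streaming_response,
)
from qtype.interpreter.stream.utils.error_streaming_response import (
    error_streaming_response,
)

app = FastAPI()


def demo_formatter() -> Generator[str, None, None]:
    # Stand-in for a real Vercel AI formatter; it just yields pre-formatted chunks.
    yield "data: {}\n\n"


@app.post("/chat")  # hypothetical route, for illustration only
async def chat():
    try:
        formatter = demo_formatter()  # building the formatter may fail
    except Exception as exc:
        # Surface the failure as a single error frame in the same stream format.
        return error_streaming_response(str(exc))
    return create_streaming_response(formatter)
```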
qtype/interpreter/telemetry.py
CHANGED

@@ -1,16 +1,143 @@
+from __future__ import annotations
+
+import base64
+
 from openinference.instrumentation.llama_index import LlamaIndexInstrumentor
-from …
+from opentelemetry import trace
+from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
+    OTLPSpanExporter,
+)
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import BatchSpanProcessor
 
+from qtype.interpreter.base.secrets import SecretManagerBase
 from qtype.semantic.model import TelemetrySink
 
 
-def …
-…
+def _setup_langfuse_otel(
+    sink: TelemetrySink,
+    project_id: str,
+    secret_manager: SecretManagerBase,
+    context: str,
+) -> TracerProvider:
+    """
+    Initializes and registers Langfuse as an OTEL trace exporter.
 
-…
-…
-…
-…
-…
+    Langfuse supports OpenTelemetry via its OTLP-compatible endpoint at
+    /api/public/otel. This function configures an OTLP exporter with
+    Basic Auth credentials to send traces to Langfuse.
+
+    Args:
+        sink: TelemetrySink with Langfuse endpoint and credentials
+        project_id: Project identifier for grouping traces in Langfuse
+        secret_manager: For resolving secret references
+        context: Context string for error messages
+
+    Returns:
+        Configured OpenTelemetry TracerProvider
+    """
+    # 1. Resolve secrets for Langfuse from args
+    # Langfuse requires public_key and secret_key in args
+    args = sink.args | {"host": sink.endpoint}
+    args = secret_manager.resolve_secrets_in_dict(
+        args, f"telemetry sink '{sink.id}' args"
     )
+
+    public_key = args.get("public_key")
+    secret_key = args.get("secret_key")
+
+    if not public_key or not secret_key:
+        msg = (
+            f"Langfuse telemetry sink '{sink.id}' requires "
+            "'public_key' and 'secret_key' in args. "
+            f"Got keys: {list(args.keys())}"
+        )
+        raise ValueError(msg)
+
+    # 2. Resolve the endpoint (host)
+    host = args["host"]
+
+    # 3. Build OTLP endpoint for Langfuse
+    # Langfuse OTLP ingestion endpoint
+    endpoint = f"{host.rstrip('/')}/api/public/otel"
+
+    # 4. Create Basic Auth header
+    # Langfuse uses Basic Auth with public_key:secret_key
+    auth_string = f"{public_key}:{secret_key}"
+    b64_auth = base64.b64encode(auth_string.encode()).decode()
+    headers = {"Authorization": f"Basic {b64_auth}"}
+
+    # 5. Setup OTEL Provider and Exporter
+    # Set service.name resource, which maps to project_id in Langfuse
+    resource = Resource(attributes={"service.name": project_id})
+    tracer_provider = TracerProvider(resource=resource)
+
+    # Create the OTLP exporter configured for Langfuse
+    exporter = OTLPSpanExporter(endpoint=endpoint, headers=headers)
+    tracer_provider.add_span_processor(BatchSpanProcessor(exporter))
+
+    # Set as the global tracer provider
+    trace.set_tracer_provider(tracer_provider)
+
+    return tracer_provider
+
+
+def register(
+    telemetry: TelemetrySink,
+    secret_manager: SecretManagerBase,
+    project_id: str | None = None,
+) -> TracerProvider:
+    """
+    Register and configure telemetry for the QType runtime.
+
+    This function sets up telemetry collection by:
+    1. Resolving any SecretReferences in the telemetry endpoint
+    2. Registering with the Phoenix OTEL provider
+    3. Instrumenting LlamaIndex for automatic tracing
+
+    Args:
+        telemetry: TelemetrySink configuration with endpoint and auth
+        project_id: Optional project identifier for telemetry grouping.
+            If not provided, uses telemetry.id
+        secret_manager: Optional secret manager for resolving endpoint URLs
+            that are stored as SecretReferences. If None, uses NoOpSecretManager
+            which will raise an error if secrets are needed.
+
+    Returns:
+        TracerProvider instance for managing telemetry lifecycle.
+
+    Note:
+        Supports Phoenix and Langfuse telemetry providers.
+        Phoenix is the default.
+    """
+
+    # Only llama_index and phoenix are supported for now
+
+    project_id = project_id if project_id else telemetry.id
+
+    if telemetry.provider == "Phoenix":
+        from phoenix.otel import register as register_phoenix
+
+        args = {
+            "endpoint": telemetry.endpoint,
+            "project_name": project_id,
+        } | telemetry.args
+
+        args = secret_manager.resolve_secrets_in_dict(
+            args, f"telemetry sink '{telemetry.id}'"
+        )
+        tracer_provider = register_phoenix(**args)
+    elif telemetry.provider == "Langfuse":
+        tracer_provider = _setup_langfuse_otel(
+            sink=telemetry,
+            project_id=project_id,
+            secret_manager=secret_manager,
+            context=f"telemetry sink '{telemetry.id}'",
+        )
+    else:
+        raise ValueError(
+            f"Unsupported telemetry provider: {telemetry.provider}"
+        )
     LlamaIndexInstrumentor().instrument(tracer_provider=tracer_provider)
+    return tracer_provider
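
Outside of QType, the Langfuse branch boils down to pointing a standard OTLP/HTTP exporter at `/api/public/otel` with a Basic Auth header. A distilled sketch using only the OpenTelemetry SDK (the keys and host below are placeholders; in the interpreter they come from the sink's `args` via the secret manager):

```python
import base64

from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor

public_key, secret_key = "pk-lf-...", "sk-lf-..."  # placeholder credentials
host = "https://cloud.langfuse.com"                # placeholder Langfuse host

# Basic Auth is public_key:secret_key, base64-encoded.
auth = base64.b64encode(f"{public_key}:{secret_key}".encode()).decode()

provider = TracerProvider(
    resource=Resource(attributes={"service.name": "my-project"})
)
provider.add_span_processor(
    BatchSpanProcessor(
        OTLPSpanExporter(
            endpoint=f"{host}/api/public/otel",
            headers={"Authorization": f"Basic {auth}"},
        )
    )
)
trace.set_tracer_provider(provider)
```

`register()` then instruments LlamaIndex against whichever tracer provider was configured, whether it came from Phoenix or from this Langfuse wiring.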
qtype/interpreter/tools/function_tool_helper.py
ADDED

@@ -0,0 +1,265 @@
+"""Helper mixin for creating LlamaIndex FunctionTools from QType definitions."""
+
+from __future__ import annotations
+
+import importlib
+import logging
+from typing import Any
+
+from llama_index.core.bridge.pydantic import BaseModel
+from llama_index.core.bridge.pydantic import Field as PydanticField
+from llama_index.core.tools import FunctionTool, ToolMetadata
+from pydantic import create_model
+
+from qtype.base.types import PrimitiveTypeEnum
+from qtype.dsl.model import ListType
+from qtype.dsl.types import PRIMITIVE_TO_PYTHON_TYPE
+from qtype.semantic.model import APITool, PythonFunctionTool, ToolParameter
+
+logger = logging.getLogger(__name__)
+
+
+class FunctionToolHelper:
+    """Mixin providing utilities for creating LlamaIndex FunctionTools.
+
+    This mixin provides methods to convert QType tool definitions
+    (APITool, PythonFunctionTool) into LlamaIndex FunctionTool instances
+    with proper metadata and Pydantic schemas.
+    """
+
+    @staticmethod
+    def _qtype_type_to_python_type(
+        param: ToolParameter,
+    ) -> type:
+        """Convert QType ToolParameter type to Python type for Pydantic.
+
+        The param.type has already been resolved during semantic model
+        creation, so we just need to convert it to the appropriate Python
+        type:
+        - Primitive types → Python type via PRIMITIVE_TO_PYTHON_TYPE
+        - BaseModel subclasses (domain/custom types) → pass through
+        - List types → list[element_type] (recursively resolved)
+        - Unknown → str
+
+        Args:
+            param: The QType ToolParameter to convert.
+
+        Returns:
+            Python type suitable for Pydantic field annotation.
+        """
+        # Handle primitive types
+        if isinstance(param.type, PrimitiveTypeEnum):
+            return PRIMITIVE_TO_PYTHON_TYPE[param.type]
+
+        # Handle list types - recursively resolve element type
+        if isinstance(param.type, ListType):
+            # Create a mock parameter with the element type to recursively
+            # resolve it
+            element_param = ToolParameter(
+                type=param.type.element_type,
+                optional=False,
+            )
+            element_python_type = (
+                FunctionToolHelper._qtype_type_to_python_type(element_param)
+            )
+            return list[element_python_type]  # type: ignore[valid-type]
+
+        # Handle domain/custom types (BaseModel subclasses)
+        if isinstance(param.type, type) and issubclass(param.type, BaseModel):
+            return param.type
+
+        # For unresolved string references or unknown types, default to str
+        return str
+
+    @staticmethod
+    def _create_fn_schema(
+        tool_name: str,
+        inputs: dict[str, ToolParameter],
+    ) -> type[BaseModel] | None:
+        """Create a Pydantic model from QType tool input parameters.
+
+        Args:
+            tool_name: Name of the tool (used for model name).
+            inputs: Dictionary of input parameter names to ToolParameter.
+
+        Returns:
+            Pydantic BaseModel class representing the tool's input schema.
+            Returns an empty BaseModel if there are no inputs (required by
+            LlamaIndex ReActAgent).
+        """
+        # Build field definitions for Pydantic model
+        # Each field is a tuple of (type_annotation, field_info)
+        field_definitions: dict[str, Any] = {}
+
+        for param_name, param in inputs.items():
+            python_type = FunctionToolHelper._qtype_type_to_python_type(param)
+
+            # Create field with optional annotation
+            if param.optional:
+                field_definitions[param_name] = (
+                    python_type | None,  # type: ignore[valid-type]
+                    PydanticField(default=None),
+                )
+            else:
+                field_definitions[param_name] = (
+                    python_type,
+                    PydanticField(...),
+                )
+
+        # Create dynamic Pydantic model
+        model_name = f"{tool_name.replace('-', '_').replace('.', '_')}_Input"
+        return create_model(model_name, **field_definitions)  # type: ignore[call-overload]
+
+    @staticmethod
+    def _create_tool_metadata(
+        tool: APITool | PythonFunctionTool,
+    ) -> ToolMetadata:
+        """Create ToolMetadata from a QType tool definition.
+
+        Args:
+            tool: The QType tool (API or Python function).
+
+        Returns:
+            ToolMetadata for use with FunctionTool.
+        """
+        # Create Pydantic schema from tool inputs
+        fn_schema = FunctionToolHelper._create_fn_schema(
+            tool.name, tool.inputs
+        )
+
+        return ToolMetadata(
+            name=tool.name,
+            description=tool.description,
+            fn_schema=fn_schema,
+            return_direct=False,
+        )
+
+    def _create_python_function_tool(
+        self, tool: PythonFunctionTool
+    ) -> FunctionTool:
+        """Create a FunctionTool for a Python function.
+
+        For Python functions, we import and wrap the actual function,
+        allowing LlamaIndex to access its signature while routing
+        execution through our wrapper for consistent error handling,
+        logging, and telemetry across all tool invocations.
+
+        Args:
+            tool: The Python function tool definition.
+
+        Returns:
+            LlamaIndex FunctionTool wrapping the Python function.
+
+        Raises:
+            ValueError: If the function cannot be imported.
+        """
+        try:
+            # Import the actual Python function
+            module = importlib.import_module(tool.module_path)
+            function = getattr(module, tool.function_name, None)
+            if function is None:
+                raise ValueError(
+                    (
+                        f"Function '{tool.function_name}' not found in "
+                        f"module '{tool.module_path}'"
+                    )
+                )
+
+            # Create metadata from QType tool definition
+            metadata = FunctionToolHelper._create_tool_metadata(tool)
+
+            # Create wrapper that validates inputs using Pydantic schema
+            # before calling the function through execution
+            # This maintains consistent error handling and hooks
+            async def wrapped_fn(**kwargs: Any) -> Any:
+                # Keep original kwargs for streaming events (JSON-compatible)
+                original_kwargs = kwargs.copy()
+
+                # Validate and parse inputs using the Pydantic schema
+                if metadata.fn_schema is not None:
+                    validated_inputs = metadata.fn_schema(**kwargs)
+                    # Convert Pydantic model to dict with Python native types
+                    # (datetime objects, etc.)
+                    kwargs = validated_inputs.model_dump(mode="python")
+
+                # Pass both the validated kwargs and original for streaming
+                return await self.execute_python_tool(  # type: ignore[attr-defined]
+                    tool, kwargs, original_inputs=original_kwargs
+                )
+
+            return FunctionTool(
+                fn=None,
+                async_fn=wrapped_fn,
+                metadata=metadata,
+            )
+
+        except (ImportError, AttributeError) as e:
+            raise ValueError(
+                (
+                    f"Failed to import Python function "
+                    f"'{tool.function_name}' "
+                    f"from '{tool.module_path}': {e}"
+                )
+            ) from e
+
+    def _create_api_tool(self, tool: APITool) -> FunctionTool:
+        """Create a FunctionTool for an API endpoint.
+
+        Wraps the API tool execution in a function that can be called
+        by LlamaIndex agents, handling authentication, request formatting,
+        and error handling consistently.
+
+        Args:
+            tool: The API tool definition.
+
+        Returns:
+            LlamaIndex FunctionTool wrapping the API tool execution.
+        """
+        # Create metadata from QType tool definition
+        metadata = FunctionToolHelper._create_tool_metadata(tool)
+
+        async def api_wrapper(**kwargs: Any) -> Any:
+            """Wrapper function that executes the API tool."""
+            # Keep original kwargs for streaming events (JSON-compatible)
+            original_kwargs = kwargs.copy()
+
+            # Validate and parse inputs using the Pydantic schema
+            if metadata.fn_schema is not None:
+                validated_inputs = metadata.fn_schema(**kwargs)
+                # Convert Pydantic model to dict for execution
+                kwargs = validated_inputs.model_dump(mode="python")
+
+            # Pass both the validated kwargs and original for streaming
+            return await self.execute_api_tool(  # type: ignore[attr-defined]
+                tool, kwargs, original_inputs=original_kwargs
+            )
+
+        return FunctionTool(
+            fn=None,
+            async_fn=api_wrapper,
+            metadata=metadata,
+        )
+
+    def _create_function_tool(
+        self, tool: APITool | PythonFunctionTool
+    ) -> FunctionTool:
+        """Create a LlamaIndex FunctionTool from a QType tool definition.
+
+        Dispatches to specialized methods based on tool type for optimal
+        handling while maintaining consistent metadata generation.
+
+        Args:
+            tool: The QType tool (API or Python function).
+
+        Returns:
+            LlamaIndex FunctionTool wrapping the tool execution.
+
+        Raises:
+            ValueError: If the tool type is unsupported.
+        """
+        if isinstance(tool, PythonFunctionTool):
+            return self._create_python_function_tool(tool)
+        elif isinstance(tool, APITool):
+            return self._create_api_tool(tool)
+        else:
+            raise ValueError(f"Unsupported tool type: {type(tool)}")
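
The core trick in `_create_fn_schema` is Pydantic's `create_model`, which turns each tool parameter into a `(type, FieldInfo)` pair on a dynamically built input model. A standalone sketch of the same pattern with plain Pydantic (the tool and field names are illustrative only):

```python
from pydantic import Field, create_model

# Required parameters use Field(...); optional ones get `| None` plus a default,
# mirroring how FunctionToolHelper builds fn_schema from ToolParameter entries.
WeatherInput = create_model(
    "get_weather_Input",
    city=(str, Field(...)),
    units=(str | None, Field(default=None)),
)

print(WeatherInput(city="Oslo").model_dump())
# -> {'city': 'Oslo', 'units': None}
```

The helper uses the Field re-exported through `llama_index.core.bridge.pydantic`, but the behavior is the same as plain `pydantic.Field` shown here.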