qtype 0.0.16__py3-none-any.whl → 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qtype/application/commons/tools.py +1 -1
- qtype/application/converters/tools_from_api.py +5 -5
- qtype/application/converters/tools_from_module.py +2 -2
- qtype/application/converters/types.py +14 -43
- qtype/application/documentation.py +1 -1
- qtype/application/facade.py +94 -73
- qtype/base/types.py +227 -7
- qtype/cli.py +4 -0
- qtype/commands/convert.py +20 -8
- qtype/commands/generate.py +19 -27
- qtype/commands/run.py +73 -36
- qtype/commands/serve.py +74 -54
- qtype/commands/validate.py +34 -8
- qtype/commands/visualize.py +46 -22
- qtype/dsl/__init__.py +6 -5
- qtype/dsl/custom_types.py +1 -1
- qtype/dsl/domain_types.py +65 -5
- qtype/dsl/linker.py +384 -0
- qtype/dsl/loader.py +315 -0
- qtype/dsl/model.py +612 -363
- qtype/dsl/parser.py +200 -0
- qtype/dsl/types.py +50 -0
- qtype/interpreter/api.py +57 -136
- qtype/interpreter/auth/aws.py +19 -9
- qtype/interpreter/auth/generic.py +93 -16
- qtype/interpreter/base/base_step_executor.py +436 -0
- qtype/interpreter/base/batch_step_executor.py +171 -0
- qtype/interpreter/base/exceptions.py +50 -0
- qtype/interpreter/base/executor_context.py +74 -0
- qtype/interpreter/base/factory.py +117 -0
- qtype/interpreter/base/progress_tracker.py +110 -0
- qtype/interpreter/base/secrets.py +339 -0
- qtype/interpreter/base/step_cache.py +74 -0
- qtype/interpreter/base/stream_emitter.py +469 -0
- qtype/interpreter/conversions.py +462 -22
- qtype/interpreter/converters.py +77 -0
- qtype/interpreter/endpoints.py +355 -0
- qtype/interpreter/executors/agent_executor.py +242 -0
- qtype/interpreter/executors/aggregate_executor.py +93 -0
- qtype/interpreter/executors/decoder_executor.py +163 -0
- qtype/interpreter/executors/doc_to_text_executor.py +112 -0
- qtype/interpreter/executors/document_embedder_executor.py +107 -0
- qtype/interpreter/executors/document_search_executor.py +122 -0
- qtype/interpreter/executors/document_source_executor.py +118 -0
- qtype/interpreter/executors/document_splitter_executor.py +105 -0
- qtype/interpreter/executors/echo_executor.py +63 -0
- qtype/interpreter/executors/field_extractor_executor.py +160 -0
- qtype/interpreter/executors/file_source_executor.py +101 -0
- qtype/interpreter/executors/file_writer_executor.py +110 -0
- qtype/interpreter/executors/index_upsert_executor.py +228 -0
- qtype/interpreter/executors/invoke_embedding_executor.py +92 -0
- qtype/interpreter/executors/invoke_flow_executor.py +51 -0
- qtype/interpreter/executors/invoke_tool_executor.py +358 -0
- qtype/interpreter/executors/llm_inference_executor.py +272 -0
- qtype/interpreter/executors/prompt_template_executor.py +78 -0
- qtype/interpreter/executors/sql_source_executor.py +106 -0
- qtype/interpreter/executors/vector_search_executor.py +91 -0
- qtype/interpreter/flow.py +159 -22
- qtype/interpreter/metadata_api.py +115 -0
- qtype/interpreter/resource_cache.py +5 -4
- qtype/interpreter/rich_progress.py +225 -0
- qtype/interpreter/stream/chat/__init__.py +15 -0
- qtype/interpreter/stream/chat/converter.py +391 -0
- qtype/interpreter/{chat → stream/chat}/file_conversions.py +2 -2
- qtype/interpreter/stream/chat/ui_request_to_domain_type.py +140 -0
- qtype/interpreter/stream/chat/vercel.py +609 -0
- qtype/interpreter/stream/utils/__init__.py +15 -0
- qtype/interpreter/stream/utils/build_vercel_ai_formatter.py +74 -0
- qtype/interpreter/stream/utils/callback_to_stream.py +66 -0
- qtype/interpreter/stream/utils/create_streaming_response.py +18 -0
- qtype/interpreter/stream/utils/default_chat_extract_text.py +20 -0
- qtype/interpreter/stream/utils/error_streaming_response.py +20 -0
- qtype/interpreter/telemetry.py +135 -8
- qtype/interpreter/tools/__init__.py +5 -0
- qtype/interpreter/tools/function_tool_helper.py +265 -0
- qtype/interpreter/types.py +330 -0
- qtype/interpreter/typing.py +83 -89
- qtype/interpreter/ui/404/index.html +1 -1
- qtype/interpreter/ui/404.html +1 -1
- qtype/interpreter/ui/_next/static/{nUaw6_IwRwPqkzwe5s725 → 20HoJN6otZ_LyHLHpCPE6}/_buildManifest.js +1 -1
- qtype/interpreter/ui/_next/static/chunks/{393-8fd474427f8e19ce.js → 434-b2112d19f25c44ff.js} +3 -3
- qtype/interpreter/ui/_next/static/chunks/app/page-8c67d16ac90d23cb.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/ba12c10f-546f2714ff8abc66.js +1 -0
- qtype/interpreter/ui/_next/static/css/8a8d1269e362fef7.css +3 -0
- qtype/interpreter/ui/icon.png +0 -0
- qtype/interpreter/ui/index.html +1 -1
- qtype/interpreter/ui/index.txt +4 -4
- qtype/semantic/checker.py +583 -0
- qtype/semantic/generate.py +262 -83
- qtype/semantic/loader.py +95 -0
- qtype/semantic/model.py +436 -159
- qtype/semantic/resolver.py +63 -19
- qtype/semantic/visualize.py +28 -31
- {qtype-0.0.16.dist-info → qtype-0.1.1.dist-info}/METADATA +16 -3
- qtype-0.1.1.dist-info/RECORD +135 -0
- qtype/dsl/base_types.py +0 -38
- qtype/dsl/validator.py +0 -465
- qtype/interpreter/batch/__init__.py +0 -0
- qtype/interpreter/batch/file_sink_source.py +0 -162
- qtype/interpreter/batch/flow.py +0 -95
- qtype/interpreter/batch/sql_source.py +0 -92
- qtype/interpreter/batch/step.py +0 -74
- qtype/interpreter/batch/types.py +0 -41
- qtype/interpreter/batch/utils.py +0 -178
- qtype/interpreter/chat/chat_api.py +0 -237
- qtype/interpreter/chat/vercel.py +0 -314
- qtype/interpreter/exceptions.py +0 -10
- qtype/interpreter/step.py +0 -67
- qtype/interpreter/steps/__init__.py +0 -0
- qtype/interpreter/steps/agent.py +0 -114
- qtype/interpreter/steps/condition.py +0 -36
- qtype/interpreter/steps/decoder.py +0 -88
- qtype/interpreter/steps/llm_inference.py +0 -171
- qtype/interpreter/steps/prompt_template.py +0 -54
- qtype/interpreter/steps/search.py +0 -24
- qtype/interpreter/steps/tool.py +0 -219
- qtype/interpreter/streaming_helpers.py +0 -123
- qtype/interpreter/ui/_next/static/chunks/app/page-7e26b6156cfb55d3.js +0 -1
- qtype/interpreter/ui/_next/static/chunks/ba12c10f-22556063851a6df2.js +0 -1
- qtype/interpreter/ui/_next/static/css/b40532b0db09cce3.css +0 -3
- qtype/interpreter/ui/favicon.ico +0 -0
- qtype/loader.py +0 -390
- qtype-0.0.16.dist-info/RECORD +0 -106
- /qtype/interpreter/ui/_next/static/{nUaw6_IwRwPqkzwe5s725 → 20HoJN6otZ_LyHLHpCPE6}/_ssgManifest.js +0 -0
- {qtype-0.0.16.dist-info → qtype-0.1.1.dist-info}/WHEEL +0 -0
- {qtype-0.0.16.dist-info → qtype-0.1.1.dist-info}/entry_points.txt +0 -0
- {qtype-0.0.16.dist-info → qtype-0.1.1.dist-info}/licenses/LICENSE +0 -0
- {qtype-0.0.16.dist-info → qtype-0.1.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,330 @@
|
|
|
1
|
+
from typing import Any, Dict, Literal, Optional, Protocol, Union
|
|
2
|
+
|
|
3
|
+
from pydantic import BaseModel, ConfigDict, Field
|
|
4
|
+
|
|
5
|
+
from qtype.base.types import StrictBaseModel
|
|
6
|
+
from qtype.dsl.domain_types import ChatMessage
|
|
7
|
+
from qtype.semantic.model import Step
|
|
8
|
+
|
|
9
|
+
# Stream Event Types (Discriminated Union)
|
|
10
|
+
# These events are emitted by executors during flow execution
|
|
11
|
+
# and can be converted to Vercel UI chunks for frontend display
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class TextStreamStartEvent(BaseModel):
    """Signals the start of incremental text streaming.

    Use this when beginning to stream LLM-generated content or other
    incremental text output. Must be followed by TextStreamDeltaEvents
    and eventually a TextStreamEndEvent with the same stream_id.

    Maps to: TextStartChunk in Vercel protocol
    """

    # Discriminator value for the StreamEvent union.
    type: Literal["text_stream_start"] = "text_stream_start"
    # The semantic-model step that is emitting this stream.
    step: Step
    stream_id: str = Field(
        description="Unique ID to correlate start/delta/end events"
    )
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class TextStreamDeltaEvent(BaseModel):
    """Carries an incremental chunk of text content.

    Use this for streaming LLM responses or other incremental text.
    The delta represents a small piece of text to append to the stream.

    Maps to: TextDeltaChunk in Vercel protocol
    """

    # Discriminator value for the StreamEvent union.
    type: Literal["text_stream_delta"] = "text_stream_delta"
    # The semantic-model step that is emitting this stream.
    step: Step
    stream_id: str = Field(
        description="Must match the stream_id from TextStreamStartEvent"
    )
    delta: str = Field(description="Incremental text content to append")
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
class TextStreamEndEvent(BaseModel):
    """Signals the completion of incremental text streaming.

    Use this to mark the end of a text stream. After this event,
    no more deltas should be sent for this stream_id.

    Maps to: TextEndChunk in Vercel protocol
    """

    # Discriminator value for the StreamEvent union.
    type: Literal["text_stream_end"] = "text_stream_end"
    # The semantic-model step that is emitting this stream.
    step: Step
    stream_id: str = Field(
        description="Must match the stream_id from TextStreamStartEvent"
    )
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
class ReasoningStreamStartEvent(BaseModel):
    """Signals the start of incremental reasoning streaming.

    Use this when an agent begins outputting reasoning/thinking steps.
    Must be followed by ReasoningStreamDeltaEvents and eventually
    a ReasoningStreamEndEvent with the same stream_id.

    Maps to: ReasoningStartChunk in Vercel protocol
    """

    # Discriminator value for the StreamEvent union.
    type: Literal["reasoning_stream_start"] = "reasoning_stream_start"
    # The semantic-model step that is emitting this stream.
    step: Step
    stream_id: str = Field(
        description="Unique ID to correlate start/delta/end events"
    )
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
class ReasoningStreamDeltaEvent(BaseModel):
    """Carries an incremental chunk of reasoning content.

    Use this for streaming agent reasoning/thinking steps.
    The delta represents a small piece of reasoning text to append.

    Maps to: ReasoningDeltaChunk in Vercel protocol
    """

    # Discriminator value for the StreamEvent union.
    type: Literal["reasoning_stream_delta"] = "reasoning_stream_delta"
    # The semantic-model step that is emitting this stream.
    step: Step
    stream_id: str = Field(
        description="Must match the stream_id from ReasoningStreamStartEvent"
    )
    delta: str = Field(description="Incremental reasoning content to append")
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
class ReasoningStreamEndEvent(BaseModel):
    """Signals the completion of incremental reasoning streaming.

    Use this to mark the end of a reasoning stream. After this event,
    no more deltas should be sent for this stream_id.

    Maps to: ReasoningEndChunk in Vercel protocol
    """

    # Discriminator value for the StreamEvent union.
    type: Literal["reasoning_stream_end"] = "reasoning_stream_end"
    # The semantic-model step that is emitting this stream.
    step: Step
    stream_id: str = Field(
        description="Must match the stream_id from ReasoningStreamStartEvent"
    )
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
class StatusEvent(BaseModel):
    """Reports a complete status message from a step.

    Use this for non-streaming status updates like:
    - "Writing 3 records to file.csv"
    - "Processing document..."
    - "Search completed: found 5 results"

    Maps to: StartStepChunk + TextStartChunk + TextDeltaChunk +
    TextEndChunk + FinishStepChunk (wrapped as a complete step)
    """

    # Discriminator value for the StreamEvent union.
    type: Literal["status"] = "status"
    # The semantic-model step that produced this status update.
    step: Step
    message: str = Field(description="Complete status message to display")
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
class StepStartEvent(BaseModel):
    """Marks the beginning of a logical step boundary.

    Use this to group related events together visually in the UI.
    Must be paired with a StepEndEvent.

    Maps to: StartStepChunk in Vercel protocol
    """

    # Discriminator value for the StreamEvent union.
    type: Literal["step_start"] = "step_start"
    # The semantic-model step whose boundary is opening.
    step: Step
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
class StepEndEvent(BaseModel):
    """Marks the end of a logical step boundary.

    Use this to close a step boundary opened by StepStartEvent.

    Maps to: FinishStepChunk in Vercel protocol
    """

    # Discriminator value for the StreamEvent union.
    type: Literal["step_end"] = "step_end"
    # The semantic-model step whose boundary is closing.
    step: Step
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
class ToolExecutionStartEvent(BaseModel):
    """Signals the start of tool execution.

    Use this when a tool is about to be invoked, either by an LLM
    or by a tool executor.

    Maps to: ToolInputAvailableChunk in Vercel protocol
    """

    # Discriminator value for the StreamEvent union.
    type: Literal["tool_execution_start"] = "tool_execution_start"
    # The semantic-model step that is invoking the tool.
    step: Step
    tool_call_id: str = Field(description="Unique identifier for this call")
    tool_name: str = Field(description="Name of the tool being executed")
    tool_input: dict[str, Any] = Field(
        description="Input parameters for the tool"
    )
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
class ToolExecutionEndEvent(BaseModel):
    """Signals the completion of tool execution.

    Use this when a tool has finished executing successfully.

    Maps to: ToolOutputAvailableChunk in Vercel protocol
    """

    # Discriminator value for the StreamEvent union.
    type: Literal["tool_execution_end"] = "tool_execution_end"
    # The semantic-model step that invoked the tool.
    step: Step
    tool_call_id: str = Field(
        description="Must match tool_call_id from ToolExecutionStartEvent"
    )
    tool_output: Any = Field(description="Output returned by the tool")
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
class ToolExecutionErrorEvent(BaseModel):
    """Signals that tool execution failed.

    Use this when a tool encounters an error during execution.

    Maps to: ToolOutputErrorChunk in Vercel protocol
    """

    # Discriminator value for the StreamEvent union.
    type: Literal["tool_execution_error"] = "tool_execution_error"
    # The semantic-model step that invoked the failing tool.
    step: Step
    tool_call_id: str = Field(
        description="Must match tool_call_id from ToolExecutionStartEvent"
    )
    error_message: str = Field(description="Description of the error")
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
class ErrorEvent(BaseModel):
    """Signals a general error occurred during step execution.

    Use this for errors that aren't specific to tool execution.

    Maps to: ErrorChunk in Vercel protocol
    """

    # Discriminator value for the StreamEvent union.
    type: Literal["error"] = "error"
    # The semantic-model step in which the error occurred.
    step: Step
    error_message: str = Field(description="Description of the error")
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
# Union type for all stream events.
# Fix: ReasoningStreamStartEvent and ReasoningStreamEndEvent were each listed
# twice in the original union (harmless at runtime, since typing.Union
# deduplicates, but misleading to readers). Each member now appears exactly
# once, grouped by event family.
StreamEvent = Union[
    TextStreamStartEvent,
    TextStreamDeltaEvent,
    TextStreamEndEvent,
    ReasoningStreamStartEvent,
    ReasoningStreamDeltaEvent,
    ReasoningStreamEndEvent,
    StatusEvent,
    StepStartEvent,
    StepEndEvent,
    ToolExecutionStartEvent,
    ToolExecutionEndEvent,
    ToolExecutionErrorEvent,
    ErrorEvent,
]
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
class StreamingCallback(Protocol):
    """The async callback protocol for handling real-time stream events.

    Implementations receive each StreamEvent as it is emitted during
    flow execution and may forward it (e.g. to a UI stream).
    """

    async def __call__(self, event: StreamEvent) -> None: ...
|
|
243
|
+
|
|
244
|
+
|
|
245
|
+
class ProgressCallback(Protocol):
    """
    A protocol representing a callback function for reporting progress during a multi-step process.

    The callback is called with the following arguments:
        step_id (str): Identifier for the current step or phase.
        items_processed (int): Number of items processed so far in the current step.
        items_in_error (int): Number of items that failed so far in the current step.
        items_succeeded (int): Number of items that completed successfully so far.
        total_items (int | None): Total number of items to process in the current step, or None if unknown.
        cache_hits (int | None): Number of cache hits so far, or None if caching is not in use.
        cache_misses (int | None): Number of cache misses so far, or None if caching is not in use.

    Implementations should use this callback to provide progress updates, such as updating a progress bar or logging progress information.
    """

    def __call__(
        self,
        step_id: str,
        items_processed: int,
        items_in_error: int,
        items_succeeded: int,
        total_items: int | None,
        cache_hits: int | None,
        cache_misses: int | None,
    ) -> None: ...
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
class StepError(BaseModel):
    """A structured error object attached to a failed FlowState."""

    # ID of the step that raised the error.
    step_id: str
    # Human-readable description (str(exc) of the original exception).
    error_message: str
    # Class name of the original exception, e.g. "ValueError".
    exception_type: str
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
class Session(StrictBaseModel):
    """Represents a user session, encapsulating all relevant state and context."""

    # Fix: the docstring above was originally placed AFTER model_config, which
    # made it a bare string expression instead of the class docstring
    # (Session.__doc__ was None). NOTE(review): StrictBaseModel may already
    # forbid extra fields — confirm; the explicit config is kept so the
    # behavior is unambiguous either way.
    model_config = ConfigDict(extra="forbid")

    session_id: str = Field(
        ..., description="Unique identifier for the session."
    )
    conversation_history: list[ChatMessage] = Field(
        default_factory=list,
        description="History of messages in the conversation.",
    )
|
|
287
|
+
|
|
288
|
+
|
|
289
|
+
class FlowMessage(BaseModel):
    """
    Represents the complete state of one execution path at a point in time.
    This object is the primary data structure passed between StepExecutors.
    """

    # The user session this execution path belongs to.
    session: Session
    variables: Dict[str, Any] = Field(
        default_factory=dict,
        description="Mapping of variable IDs to their values.",
    )
    # First error encountered, if any; None while the path is healthy.
    error: Optional[StepError] = None

    def is_failed(self) -> bool:
        """Checks if this state has encountered an error."""
        return self.error is not None

    def set_error(self, step_id: str, exc: Exception) -> None:
        """Marks this state as failed, capturing error details."""
        if not self.is_failed():  # Only capture the first error
            self.error = StepError(
                step_id=step_id,
                error_message=str(exc),
                exception_type=type(exc).__name__,
            )

    # It's useful to have copy-on-write style helpers
    def copy_with_variables(
        self, new_variables: dict[str, Any]
    ) -> "FlowMessage":
        """Return a deep copy of this message with new_variables merged
        into (and overriding) the copy's variables; self is unmodified."""
        new_state = self.model_copy(deep=True)
        new_state.variables.update(new_variables)
        return new_state
|
|
322
|
+
|
|
323
|
+
|
|
324
|
+
class InterpreterError(Exception):
    """Base exception class for QType interpreter errors.

    Fix: the original docstring referenced "ProtoGen", which is not this
    package's name.

    Args:
        message: Human-readable description of the failure.
        details: Optional structured context about the error (any shape).
    """

    def __init__(self, message: str, details: Any = None) -> None:
        super().__init__(message)
        # Stored as attributes so handlers can inspect them directly
        # instead of parsing str(exc).
        self.message = message
        self.details = details
|
qtype/interpreter/typing.py
CHANGED
|
@@ -1,11 +1,13 @@
|
|
|
1
1
|
from __future__ import annotations
|
|
2
2
|
|
|
3
|
+
import uuid
|
|
3
4
|
from typing import Any, Type
|
|
4
5
|
|
|
5
6
|
from pydantic import BaseModel, Field, create_model
|
|
6
7
|
|
|
7
|
-
from qtype.application.converters.types import PRIMITIVE_TO_PYTHON_TYPE
|
|
8
8
|
from qtype.dsl.model import PrimitiveTypeEnum
|
|
9
|
+
from qtype.dsl.types import PRIMITIVE_TO_PYTHON_TYPE
|
|
10
|
+
from qtype.interpreter.types import FlowMessage, Session
|
|
9
11
|
from qtype.semantic.model import Flow, Variable
|
|
10
12
|
|
|
11
13
|
|
|
@@ -34,100 +36,92 @@ def _get_variable_type(var: Variable) -> tuple[Type, dict[str, Any]]:
|
|
|
34
36
|
return python_type, field_metadata
|
|
35
37
|
|
|
36
38
|
|
|
37
|
-
def
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
fields["flow_id"] = (str, Field(description="ID of the executed flow"))
|
|
45
|
-
fields["status"] = (str, Field(description="Execution status"))
|
|
46
|
-
|
|
47
|
-
if is_batch:
|
|
48
|
-
# Include information about the number of results, errors, etc.
|
|
49
|
-
fields["num_inputs"] = (int, Field(description="Number of inputs."))
|
|
50
|
-
fields["num_results"] = (int, Field(description="Number of results."))
|
|
51
|
-
fields["num_errors"] = (int, Field(description="Number of errors."))
|
|
52
|
-
fields["errors"] = (
|
|
53
|
-
list[dict[Any, Any]],
|
|
54
|
-
Field(description="All inputs with their associated errors."),
|
|
39
|
+
def _fields_from_variables(variables: list[Variable]) -> dict:
    """Build a ``create_model`` field mapping from flow variables.

    Each variable becomes a ``(python_type, FieldInfo)`` pair keyed by
    its id, suitable for splatting into ``pydantic.create_model``.
    """

    def _as_field(variable: Variable) -> tuple:
        # Resolve the Python type and any extra JSON-schema metadata.
        python_type, type_metadata = _get_variable_type(variable)
        field_info = Field(title=variable.id, json_schema_extra=type_metadata)
        return (python_type, field_info)

    return {variable.id: _as_field(variable) for variable in variables}
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def create_output_shape(flow: Flow) -> Type[BaseModel]:
    """Build the Pydantic model describing one output row of *flow*."""
    model_name = f"{flow.id}Result"
    output_fields = _fields_from_variables(flow.outputs)
    return create_model(model_name, __base__=BaseModel, **output_fields)  # type: ignore
|
|
56
57
|
|
|
57
|
-
# Add dynamic output fields
|
|
58
|
-
if flow.outputs:
|
|
59
|
-
output_fields = {}
|
|
60
|
-
for var in flow.outputs:
|
|
61
|
-
python_type, type_metadata = _get_variable_type(var)
|
|
62
|
-
|
|
63
|
-
# Make type optional for batch processing since rows might have missing values
|
|
64
|
-
if is_batch:
|
|
65
|
-
from typing import Union
|
|
66
|
-
|
|
67
|
-
python_type = Union[python_type, type(None)] # type: ignore
|
|
68
|
-
|
|
69
|
-
field_info = Field(
|
|
70
|
-
# TODO: grok the description from the variable if available
|
|
71
|
-
# description=f"Output for {var.id}",
|
|
72
|
-
title=var.id,
|
|
73
|
-
json_schema_extra=type_metadata,
|
|
74
|
-
)
|
|
75
|
-
output_fields[var.id] = (python_type, field_info)
|
|
76
|
-
|
|
77
|
-
# Create nested outputs model
|
|
78
|
-
outputs_model: Type[BaseModel] = create_model(
|
|
79
|
-
f"{flow.id}Outputs",
|
|
80
|
-
__base__=BaseModel,
|
|
81
|
-
**output_fields,
|
|
82
|
-
) # type: ignore
|
|
83
|
-
if is_batch:
|
|
84
|
-
fields["outputs"] = (
|
|
85
|
-
list[outputs_model], # type: ignore
|
|
86
|
-
Field(description="List of flow execution outputs"),
|
|
87
|
-
)
|
|
88
|
-
else:
|
|
89
|
-
fields["outputs"] = (
|
|
90
|
-
outputs_model,
|
|
91
|
-
Field(description="Flow execution outputs"),
|
|
92
|
-
)
|
|
93
|
-
else:
|
|
94
|
-
fields["outputs"] = (
|
|
95
|
-
dict[str, Any],
|
|
96
|
-
Field(description="Flow execution outputs"),
|
|
97
|
-
) # type: ignore
|
|
98
58
|
|
|
59
|
+
def create_output_container_type(flow: Flow) -> Type[BaseModel]:
    """Dynamically create a Pydantic response model for a flow.

    Always returns a batch-style response with a list of outputs.
    """
    row_model: Type[BaseModel] = create_output_shape(flow)

    container_fields: dict[str, tuple[Any, Any]] = {
        "errors": (
            list[dict[Any, Any]],
            Field(description="List of errored execution outputs"),
        ),
        "outputs": (
            list[row_model],
            Field(description="List of successful execution outputs"),
        ),
    }
    return create_model(f"{flow.id}Response", __base__=BaseModel, **container_fields)  # type: ignore
|
|
100
76
|
|
|
101
77
|
|
|
102
|
-
def
|
|
78
|
+
def create_input_shape(flow: Flow) -> Type[BaseModel]:
    """Dynamically create a Pydantic request model for a flow."""
    model_name = f"{flow.id}Request"
    input_fields = _fields_from_variables(flow.inputs)
    return create_model(model_name, __base__=BaseModel, **input_fields)  # type: ignore
|
|
109
85
|
|
|
110
|
-
fields = {}
|
|
111
|
-
for var in flow.inputs:
|
|
112
|
-
python_type, type_metadata = _get_variable_type(var)
|
|
113
|
-
field_info = Field(
|
|
114
|
-
# TODO: grok the description from the variable if available
|
|
115
|
-
# description=f"Input for {var.id}",
|
|
116
|
-
title=var.id,
|
|
117
|
-
json_schema_extra=type_metadata,
|
|
118
|
-
)
|
|
119
|
-
fields[var.id] = (python_type, field_info)
|
|
120
86
|
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
87
|
+
def request_to_flow_message(request: BaseModel, **kwargs) -> FlowMessage:
    """
    Convert API input data into a FlowMessage for the interpreter.

    Args:
        request: Input request whose fields are the flow's input variables.
        **kwargs: Optional keyword arguments:
            session_id (str): Session ID for conversational flows; a random
                UUID string is generated when not provided.
            conversation_history (list[ChatMessage]): Prior messages for
                the session; defaults to an empty history.

    Returns:
        FlowMessage ready for execution
    """
    # Fix: the original docstring documented a nonexistent ``flow`` parameter
    # and omitted ``conversation_history``.
    session_id = kwargs.get("session_id", str(uuid.uuid4()))
    conversation_history = kwargs.get("conversation_history", [])

    session = Session(
        session_id=session_id, conversation_history=conversation_history
    )

    # Read values via getattr (not model_dump values) so the variables keep
    # their original Python objects rather than serialized copies. The loop
    # variable is named ``name`` to avoid shadowing the builtin ``id``.
    variables = {
        name: getattr(request, name) for name in request.model_dump()
    }

    return FlowMessage(session=session, variables=variables)
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def flow_results_to_output_container(
    messages: list[FlowMessage],
    output_shape: Type[BaseModel],
    output_container: Type[BaseModel],
) -> BaseModel:
    """Fold executed FlowMessages into a response-container instance.

    Failed messages contribute their StepError dump to ``errors``; each
    successful message's variables are validated through ``output_shape``
    and dumped into ``outputs``.

    Args:
        messages: Final FlowMessages produced by flow execution.
        output_shape: Per-row model from ``create_output_shape``.
        output_container: Container model from ``create_output_container_type``.

    Returns:
        An ``output_container`` instance with ``outputs`` and ``errors`` lists.
    """
    outputs = []
    errors = []
    for m in messages:
        if m.is_failed() and m.error is not None:
            errors.append(m.error.model_dump())
        else:
            # Validating through output_shape drops variables that are not
            # declared flow outputs and enforces their types.
            output_instance = output_shape(**m.variables)
            outputs.append(output_instance.model_dump())

    return output_container(outputs=outputs, errors=errors)
|
|
@@ -1 +1 @@
|
|
|
1
|
-
<!DOCTYPE html><!--
|
|
1
|
+
<!DOCTYPE html><!--20HoJN6otZ_LyHLHpCPE6--><html lang="en"><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="preload" href="./_next/static/media/4cf2300e9c8272f7-s.p.woff2" as="font" crossorigin="" type="font/woff2"/><link rel="preload" href="./_next/static/media/93f479601ee12b01-s.p.woff2" as="font" crossorigin="" type="font/woff2"/><link rel="stylesheet" href="./_next/static/css/8a8d1269e362fef7.css" data-precedence="next"/><link rel="preload" as="script" fetchPriority="low" href="./_next/static/chunks/webpack-08642e441b39b6c2.js"/><script src="./_next/static/chunks/4bd1b696-cf72ae8a39fa05aa.js" async=""></script><script src="./_next/static/chunks/964-2b041321a01cbf56.js" async=""></script><script src="./_next/static/chunks/main-app-6fc6346bc8f7f163.js" async=""></script><meta name="robots" content="noindex"/><meta name="next-size-adjust" content=""/><title>404: This page could not be found.</title><title>QType</title><meta name="description" content="DSL for rapid prototyping of AI applications"/><link rel="icon" href="/ui/icon.png?5369ea17987f04b5" type="image/png" sizes="907x907"/><script src="./_next/static/chunks/polyfills-42372ed130431b0a.js" noModule=""></script></head><body class="__variable_c57559 __variable_152ec0 antialiased"><div hidden=""><!--$--><!--/$--></div><div style="font-family:system-ui,"Segoe UI",Roboto,Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji";height:100vh;text-align:center;display:flex;flex-direction:column;align-items:center;justify-content:center"><div><style>body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}</style><h1 class="next-error-h1" style="display:inline-block;margin:0 20px 0 0;padding:0 23px 0 0;font-size:24px;font-weight:500;vertical-align:top;line-height:49px">404</h1><div 
style="display:inline-block"><h2 style="font-size:14px;font-weight:400;line-height:49px;margin:0">This page could not be found.</h2></div></div></div><!--$--><!--/$--><script src="./_next/static/chunks/webpack-08642e441b39b6c2.js" id="_R_" async=""></script><script>(self.__next_f=self.__next_f||[]).push([0])</script><script>self.__next_f.push([1,"1:\"$Sreact.fragment\"\n2:I[7555,[],\"\"]\n3:I[1295,[],\"\"]\n4:I[9665,[],\"OutletBoundary\"]\n6:I[4911,[],\"AsyncMetadataOutlet\"]\n8:I[9665,[],\"ViewportBoundary\"]\na:I[9665,[],\"MetadataBoundary\"]\nb:\"$Sreact.suspense\"\nd:I[8393,[],\"\"]\n:HL[\"./_next/static/media/4cf2300e9c8272f7-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n:HL[\"./_next/static/media/93f479601ee12b01-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n:HL[\"./_next/static/css/8a8d1269e362fef7.css\",\"style\"]\n"])</script><script>self.__next_f.push([1,"0:{\"P\":null,\"b\":\"20HoJN6otZ_LyHLHpCPE6\",\"p\":\".\",\"c\":[\"\",\"_not-found\",\"\"],\"i\":false,\"f\":[[[\"\",{\"children\":[\"/_not-found\",{\"children\":[\"__PAGE__\",{}]}]},\"$undefined\",\"$undefined\",true],[\"\",[\"$\",\"$1\",\"c\",{\"children\":[[[\"$\",\"link\",\"0\",{\"rel\":\"stylesheet\",\"href\":\"./_next/static/css/8a8d1269e362fef7.css\",\"precedence\":\"next\",\"crossOrigin\":\"$undefined\",\"nonce\":\"$undefined\"}]],[\"$\",\"html\",null,{\"lang\":\"en\",\"children\":[\"$\",\"body\",null,{\"className\":\"__variable_c57559 __variable_152ec0 
antialiased\",\"children\":[\"$\",\"$L2\",null,{\"parallelRouterKey\":\"children\",\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L3\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":\"$undefined\",\"forbidden\":\"$undefined\",\"unauthorized\":\"$undefined\"}]}]}]]}],{\"children\":[\"/_not-found\",[\"$\",\"$1\",\"c\",{\"children\":[null,[\"$\",\"$L2\",null,{\"parallelRouterKey\":\"children\",\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L3\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":\"$undefined\",\"forbidden\":\"$undefined\",\"unauthorized\":\"$undefined\"}]]}],{\"children\":[\"__PAGE__\",[\"$\",\"$1\",\"c\",{\"children\":[[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":{\"fontFamily\":\"system-ui,\\\"Segoe UI\\\",Roboto,Helvetica,Arial,sans-serif,\\\"Apple Color Emoji\\\",\\\"Segoe UI Emoji\\\"\",\"height\":\"100vh\",\"textAlign\":\"center\",\"display\":\"flex\",\"flexDirection\":\"column\",\"alignItems\":\"center\",\"justifyContent\":\"center\"},\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":{\"display\":\"inline-block\",\"margin\":\"0 20px 0 0\",\"padding\":\"0 23px 0 
0\",\"fontSize\":24,\"fontWeight\":500,\"verticalAlign\":\"top\",\"lineHeight\":\"49px\"},\"children\":404}],[\"$\",\"div\",null,{\"style\":{\"display\":\"inline-block\"},\"children\":[\"$\",\"h2\",null,{\"style\":{\"fontSize\":14,\"fontWeight\":400,\"lineHeight\":\"49px\",\"margin\":0},\"children\":\"This page could not be found.\"}]}]]}]}]],null,[\"$\",\"$L4\",null,{\"children\":[\"$L5\",[\"$\",\"$L6\",null,{\"promise\":\"$@7\"}]]}]]}],{},null,false]},null,false]},null,false],[\"$\",\"$1\",\"h\",{\"children\":[[\"$\",\"meta\",null,{\"name\":\"robots\",\"content\":\"noindex\"}],[[\"$\",\"$L8\",null,{\"children\":\"$L9\"}],[\"$\",\"meta\",null,{\"name\":\"next-size-adjust\",\"content\":\"\"}]],[\"$\",\"$La\",null,{\"children\":[\"$\",\"div\",null,{\"hidden\":true,\"children\":[\"$\",\"$b\",null,{\"fallback\":null,\"children\":\"$Lc\"}]}]}]]}],false]],\"m\":\"$undefined\",\"G\":[\"$d\",[]],\"s\":false,\"S\":true}\n"])</script><script>self.__next_f.push([1,"9:[[\"$\",\"meta\",\"0\",{\"charSet\":\"utf-8\"}],[\"$\",\"meta\",\"1\",{\"name\":\"viewport\",\"content\":\"width=device-width, initial-scale=1\"}]]\n5:null\n"])</script><script>self.__next_f.push([1,"e:I[8175,[],\"IconMark\"]\n7:{\"metadata\":[[\"$\",\"title\",\"0\",{\"children\":\"QType\"}],[\"$\",\"meta\",\"1\",{\"name\":\"description\",\"content\":\"DSL for rapid prototyping of AI applications\"}],[\"$\",\"link\",\"2\",{\"rel\":\"icon\",\"href\":\"/ui/icon.png?5369ea17987f04b5\",\"type\":\"image/png\",\"sizes\":\"907x907\"}],[\"$\",\"$Le\",\"3\",{}]],\"error\":null,\"digest\":\"$undefined\"}\n"])</script><script>self.__next_f.push([1,"c:\"$7:metadata\"\n"])</script></body></html>
|
qtype/interpreter/ui/404.html
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
<!DOCTYPE html><!--
|
|
1
|
+
<!DOCTYPE html><!--20HoJN6otZ_LyHLHpCPE6--><html lang="en"><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="preload" href="./_next/static/media/4cf2300e9c8272f7-s.p.woff2" as="font" crossorigin="" type="font/woff2"/><link rel="preload" href="./_next/static/media/93f479601ee12b01-s.p.woff2" as="font" crossorigin="" type="font/woff2"/><link rel="stylesheet" href="./_next/static/css/8a8d1269e362fef7.css" data-precedence="next"/><link rel="preload" as="script" fetchPriority="low" href="./_next/static/chunks/webpack-08642e441b39b6c2.js"/><script src="./_next/static/chunks/4bd1b696-cf72ae8a39fa05aa.js" async=""></script><script src="./_next/static/chunks/964-2b041321a01cbf56.js" async=""></script><script src="./_next/static/chunks/main-app-6fc6346bc8f7f163.js" async=""></script><meta name="robots" content="noindex"/><meta name="next-size-adjust" content=""/><title>404: This page could not be found.</title><title>QType</title><meta name="description" content="DSL for rapid prototyping of AI applications"/><link rel="icon" href="/ui/icon.png?5369ea17987f04b5" type="image/png" sizes="907x907"/><script src="./_next/static/chunks/polyfills-42372ed130431b0a.js" noModule=""></script></head><body class="__variable_c57559 __variable_152ec0 antialiased"><div hidden=""><!--$--><!--/$--></div><div style="font-family:system-ui,"Segoe UI",Roboto,Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji";height:100vh;text-align:center;display:flex;flex-direction:column;align-items:center;justify-content:center"><div><style>body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}</style><h1 class="next-error-h1" style="display:inline-block;margin:0 20px 0 0;padding:0 23px 0 0;font-size:24px;font-weight:500;vertical-align:top;line-height:49px">404</h1><div 
style="display:inline-block"><h2 style="font-size:14px;font-weight:400;line-height:49px;margin:0">This page could not be found.</h2></div></div></div><!--$--><!--/$--><script src="./_next/static/chunks/webpack-08642e441b39b6c2.js" id="_R_" async=""></script><script>(self.__next_f=self.__next_f||[]).push([0])</script><script>self.__next_f.push([1,"1:\"$Sreact.fragment\"\n2:I[7555,[],\"\"]\n3:I[1295,[],\"\"]\n4:I[9665,[],\"OutletBoundary\"]\n6:I[4911,[],\"AsyncMetadataOutlet\"]\n8:I[9665,[],\"ViewportBoundary\"]\na:I[9665,[],\"MetadataBoundary\"]\nb:\"$Sreact.suspense\"\nd:I[8393,[],\"\"]\n:HL[\"./_next/static/media/4cf2300e9c8272f7-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n:HL[\"./_next/static/media/93f479601ee12b01-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n:HL[\"./_next/static/css/8a8d1269e362fef7.css\",\"style\"]\n"])</script><script>self.__next_f.push([1,"0:{\"P\":null,\"b\":\"20HoJN6otZ_LyHLHpCPE6\",\"p\":\".\",\"c\":[\"\",\"_not-found\",\"\"],\"i\":false,\"f\":[[[\"\",{\"children\":[\"/_not-found\",{\"children\":[\"__PAGE__\",{}]}]},\"$undefined\",\"$undefined\",true],[\"\",[\"$\",\"$1\",\"c\",{\"children\":[[[\"$\",\"link\",\"0\",{\"rel\":\"stylesheet\",\"href\":\"./_next/static/css/8a8d1269e362fef7.css\",\"precedence\":\"next\",\"crossOrigin\":\"$undefined\",\"nonce\":\"$undefined\"}]],[\"$\",\"html\",null,{\"lang\":\"en\",\"children\":[\"$\",\"body\",null,{\"className\":\"__variable_c57559 __variable_152ec0 
antialiased\",\"children\":[\"$\",\"$L2\",null,{\"parallelRouterKey\":\"children\",\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L3\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":\"$undefined\",\"forbidden\":\"$undefined\",\"unauthorized\":\"$undefined\"}]}]}]]}],{\"children\":[\"/_not-found\",[\"$\",\"$1\",\"c\",{\"children\":[null,[\"$\",\"$L2\",null,{\"parallelRouterKey\":\"children\",\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L3\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":\"$undefined\",\"forbidden\":\"$undefined\",\"unauthorized\":\"$undefined\"}]]}],{\"children\":[\"__PAGE__\",[\"$\",\"$1\",\"c\",{\"children\":[[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":{\"fontFamily\":\"system-ui,\\\"Segoe UI\\\",Roboto,Helvetica,Arial,sans-serif,\\\"Apple Color Emoji\\\",\\\"Segoe UI Emoji\\\"\",\"height\":\"100vh\",\"textAlign\":\"center\",\"display\":\"flex\",\"flexDirection\":\"column\",\"alignItems\":\"center\",\"justifyContent\":\"center\"},\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":{\"display\":\"inline-block\",\"margin\":\"0 20px 0 0\",\"padding\":\"0 23px 0 
0\",\"fontSize\":24,\"fontWeight\":500,\"verticalAlign\":\"top\",\"lineHeight\":\"49px\"},\"children\":404}],[\"$\",\"div\",null,{\"style\":{\"display\":\"inline-block\"},\"children\":[\"$\",\"h2\",null,{\"style\":{\"fontSize\":14,\"fontWeight\":400,\"lineHeight\":\"49px\",\"margin\":0},\"children\":\"This page could not be found.\"}]}]]}]}]],null,[\"$\",\"$L4\",null,{\"children\":[\"$L5\",[\"$\",\"$L6\",null,{\"promise\":\"$@7\"}]]}]]}],{},null,false]},null,false]},null,false],[\"$\",\"$1\",\"h\",{\"children\":[[\"$\",\"meta\",null,{\"name\":\"robots\",\"content\":\"noindex\"}],[[\"$\",\"$L8\",null,{\"children\":\"$L9\"}],[\"$\",\"meta\",null,{\"name\":\"next-size-adjust\",\"content\":\"\"}]],[\"$\",\"$La\",null,{\"children\":[\"$\",\"div\",null,{\"hidden\":true,\"children\":[\"$\",\"$b\",null,{\"fallback\":null,\"children\":\"$Lc\"}]}]}]]}],false]],\"m\":\"$undefined\",\"G\":[\"$d\",[]],\"s\":false,\"S\":true}\n"])</script><script>self.__next_f.push([1,"9:[[\"$\",\"meta\",\"0\",{\"charSet\":\"utf-8\"}],[\"$\",\"meta\",\"1\",{\"name\":\"viewport\",\"content\":\"width=device-width, initial-scale=1\"}]]\n5:null\n"])</script><script>self.__next_f.push([1,"e:I[8175,[],\"IconMark\"]\n7:{\"metadata\":[[\"$\",\"title\",\"0\",{\"children\":\"QType\"}],[\"$\",\"meta\",\"1\",{\"name\":\"description\",\"content\":\"DSL for rapid prototyping of AI applications\"}],[\"$\",\"link\",\"2\",{\"rel\":\"icon\",\"href\":\"/ui/icon.png?5369ea17987f04b5\",\"type\":\"image/png\",\"sizes\":\"907x907\"}],[\"$\",\"$Le\",\"3\",{}]],\"error\":null,\"digest\":\"$undefined\"}\n"])</script><script>self.__next_f.push([1,"c:\"$7:metadata\"\n"])</script></body></html>
|
qtype/interpreter/ui/_next/static/{nUaw6_IwRwPqkzwe5s725 → 20HoJN6otZ_LyHLHpCPE6}/_buildManifest.js
RENAMED
|
@@ -1 +1 @@
|
|
|
1
|
-
self.__BUILD_MANIFEST=function(e,r,t){return{__rewrites:{afterFiles:[],beforeFiles:[],fallback:[]},__routerFilterStatic:{numItems:3,errorRate:1e-4,numBits:58,numHashes:14,bitArray:[1,1,0,e,0,e,e,r,
|
|
1
|
+
self.__BUILD_MANIFEST=function(e,r,t){return{__rewrites:{afterFiles:[],beforeFiles:[],fallback:[]},__routerFilterStatic:{numItems:3,errorRate:1e-4,numBits:58,numHashes:14,bitArray:[1,1,0,e,0,e,e,r,r,e,r,e,r,e,r,e,r,r,e,r,r,r,e,e,r,e,r,r,e,r,e,e,e,e,r,e,e,r,e,e,r,r,e,r,e,r,r,e,e,e,e,e,e,e,r,r,e,e]},__routerFilterDynamic:{numItems:r,errorRate:1e-4,numBits:r,numHashes:null,bitArray:[]},"/_error":["static/chunks/pages/_error-03529f2c21436739.js"],sortedPages:["/_app","/_error"]}}(1,0,1e-4),self.__BUILD_MANIFEST_CB&&self.__BUILD_MANIFEST_CB();
|