qtype 0.0.4__py3-none-any.whl → 0.0.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qtype/commands/convert.py +18 -5
- qtype/commands/generate.py +16 -8
- qtype/commands/run.py +6 -83
- qtype/commands/serve.py +73 -0
- qtype/commands/validate.py +18 -8
- qtype/commands/visualize.py +87 -0
- qtype/commons/generate.py +9 -4
- qtype/converters/tools_from_module.py +69 -134
- qtype/converters/types.py +47 -1
- qtype/dsl/base_types.py +0 -1
- qtype/dsl/custom_types.py +73 -0
- qtype/dsl/document.py +27 -3
- qtype/dsl/domain_types.py +3 -0
- qtype/dsl/model.py +60 -73
- qtype/dsl/validator.py +20 -0
- qtype/interpreter/api.py +49 -13
- qtype/interpreter/chat/chat_api.py +237 -0
- qtype/interpreter/chat/file_conversions.py +57 -0
- qtype/interpreter/chat/vercel.py +314 -0
- qtype/interpreter/conversions.py +2 -0
- qtype/interpreter/steps/llm_inference.py +44 -19
- qtype/interpreter/streaming_helpers.py +123 -0
- qtype/interpreter/typing.py +29 -10
- qtype/interpreter/ui/404/index.html +1 -0
- qtype/interpreter/ui/404.html +1 -0
- qtype/interpreter/ui/_next/static/chunks/4bd1b696-cf72ae8a39fa05aa.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/736-7fc606e244fedcb1.js +36 -0
- qtype/interpreter/ui/_next/static/chunks/964-ed4ab073db645007.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/app/_not-found/page-e110d2a9d0a83d82.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/app/layout-107b589eb751bfb7.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/app/page-c72e847e888e549d.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/ba12c10f-22556063851a6df2.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/framework-7c95b8e5103c9e90.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/main-6d261b6c5d6fb6c2.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/main-app-6fc6346bc8f7f163.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/pages/_app-0a0020ddd67f79cf.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/pages/_error-03529f2c21436739.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/polyfills-42372ed130431b0a.js +1 -0
- qtype/interpreter/ui/_next/static/chunks/webpack-2218571d248e99d2.js +1 -0
- qtype/interpreter/ui/_next/static/css/d4ad601c4774485e.css +3 -0
- qtype/interpreter/ui/_next/static/dBTVLoSkoaoznQv-yROk9/_buildManifest.js +1 -0
- qtype/interpreter/ui/_next/static/dBTVLoSkoaoznQv-yROk9/_ssgManifest.js +1 -0
- qtype/interpreter/ui/_next/static/media/569ce4b8f30dc480-s.p.woff2 +0 -0
- qtype/interpreter/ui/_next/static/media/747892c23ea88013-s.woff2 +0 -0
- qtype/interpreter/ui/_next/static/media/8d697b304b401681-s.woff2 +0 -0
- qtype/interpreter/ui/_next/static/media/93f479601ee12b01-s.p.woff2 +0 -0
- qtype/interpreter/ui/_next/static/media/9610d9e46709d722-s.woff2 +0 -0
- qtype/interpreter/ui/_next/static/media/ba015fad6dcf6784-s.woff2 +0 -0
- qtype/interpreter/ui/favicon.ico +0 -0
- qtype/interpreter/ui/file.svg +1 -0
- qtype/interpreter/ui/globe.svg +1 -0
- qtype/interpreter/ui/index.html +1 -0
- qtype/interpreter/ui/index.txt +22 -0
- qtype/interpreter/ui/next.svg +1 -0
- qtype/interpreter/ui/vercel.svg +1 -0
- qtype/interpreter/ui/window.svg +1 -0
- qtype/loader.py +57 -8
- qtype/semantic/generate.py +17 -5
- qtype/semantic/model.py +16 -24
- qtype/semantic/visualize.py +485 -0
- {qtype-0.0.4.dist-info → qtype-0.0.6.dist-info}/METADATA +28 -20
- qtype-0.0.6.dist-info/RECORD +91 -0
- qtype-0.0.4.dist-info/RECORD +0 -50
- {qtype-0.0.4.dist-info → qtype-0.0.6.dist-info}/WHEEL +0 -0
- {qtype-0.0.4.dist-info → qtype-0.0.6.dist-info}/entry_points.txt +0 -0
- {qtype-0.0.4.dist-info → qtype-0.0.6.dist-info}/licenses/LICENSE +0 -0
- {qtype-0.0.4.dist-info → qtype-0.0.6.dist-info}/top_level.txt +0 -0
qtype/interpreter/chat/vercel.py ADDED
@@ -0,0 +1,314 @@
+"""
+Pydantic models for Vercel AI SDK UI types.
+
+This module reproduces the exact TypeScript type shapes from the AI SDK UI
+as Pydantic models for use in Python implementations.
+"""
+
+from __future__ import annotations
+
+from typing import Any, Literal, Union
+
+from pydantic import BaseModel, Field
+
+
+# Provider metadata
+class ProviderMetadata(BaseModel):
+    """Provider-specific metadata.
+
+    Reproduces: ProviderMetadata from ui/ui-message-chunks.ts
+    """
+
+    model_config = {"extra": "allow"}
+
+
+# UI Message Parts
+class TextUIPart(BaseModel):
+    """A text part of a message.
+
+    Reproduces: TextUIPart from ui/ui-messages.ts
+    """
+
+    type: Literal["text"] = "text"
+    text: str
+    state: Literal["streaming", "done"] | None = None
+    provider_metadata: ProviderMetadata | None = Field(
+        default=None, alias="providerMetadata"
+    )
+
+
+class ReasoningUIPart(BaseModel):
+    """A reasoning part of a message.
+
+    Reproduces: ReasoningUIPart from ui/ui-messages.ts
+    """
+
+    type: Literal["reasoning"] = "reasoning"
+    text: str
+    state: Literal["streaming", "done"] | None = None
+    provider_metadata: ProviderMetadata | None = Field(
+        default=None, alias="providerMetadata"
+    )
+
+
+class SourceUrlUIPart(BaseModel):
+    """A source URL part of a message.
+
+    Reproduces: SourceUrlUIPart from ui/ui-messages.ts
+    """
+
+    type: Literal["source-url"] = "source-url"
+    source_id: str = Field(alias="sourceId")
+    url: str
+    title: str | None = None
+    provider_metadata: ProviderMetadata | None = Field(
+        default=None, alias="providerMetadata"
+    )
+
+
+class SourceDocumentUIPart(BaseModel):
+    """A document source part of a message.
+
+    Reproduces: SourceDocumentUIPart from ui/ui-messages.ts
+    """
+
+    type: Literal["source-document"] = "source-document"
+    source_id: str = Field(alias="sourceId")
+    media_type: str = Field(alias="mediaType")
+    title: str
+    filename: str | None = None
+    provider_metadata: ProviderMetadata | None = Field(
+        default=None, alias="providerMetadata"
+    )
+
+
+class FileUIPart(BaseModel):
+    """A file part of a message.
+
+    Reproduces: FileUIPart from ui/ui-messages.ts
+    """
+
+    type: Literal["file"] = "file"
+    media_type: str = Field(alias="mediaType")
+    filename: str | None = None
+    url: str
+    provider_metadata: ProviderMetadata | None = Field(
+        default=None, alias="providerMetadata"
+    )
+
+
+class StepStartUIPart(BaseModel):
+    """A step boundary part of a message.
+
+    Reproduces: StepStartUIPart from ui/ui-messages.ts
+    """
+
+    type: Literal["step-start"] = "step-start"
+
+
+# Union type for UI message parts
+UIMessagePart = Union[
+    TextUIPart,
+    ReasoningUIPart,
+    SourceUrlUIPart,
+    SourceDocumentUIPart,
+    FileUIPart,
+    StepStartUIPart,
+]
+
+
+# UI Message
+class UIMessage(BaseModel):
+    """AI SDK UI Message.
+
+    Reproduces: UIMessage from ui/ui-messages.ts
+    """
+
+    id: str
+    role: Literal["system", "user", "assistant"]
+    metadata: dict[str, Any] | None = None
+    parts: list[UIMessagePart]
+
+
+# Chat Request (the request body sent from frontend)
+class ChatRequest(BaseModel):
+    """Chat request format sent from AI SDK UI/React.
+
+    Reproduces: ChatRequest from ui/chat-transport.ts
+    """
+
+    id: str  # chatId
+    messages: list[UIMessage]
+    trigger: Literal["submit-message", "regenerate-message"]
+    message_id: str | None = Field(default=None, alias="messageId")
+
+
+# UI Message Chunks (streaming events)
+class TextStartChunk(BaseModel):
+    """Text start chunk.
+
+    Reproduces: TextStartChunk from ui/ui-message-chunks.ts
+    """
+
+    type: Literal["text-start"] = "text-start"
+    id: str
+    provider_metadata: ProviderMetadata | None = Field(
+        default=None, alias="providerMetadata"
+    )
+
+
+class TextDeltaChunk(BaseModel):
+    """Text delta chunk.
+
+    Reproduces: TextDeltaChunk from ui/ui-message-chunks.ts
+    """
+
+    type: Literal["text-delta"] = "text-delta"
+    id: str
+    delta: str
+    provider_metadata: ProviderMetadata | None = Field(
+        default=None, alias="providerMetadata"
+    )
+
+
+class TextEndChunk(BaseModel):
+    """Text end chunk.
+
+    Reproduces: TextEndChunk from ui/ui-message-chunks.ts
+    """
+
+    type: Literal["text-end"] = "text-end"
+    id: str
+    provider_metadata: ProviderMetadata | None = Field(
+        default=None, alias="providerMetadata"
+    )
+
+
+class ReasoningStartChunk(BaseModel):
+    """Reasoning start chunk.
+
+    Reproduces: ReasoningStartChunk from ui/ui-message-chunks.ts
+    """
+
+    type: Literal["reasoning-start"] = "reasoning-start"
+    id: str
+    provider_metadata: ProviderMetadata | None = Field(
+        default=None, alias="providerMetadata"
+    )
+
+
+class ReasoningDeltaChunk(BaseModel):
+    """Reasoning delta chunk.
+
+    Reproduces: ReasoningDeltaChunk from ui/ui-message-chunks.ts
+    """
+
+    type: Literal["reasoning-delta"] = "reasoning-delta"
+    id: str
+    delta: str
+    provider_metadata: ProviderMetadata | None = Field(
+        default=None, alias="providerMetadata"
+    )
+
+
+class ReasoningEndChunk(BaseModel):
+    """Reasoning end chunk.
+
+    Reproduces: ReasoningEndChunk from ui/ui-message-chunks.ts
+    """
+
+    type: Literal["reasoning-end"] = "reasoning-end"
+    id: str
+    provider_metadata: ProviderMetadata | None = Field(
+        default=None, alias="providerMetadata"
+    )
+
+
+class ErrorChunk(BaseModel):
+    """Error chunk.
+
+    Reproduces: ErrorChunk from ui/ui-message-chunks.ts
+    """
+
+    type: Literal["error"] = "error"
+    error_text: str = Field(alias="errorText")
+
+
+class StartStepChunk(BaseModel):
+    """Start step chunk.
+
+    Reproduces: StartStepChunk from ui/ui-message-chunks.ts
+    """
+
+    type: Literal["start-step"] = "start-step"
+
+
+class FinishStepChunk(BaseModel):
+    """Finish step chunk.
+
+    Reproduces: FinishStepChunk from ui/ui-message-chunks.ts
+    """
+
+    type: Literal["finish-step"] = "finish-step"
+
+
+class StartChunk(BaseModel):
+    """Start chunk.
+
+    Reproduces: StartChunk from ui/ui-message-chunks.ts
+    """
+
+    type: Literal["start"] = "start"
+    message_id: str | None = Field(default=None, alias="messageId")
+    message_metadata: dict[str, Any] | None = Field(
+        default=None, alias="messageMetadata"
+    )
+
+
+class FinishChunk(BaseModel):
+    """Finish chunk.
+
+    Reproduces: FinishChunk from ui/ui-message-chunks.ts
+    """
+
+    type: Literal["finish"] = "finish"
+    message_metadata: dict[str, Any] | None = Field(
+        default=None, alias="messageMetadata"
+    )
+
+
+class AbortChunk(BaseModel):
+    """Abort chunk.
+
+    Reproduces: AbortChunk from ui/ui-message-chunks.ts
+    """
+
+    type: Literal["abort"] = "abort"
+
+
+class MessageMetadataChunk(BaseModel):
+    """Message metadata chunk.
+
+    Reproduces: MessageMetadataChunk from ui/ui-message-chunks.ts
+    """
+
+    type: Literal["message-metadata"] = "message-metadata"
+    message_metadata: dict[str, Any] = Field(alias="messageMetadata")
+
+
+# Union type for all UI message chunks
+UIMessageChunk = Union[
+    TextStartChunk,
+    TextDeltaChunk,
+    TextEndChunk,
+    ReasoningStartChunk,
+    ReasoningDeltaChunk,
+    ReasoningEndChunk,
+    ErrorChunk,
+    StartStepChunk,
+    FinishStepChunk,
+    StartChunk,
+    FinishChunk,
+    AbortChunk,
+    MessageMetadataChunk,
+]
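Because every camelCase wire field carries an explicit alias, these models validate the JSON the AI SDK frontend sends as-is, and serializing with `by_alias=True` emits the camelCase keys the data stream expects. A minimal round-trip sketch (the payload values are invented for illustration):

```python
from qtype.interpreter.chat.vercel import ChatRequest, TextDeltaChunk

# Parse an incoming request body; camelCase keys validate via the aliases.
payload = {
    "id": "chat-1",
    "trigger": "submit-message",
    "messages": [
        {
            "id": "msg-1",
            "role": "user",
            "parts": [{"type": "text", "text": "Hello!"}],
        }
    ],
}
request = ChatRequest.model_validate(payload)
assert request.messages[0].parts[0].text == "Hello!"

# Emit a streaming chunk back in wire format.
chunk = TextDeltaChunk(id="text-1", delta="Hel")
print(chunk.model_dump_json(by_alias=True, exclude_none=True))
# {"type":"text-delta","id":"text-1","delta":"Hel"}
```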
qtype/interpreter/conversions.py CHANGED
@@ -113,6 +113,8 @@ def to_content_block(content: ChatContent) -> ContentBlock:
         return AudioBlock(audio=content.content)
     elif content.type == PrimitiveTypeEnum.file:
         return DocumentBlock(data=content.content)
+    elif content.type == PrimitiveTypeEnum.bytes:
+        return DocumentBlock(data=content.content)
 
     raise InterpreterError(
         f"Unsupported content type: {content.type} with data of type {type(content.content)}"
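The new branch means raw `bytes` content now follows the same path as `file` content. A sketch of the resulting behavior (the `ChatContent` import path and constructor shape are assumptions based on the names in this hunk, not confirmed by the diff):

```python
from qtype.dsl.domain_types import ChatContent  # assumed import path
from qtype.dsl.model import PrimitiveTypeEnum
from qtype.interpreter.conversions import to_content_block

raw = b"%PDF-1.7 ..."  # any binary document payload
for content_type in (PrimitiveTypeEnum.file, PrimitiveTypeEnum.bytes):
    # Both content types are expected to yield a LlamaIndex DocumentBlock
    # wrapping the same bytes.
    block = to_content_block(ChatContent(type=content_type, content=raw))
    assert type(block).__name__ == "DocumentBlock"
```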
qtype/interpreter/steps/llm_inference.py CHANGED
@@ -26,7 +26,8 @@ def execute(
 
     Args:
         li: The LLM inference step to execute.
-
+        stream_fn: Optional streaming callback function.
+        **kwargs: Additional keyword arguments including conversation_history.
     """
     logger.debug(f"Executing LLM inference step: {li.id}")
 
@@ -58,26 +59,44 @@ def execute(
         )
     elif output_variable.type == ChatMessage:
         model = to_llm(li.model, li.system_message)
-
         if not all(
             isinstance(input.value, ChatMessage) for input in li.inputs
         ):
             raise InterpreterError(
                 f"LLMInference step with ChatMessage output must have ChatMessage inputs. Got {li.inputs}"
             )
+
+        # Current user input
         inputs = [
             to_chat_message(input.value)  # type: ignore
             for input in li.inputs
-        ]
+        ]
 
-        #
+        # The session id is used to isolate the memory from other "users"
+        session_id = kwargs.get("session_id")
+
+        # If memory is defined, use it.
         if li.memory:
-
-            memory = to_memory(kwargs.get("session_id"), li.memory)
-            inputs = memory.get(inputs)
-        else:
-            memory = None
+            memory = to_memory(session_id, li.memory)
 
+            from llama_index.core.async_utils import asyncio_run
+
+            # add the inputs to the memory
+            asyncio_run(memory.aput_messages(inputs))
+            # Use the whole memory state as inputs to the llm
+            inputs = memory.get_all()
+        else:
+            # If memory is not defined, see if a conversation history was provided.
+            # This is the list of messages from the front end
+            conversation_history = kwargs.get("conversation_history", [])  # type: ignore
+            if not isinstance(conversation_history, list):
+                raise ValueError(
+                    "Unexpected error: conversation history is not a list."
+                )
+            history: list[ChatMessage] = conversation_history
+            inputs = [to_chat_message(msg) for msg in history] + inputs
+
+        # If the stream function is set, we'll stream the results
         if stream_fn:
             generator = model.stream_chat(
                 messages=inputs,
@@ -87,10 +106,12 @@ def execute(
                     else {}
                 ),
             )
-            for
-            stream_fn(li,
+            for chat_result in generator:
+                stream_fn(li, chat_result.delta)
+            # Get the final result for processing
+            chat_result = chat_result  # Use the last result from streaming
         else:
-
+            chat_result: ChatResponse = model.chat(
                 messages=inputs,
                 **(
                     li.model.inference_params
@@ -98,9 +119,9 @@ def execute(
                     else {}
                 ),
             )
-        output_variable.value = from_chat_message(
-        if memory:
-            memory.put(
+        output_variable.value = from_chat_message(chat_result.message)
+        if li.memory:
+            memory.put(chat_result.message)
     else:
         model = to_llm(li.model, li.system_message)
 
@@ -118,10 +139,14 @@ def execute(
 
         if stream_fn:
             generator = model.stream_complete(prompt=input)
-            for
-            stream_fn(li,
+            for complete_result in generator:
+                stream_fn(li, complete_result.delta)
+            # Get the final result for processing
+            complete_result = (
+                complete_result
+            )  # Use the last result from streaming
         else:
-
-            output_variable.value =
+            complete_result: CompletionResponse = model.complete(prompt=input)
+            output_variable.value = complete_result.text
 
     return li.outputs  # type: ignore[return-value]
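The branching above determines what the model actually sees: when the step has memory, the new turns are persisted and the full memory state is replayed; otherwise the client-supplied `conversation_history` is prepended to the current input. A standalone restatement of that logic (`FakeMemory` is an invented stand-in for the LlamaIndex memory object; it only mirrors the calls used in the diff):

```python
class FakeMemory:
    """Invented stand-in for the step's memory object."""

    def __init__(self) -> None:
        self._messages: list[str] = []

    def put_messages(self, msgs: list[str]) -> None:
        self._messages.extend(msgs)

    def get_all(self) -> list[str]:
        return list(self._messages)


def resolve_chat_inputs(
    current: list[str],
    memory: FakeMemory | None,
    conversation_history: list[str],
) -> list[str]:
    if memory is not None:
        # Server-side memory is authoritative: persist the new turns,
        # then replay the whole state to the model.
        memory.put_messages(current)
        return memory.get_all()
    if not isinstance(conversation_history, list):
        raise ValueError("Unexpected error: conversation history is not a list.")
    # No memory configured: trust the front end's history, append new turns.
    return conversation_history + current


assert resolve_chat_inputs(["hi"], None, ["prev-q", "prev-a"]) == [
    "prev-q",
    "prev-a",
    "hi",
]
```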
qtype/interpreter/streaming_helpers.py ADDED
@@ -0,0 +1,123 @@
+"""
+Streaming helpers for bridging callback-based execution with generator patterns.
+
+This module provides utilities to convert callback-based streaming functions
+into generators that can be used with FastAPI's StreamingResponse.
+"""
+
+from __future__ import annotations
+
+import queue
+import threading
+from collections.abc import Callable, Generator
+from concurrent.futures import Future
+from typing import Any, TypeVar
+
+from qtype.dsl.domain_types import ChatMessage
+from qtype.semantic.model import Step
+
+T = TypeVar("T")
+
+
+def create_streaming_generator(
+    execution_func: Callable[..., T],
+    *args: Any,
+    timeout: float = 30.0,
+    join_timeout: float = 5.0,
+    **kwargs: Any,
+) -> tuple[Generator[tuple[Step, ChatMessage | str], None, None], Future[T]]:
+    """
+    Convert a callback-based streaming function into a generator with result future.
+
+    This function executes the provided function in a separate thread and
+    converts its stream_fn callback pattern into a generator that yields
+    (step, message) tuples. Additionally returns a Future that will contain
+    the execution function's return value.
+
+    Args:
+        execution_func: Function to execute that accepts a stream_fn parameter
+        *args: Positional arguments to pass to execution_func
+        timeout: Timeout in seconds for queue.get() operations
+        join_timeout: Timeout in seconds for thread.join()
+        **kwargs: Keyword arguments to pass to execution_func
+
+    Returns:
+        Tuple of (generator, future) where:
+        - generator yields (Step, ChatMessage | str) tuples from streaming callback
+        - future will contain the return value of execution_func
+
+    Example:
+        ```python
+        def my_flow_execution(flow: Flow, stream_fn: Callable | None = None):
+            # Some execution logic that calls stream_fn(step, message)
+            return {"status": "completed", "steps_executed": 3}
+
+        # Convert to generator with result
+        stream_gen, result_future = create_streaming_generator(
+            my_flow_execution,
+            flow_copy,
+            some_other_arg="value"
+        )
+
+        # Process streaming data
+        for step, message in stream_gen:
+            print(f"Step {step.id}: {message}")
+
+        # Get final result (blocks until execution completes)
+        final_result = result_future.result(timeout=10.0)
+        print(f"Execution result: {final_result}")
+        ```
+    """
+    # Create thread-safe queue for communication
+    stream_queue: queue.Queue[
+        tuple[Step, ChatMessage | str] | None
+    ] = queue.Queue()
+
+    # Create future for the return value
+    result_future: Future[T] = Future()
+
+    def stream_callback(step: Step, msg: ChatMessage | str) -> None:
+        """Callback function that pushes data to the queue."""
+        stream_queue.put((step, msg))
+
+    def execution_task() -> None:
+        """Execute the function in a separate thread."""
+        try:
+            # Add the stream_fn callback to kwargs
+            kwargs_with_callback = kwargs.copy()
+            kwargs_with_callback["stream_fn"] = stream_callback
+
+            # Execute the function with the callback and capture result
+            result = execution_func(*args, **kwargs_with_callback)
+            result_future.set_result(result)
+        except Exception as e:
+            # Set exception on future if execution fails
+            result_future.set_exception(e)
+        finally:
+            # Signal end of stream
+            stream_queue.put(None)
+
+    # Start execution in separate thread
+    execution_thread = threading.Thread(target=execution_task)
+    execution_thread.start()
+
+    def generator() -> Generator[tuple[Step, ChatMessage | str], None, None]:
+        """Generator that yields streaming data from the queue."""
+        try:
+            # Yield data as it becomes available
+            while True:
+                try:
+                    # Wait for data with timeout to avoid hanging
+                    data = stream_queue.get(timeout=timeout)
+                    if data is None:
+                        # End of stream signal
+                        break
+                    yield data
+                except queue.Empty:
+                    # Handle timeout - break and let thread cleanup
+                    break
+        finally:
+            # Ensure thread cleanup
+            execution_thread.join(timeout=join_timeout)
+
+    return generator(), result_future
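Per the module docstring, the intended consumer is FastAPI's `StreamingResponse`. A hypothetical endpoint wiring the two together (`run_flow` and `flow` are invented placeholders; any callable that accepts a `stream_fn` keyword argument fits):

```python
from collections.abc import Iterator

from fastapi import FastAPI
from fastapi.responses import StreamingResponse

from qtype.interpreter.streaming_helpers import create_streaming_generator

app = FastAPI()


@app.post("/flows/demo/stream")
def stream_flow() -> StreamingResponse:
    # run_flow(flow, stream_fn=...) is assumed to exist for this sketch.
    stream_gen, _result_future = create_streaming_generator(run_flow, flow)

    def events() -> Iterator[str]:
        for step, message in stream_gen:
            # Each stream_fn callback invocation becomes one SSE event.
            yield f"data: {step.id}: {message}\n\n"

    return StreamingResponse(events(), media_type="text/event-stream")
```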
qtype/interpreter/typing.py CHANGED
@@ -5,19 +5,34 @@ from typing import Any, Type
 from pydantic import BaseModel, Field, create_model
 
 from qtype.converters.types import PRIMITIVE_TO_PYTHON_TYPE
-from qtype.dsl.model import
+from qtype.dsl.model import PrimitiveTypeEnum
 from qtype.semantic.model import Flow, Variable
 
 
-def _get_variable_type(var: Variable) -> Type:
+def _get_variable_type(var: Variable) -> tuple[Type, dict[str, Any]]:
+    """Get the Python type and metadata for a variable.
+
+    Returns:
+        Tuple of (python_type, field_metadata) where field_metadata contains
+        information about the original QType type.
+    """
+    field_metadata = {}
+
     if isinstance(var.type, PrimitiveTypeEnum):
-
-
-
+        python_type = PRIMITIVE_TO_PYTHON_TYPE.get(var.type, str)
+        field_metadata["qtype_type"] = var.type.value
+    elif (
+        isinstance(var.type, type)
+        and issubclass(var.type, BaseModel)
+        and hasattr(var.type, "__name__")
+    ):
+        python_type = var.type
+        field_metadata["qtype_type"] = var.type.__name__
     else:
-        # TODO: handle custom TypeDefinition...
         raise ValueError(f"Unsupported variable type: {var.type}")
 
+    return python_type, field_metadata
+
 
 def create_output_type_model(flow: Flow) -> Type[BaseModel]:
     """Dynamically create a Pydantic response model for a flow."""
@@ -31,10 +46,12 @@ def create_output_type_model(flow: Flow) -> Type[BaseModel]:
     if flow.outputs:
         output_fields = {}
         for var in flow.outputs:
-            python_type = _get_variable_type(var)
+            python_type, type_metadata = _get_variable_type(var)
             field_info = Field(
-                description
+                # TODO: grok the description from the variable if available
+                # description=f"Output for {var.id}",
                 title=var.id,
+                json_schema_extra=type_metadata,
             )
             output_fields[var.id] = (python_type, field_info)
 
@@ -68,10 +85,12 @@ def create_input_type_model(flow: Flow) -> Type[BaseModel]:
 
     fields = {}
     for var in flow.inputs:
-        python_type = _get_variable_type(var)
+        python_type, type_metadata = _get_variable_type(var)
        field_info = Field(
-            description
+            # TODO: grok the description from the variable if available
+            # description=f"Input for {var.id}",
            title=var.id,
+            json_schema_extra=type_metadata,
        )
        fields[var.id] = (python_type, field_info)
 
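The practical effect of `json_schema_extra` is that the original QType type survives into the generated JSON schema, and from there into the OpenAPI spec the API serves. A standalone illustration of the mechanism (not package code; model and field names are invented):

```python
from pydantic import Field, create_model

DemoOutput = create_model(
    "DemoOutput",
    answer=(str, Field(title="answer", json_schema_extra={"qtype_type": "text"})),
)

schema = DemoOutput.model_json_schema()
# The extra metadata is carried verbatim on the property definition.
assert schema["properties"]["answer"]["qtype_type"] == "text"
```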
qtype/interpreter/ui/404/index.html ADDED
@@ -0,0 +1 @@
+ (omitted: one-line prerendered Next.js 404 page, minified HTML with the inlined RSC payload)

qtype/interpreter/ui/404.html ADDED
@@ -0,0 +1 @@
+ (omitted: identical one-line prerendered Next.js 404 page)