qtype 0.0.9__py3-none-any.whl → 0.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qtype/application/__init__.py +12 -0
- qtype/application/commons/__init__.py +7 -0
- qtype/{converters → application/converters}/tools_from_module.py +2 -2
- qtype/{converters → application/converters}/types.py +0 -33
- qtype/{dsl/document.py → application/documentation.py} +2 -0
- qtype/application/facade.py +160 -0
- qtype/base/__init__.py +14 -0
- qtype/base/exceptions.py +49 -0
- qtype/base/logging.py +39 -0
- qtype/base/types.py +29 -0
- qtype/commands/convert.py +64 -49
- qtype/commands/generate.py +59 -4
- qtype/commands/run.py +109 -72
- qtype/commands/serve.py +42 -28
- qtype/commands/validate.py +25 -42
- qtype/commands/visualize.py +51 -37
- qtype/dsl/__init__.py +9 -0
- qtype/dsl/base_types.py +8 -0
- qtype/dsl/custom_types.py +6 -4
- qtype/dsl/model.py +185 -50
- qtype/dsl/validator.py +9 -4
- qtype/interpreter/api.py +96 -40
- qtype/interpreter/auth/__init__.py +3 -0
- qtype/interpreter/auth/aws.py +234 -0
- qtype/interpreter/auth/cache.py +67 -0
- qtype/interpreter/auth/generic.py +103 -0
- qtype/interpreter/batch/flow.py +95 -0
- qtype/interpreter/batch/sql_source.py +95 -0
- qtype/interpreter/batch/step.py +63 -0
- qtype/interpreter/batch/types.py +41 -0
- qtype/interpreter/batch/utils.py +179 -0
- qtype/interpreter/conversions.py +21 -10
- qtype/interpreter/resource_cache.py +4 -2
- qtype/interpreter/steps/decoder.py +13 -9
- qtype/interpreter/steps/llm_inference.py +7 -9
- qtype/interpreter/steps/prompt_template.py +1 -1
- qtype/interpreter/streaming_helpers.py +3 -3
- qtype/interpreter/typing.py +47 -11
- qtype/interpreter/ui/404/index.html +1 -1
- qtype/interpreter/ui/404.html +1 -1
- qtype/interpreter/ui/index.html +1 -1
- qtype/interpreter/ui/index.txt +1 -1
- qtype/loader.py +9 -15
- qtype/semantic/generate.py +91 -39
- qtype/semantic/model.py +183 -52
- qtype/semantic/resolver.py +4 -4
- {qtype-0.0.9.dist-info → qtype-0.0.11.dist-info}/METADATA +5 -1
- {qtype-0.0.9.dist-info → qtype-0.0.11.dist-info}/RECORD +58 -44
- qtype/commons/generate.py +0 -93
- qtype/semantic/errors.py +0 -4
- /qtype/{commons → application/commons}/tools.py +0 -0
- /qtype/{commons → application/converters}/__init__.py +0 -0
- /qtype/{converters → application/converters}/tools_from_api.py +0 -0
- /qtype/{converters → interpreter/batch}/__init__.py +0 -0
- /qtype/interpreter/ui/_next/static/{uMm4B0RSTGhXxgH3rTfwc → OT8QJQW3J70VbDWWfrEMT}/_buildManifest.js +0 -0
- /qtype/interpreter/ui/_next/static/{uMm4B0RSTGhXxgH3rTfwc → OT8QJQW3J70VbDWWfrEMT}/_ssgManifest.js +0 -0
- {qtype-0.0.9.dist-info → qtype-0.0.11.dist-info}/WHEEL +0 -0
- {qtype-0.0.9.dist-info → qtype-0.0.11.dist-info}/entry_points.txt +0 -0
- {qtype-0.0.9.dist-info → qtype-0.0.11.dist-info}/licenses/LICENSE +0 -0
- {qtype-0.0.9.dist-info → qtype-0.0.11.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import copy
|
|
4
|
+
from typing import Any, Callable, Tuple
|
|
5
|
+
|
|
6
|
+
import pandas as pd
|
|
7
|
+
|
|
8
|
+
from qtype.interpreter.batch.types import BatchConfig, ErrorMode
|
|
9
|
+
from qtype.semantic.model import Step
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class InputMissingError(Exception):
    """Raised when a required input variable is missing from the DataFrame."""
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def validate_inputs(batch: pd.DataFrame, step: Step) -> pd.DataFrame:
    """Ensure every input variable of *step* exists as a column in *batch*.

    Args:
        batch: The input DataFrame to validate.
        step: The step whose declared input variables must be present.

    Returns:
        A view of the DataFrame restricted to the step's input columns,
        in the order the step declares them.

    Raises:
        InputMissingError: If a declared input column is not in the DataFrame.
    """
    input_ids = [input_var.id for input_var in step.inputs]
    # Report the first missing column, preserving declaration order.
    missing = [col for col in input_ids if col not in batch.columns]
    if missing:
        raise InputMissingError(
            f"Input DataFrame must contain column '{missing[0]}' for step {step.id}."
        )
    return batch[input_ids]
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def fail_mode_wrapper(
    f: Callable[..., dict[str, Any]],
    row: pd.Series,
    batch_config: BatchConfig,
    **kwargs: Any,
) -> dict | Exception:
    """Execute *f* on a row with error handling based on the batch configuration.

    Args:
        f: Function that accepts the row's fields (plus *kwargs*) as keyword
            arguments and returns a dict of results.
        row: The input row; its fields are passed to *f* as keyword arguments.
        batch_config: Configuration for error handling.
        **kwargs: Extra keyword arguments forwarded to *f*; on a name
            collision they override the row's fields.

    Returns:
        The result of *f*, or the raised Exception when an error occurs and
        the error mode is set to CONTINUE.

    Raises:
        Exception: Re-raises whatever *f* raised when the error mode is FAIL.
    """
    try:
        # Turn the row into a dict and merge with kwargs.
        merged_kwargs = {**row.to_dict(), **kwargs}
        return f(**merged_kwargs)
    except Exception as e:
        if batch_config.error_mode == ErrorMode.FAIL:
            # Bare raise preserves the original traceback.
            raise
        return e
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def single_step_adapter(
    step: Step, **inputs: dict[str, Any]
) -> dict[str, Any]:
    """A batch adapter for steps that have no side effects or access shared resources."""
    from qtype.interpreter.step import execute_step

    # Work on a deep copy so the shared step definition is never mutated.
    clone = copy.deepcopy(step)
    for variable in clone.inputs:
        if variable.id not in inputs:
            raise ValueError(
                f"Input variable '{variable.id}' not found in inputs."
            )
        variable.value = inputs[variable.id]
    execute_step(clone)
    # Collect the populated output variables back into a plain dict.
    return {out.id: out.value for out in clone.outputs}
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def to_series(
    rv: dict | Exception, error_col_name: str = "error"
) -> pd.Series:
    """Convert a per-row result into a Series; failures become one error field."""
    if not isinstance(rv, Exception):
        return pd.Series(rv)  # type: ignore[no-any-return]
    # An exception is represented as a single-entry series keyed by the error column.
    return pd.Series({error_col_name: str(rv)})
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def batch_iterator(
    f: Callable[..., dict[str, Any]],
    batch: pd.DataFrame,
    batch_config: BatchConfig,
    **kwargs: Any,
) -> Tuple[pd.DataFrame, pd.DataFrame]:
    """Executes a function over a batch of inputs with error handling.

    Args:
        f: The per-row function to execute; returns a dict of outputs.
        batch: The input DataFrame to process.
        batch_config: Configuration for error handling.
        **kwargs: Additional keyword arguments to pass to *f*.

    Returns:
        A tuple containing two DataFrames:
        - The first DataFrame contains successful results with output columns.
        - The second DataFrame contains the failing input rows with an error column.

    Raises:
        ValueError: If the generated error column name already exists in *batch*.
    """
    # Use a unique column name for errors so it cannot clash with output names.
    error_col = "error_" + str(id(f))

    if error_col in batch.columns:
        raise ValueError(
            f"Error column name '{error_col}' already exists in the batch DataFrame."
        )

    def the_pipe(row: pd.Series) -> pd.Series:
        return to_series(
            fail_mode_wrapper(f, row, batch_config=batch_config, **kwargs),
            error_col_name=error_col,
        )

    results = batch.apply(the_pipe, axis=1)

    # When every row succeeds (or the batch is empty) `the_pipe` never emits
    # the error column, so `results` would not contain it and indexing it
    # below would raise KeyError. Materialize it as all-missing in that case.
    if error_col not in results.columns:
        results[error_col] = None

    failed_mask = results[error_col].notna()
    success_mask = ~failed_mask

    # Successful rows keep only the output columns produced by `f`.
    success_df = results[success_mask].drop(columns=[error_col])

    original_columns = batch.columns.tolist()

    if failed_mask.any():
        # Failed rows keep the original input columns plus the error message.
        # Take the inputs from `batch`: `results` only ever holds the outputs
        # of `f` (or the error), never the input columns.
        failed_df = batch.loc[failed_mask].copy()
        failed_df[error_col] = results.loc[failed_mask, error_col]
    else:
        # No failed rows, create empty DataFrame with expected structure.
        failed_df = pd.DataFrame(columns=original_columns + [error_col])

    return success_df, failed_df
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
def reconcile_results_and_errors(
    results: list[pd.DataFrame], errors: list[pd.DataFrame]
) -> Tuple[pd.DataFrame, pd.DataFrame]:
    """
    Concatenate result and error DataFrame lists into one DataFrame each.

    Empty input lists are replaced by a single empty DataFrame so that
    concatenation always succeeds.

    Args:
        results (list[pd.DataFrame]): List of DataFrames containing results.
        errors (list[pd.DataFrame]): List of DataFrames containing errors.

    Returns:
        Tuple[pd.DataFrame, pd.DataFrame]: A tuple containing:
            - A single DataFrame with all results concatenated.
            - A single DataFrame with all errors concatenated.
    """
    merged: list[pd.DataFrame] = []
    for frames in (results, errors):
        # Substitute an empty placeholder frame when the list is empty.
        frames = frames if frames else [pd.DataFrame({})]
        merged.append(pd.concat(frames, ignore_index=True))
    return merged[0], merged[1]
|
qtype/interpreter/conversions.py
CHANGED
|
@@ -38,7 +38,9 @@ def to_llm(model: Model, system_prompt: str | None) -> BaseLLM:
|
|
|
38
38
|
if model.provider in "aws-bedrock":
|
|
39
39
|
# BedrockConverse requires a model_id and system_prompt
|
|
40
40
|
# Inference params can be passed as additional kwargs
|
|
41
|
-
from llama_index.llms.bedrock_converse import
|
|
41
|
+
from llama_index.llms.bedrock_converse import ( # type: ignore[import]
|
|
42
|
+
BedrockConverse,
|
|
43
|
+
)
|
|
42
44
|
|
|
43
45
|
brv: BaseLLM = BedrockConverse(
|
|
44
46
|
model=model.model_id if model.model_id else model.id,
|
|
@@ -53,17 +55,22 @@ def to_llm(model: Model, system_prompt: str | None) -> BaseLLM:
|
|
|
53
55
|
model=model.model_id if model.model_id else model.id,
|
|
54
56
|
system_prompt=system_prompt,
|
|
55
57
|
**(model.inference_params if model.inference_params else {}),
|
|
56
|
-
api_key=model.auth
|
|
57
|
-
if model.auth
|
|
58
|
+
api_key=getattr(model.auth, "api_key", None)
|
|
59
|
+
if model.auth
|
|
58
60
|
else None,
|
|
59
61
|
)
|
|
60
62
|
elif model.provider == "anthropic":
|
|
61
|
-
from llama_index.llms.anthropic import
|
|
63
|
+
from llama_index.llms.anthropic import ( # type: ignore[import-untyped]
|
|
64
|
+
Anthropic,
|
|
65
|
+
)
|
|
62
66
|
|
|
63
67
|
arv: BaseLLM = Anthropic(
|
|
64
68
|
model=model.model_id if model.model_id else model.id,
|
|
65
69
|
system_prompt=system_prompt,
|
|
66
70
|
**(model.inference_params if model.inference_params else {}),
|
|
71
|
+
api_key=getattr(model.auth, "api_key", None)
|
|
72
|
+
if model.auth
|
|
73
|
+
else None,
|
|
67
74
|
)
|
|
68
75
|
return arv
|
|
69
76
|
else:
|
|
@@ -77,19 +84,23 @@ def to_embedding_model(model: Model) -> BaseEmbedding:
|
|
|
77
84
|
"""Convert a qtype Model to a LlamaIndex embedding model."""
|
|
78
85
|
|
|
79
86
|
if model.provider in {"bedrock", "aws", "aws-bedrock"}:
|
|
80
|
-
from llama_index.embeddings.bedrock import
|
|
87
|
+
from llama_index.embeddings.bedrock import ( # type: ignore[import-untyped]
|
|
88
|
+
BedrockEmbedding,
|
|
89
|
+
)
|
|
81
90
|
|
|
82
|
-
|
|
91
|
+
bedrock_embedding: BaseEmbedding = BedrockEmbedding(
|
|
83
92
|
model_name=model.model_id if model.model_id else model.id
|
|
84
93
|
)
|
|
85
|
-
return
|
|
94
|
+
return bedrock_embedding
|
|
86
95
|
elif model.provider == "openai":
|
|
87
|
-
from llama_index.embeddings.openai import
|
|
96
|
+
from llama_index.embeddings.openai import ( # type: ignore[import-untyped]
|
|
97
|
+
OpenAIEmbedding,
|
|
98
|
+
)
|
|
88
99
|
|
|
89
|
-
|
|
100
|
+
openai_embedding: BaseEmbedding = OpenAIEmbedding(
|
|
90
101
|
model_name=model.model_id if model.model_id else model.id
|
|
91
102
|
)
|
|
92
|
-
return
|
|
103
|
+
return openai_embedding
|
|
93
104
|
else:
|
|
94
105
|
raise InterpreterError(
|
|
95
106
|
f"Unsupported embedding model provider: {model.provider}."
|
|
@@ -2,11 +2,13 @@ import functools
|
|
|
2
2
|
import os
|
|
3
3
|
from typing import Any, Callable
|
|
4
4
|
|
|
5
|
-
from cachetools import LRUCache
|
|
5
|
+
from cachetools import LRUCache # type: ignore[import-untyped]
|
|
6
6
|
|
|
7
7
|
# Global LRU cache with a reasonable default size
|
|
8
8
|
_RESOURCE_CACHE_MAX_SIZE = int(os.environ.get("RESOURCE_CACHE_MAX_SIZE", 128))
|
|
9
|
-
_GLOBAL_RESOURCE_CACHE = LRUCache(
|
|
9
|
+
_GLOBAL_RESOURCE_CACHE: LRUCache[Any, Any] = LRUCache(
|
|
10
|
+
maxsize=_RESOURCE_CACHE_MAX_SIZE
|
|
11
|
+
)
|
|
10
12
|
|
|
11
13
|
|
|
12
14
|
def cached_resource(func: Callable[..., Any]) -> Callable[..., Any]:
|
|
@@ -44,6 +44,17 @@ def parse_xml(input: str) -> dict[str, Any]:
|
|
|
44
44
|
raise ValueError(f"Invalid XML input: {e}")
|
|
45
45
|
|
|
46
46
|
|
|
47
|
+
def parse(input: str, format: DecoderFormat) -> dict[str, Any]:
|
|
48
|
+
if format == DecoderFormat.json:
|
|
49
|
+
return parse_json(input)
|
|
50
|
+
elif format == DecoderFormat.xml:
|
|
51
|
+
return parse_xml(input)
|
|
52
|
+
else:
|
|
53
|
+
raise ValueError(
|
|
54
|
+
f"Unsupported decoder format: {format}. Supported formats are: {DecoderFormat.json}, {DecoderFormat.xml}."
|
|
55
|
+
)
|
|
56
|
+
|
|
57
|
+
|
|
47
58
|
def execute(decoder: Decoder, **kwargs: dict[str, Any]) -> list[Variable]:
|
|
48
59
|
"""Execute a decoder step with the provided arguments.
|
|
49
60
|
|
|
@@ -64,14 +75,7 @@ def execute(decoder: Decoder, **kwargs: dict[str, Any]) -> list[Variable]:
|
|
|
64
75
|
f"Input to decoder step {decoder.id} must be a string, found {type(input).__name__}."
|
|
65
76
|
)
|
|
66
77
|
|
|
67
|
-
|
|
68
|
-
result_dict = parse_json(input)
|
|
69
|
-
elif decoder.format == DecoderFormat.xml:
|
|
70
|
-
result_dict = parse_xml(input)
|
|
71
|
-
else:
|
|
72
|
-
raise ValueError(
|
|
73
|
-
f"Unsupported decoder format: {decoder.format}. Supported formats are: {DecoderFormat.json}, {DecoderFormat.xml}."
|
|
74
|
-
)
|
|
78
|
+
result_dict = parse(input, decoder.format)
|
|
75
79
|
|
|
76
80
|
# Set the output variables with the parsed results
|
|
77
81
|
for output in decoder.outputs:
|
|
@@ -81,4 +85,4 @@ def execute(decoder: Decoder, **kwargs: dict[str, Any]) -> list[Variable]:
|
|
|
81
85
|
raise ValueError(
|
|
82
86
|
f"Output variable {output.id} not found in decoded result: {result_dict}"
|
|
83
87
|
)
|
|
84
|
-
return decoder.outputs
|
|
88
|
+
return decoder.outputs # type: ignore[no-any-return]
|
|
@@ -97,6 +97,7 @@ def execute(
|
|
|
97
97
|
inputs = [to_chat_message(msg) for msg in history] + inputs
|
|
98
98
|
|
|
99
99
|
# If the stream function is set, we'll stream the results
|
|
100
|
+
chat_result: ChatResponse
|
|
100
101
|
if stream_fn:
|
|
101
102
|
generator = model.stream_chat(
|
|
102
103
|
messages=inputs,
|
|
@@ -106,12 +107,12 @@ def execute(
|
|
|
106
107
|
else {}
|
|
107
108
|
),
|
|
108
109
|
)
|
|
109
|
-
for
|
|
110
|
-
stream_fn(li,
|
|
110
|
+
for chat_response in generator:
|
|
111
|
+
stream_fn(li, chat_response.delta)
|
|
111
112
|
# Get the final result for processing
|
|
112
|
-
chat_result =
|
|
113
|
+
chat_result = chat_response # Use the last result from streaming
|
|
113
114
|
else:
|
|
114
|
-
chat_result
|
|
115
|
+
chat_result = model.chat(
|
|
115
116
|
messages=inputs,
|
|
116
117
|
**(
|
|
117
118
|
li.model.inference_params
|
|
@@ -137,16 +138,13 @@ def execute(
|
|
|
137
138
|
)
|
|
138
139
|
input = str(input)
|
|
139
140
|
|
|
141
|
+
complete_result: CompletionResponse
|
|
140
142
|
if stream_fn:
|
|
141
143
|
generator = model.stream_complete(prompt=input)
|
|
142
144
|
for complete_result in generator:
|
|
143
145
|
stream_fn(li, complete_result.delta)
|
|
144
|
-
# Get the final result for processing
|
|
145
|
-
complete_result = (
|
|
146
|
-
complete_result
|
|
147
|
-
) # Use the last result from streaming
|
|
148
146
|
else:
|
|
149
|
-
complete_result
|
|
147
|
+
complete_result = model.complete(prompt=input)
|
|
150
148
|
output_variable.value = complete_result.text
|
|
151
149
|
|
|
152
150
|
return li.outputs # type: ignore[return-value]
|
|
@@ -69,9 +69,9 @@ def create_streaming_generator(
|
|
|
69
69
|
```
|
|
70
70
|
"""
|
|
71
71
|
# Create thread-safe queue for communication
|
|
72
|
-
stream_queue: queue.Queue[
|
|
73
|
-
|
|
74
|
-
|
|
72
|
+
stream_queue: queue.Queue[tuple[Step, ChatMessage | str] | None] = (
|
|
73
|
+
queue.Queue()
|
|
74
|
+
)
|
|
75
75
|
|
|
76
76
|
# Create future for the return value
|
|
77
77
|
result_future: Future[T] = Future()
|
qtype/interpreter/typing.py
CHANGED
|
@@ -4,7 +4,7 @@ from typing import Any, Type
|
|
|
4
4
|
|
|
5
5
|
from pydantic import BaseModel, Field, create_model
|
|
6
6
|
|
|
7
|
-
from qtype.converters.types import PRIMITIVE_TO_PYTHON_TYPE
|
|
7
|
+
from qtype.application.converters.types import PRIMITIVE_TO_PYTHON_TYPE
|
|
8
8
|
from qtype.dsl.model import PrimitiveTypeEnum
|
|
9
9
|
from qtype.semantic.model import Flow, Variable
|
|
10
10
|
|
|
@@ -34,19 +34,38 @@ def _get_variable_type(var: Variable) -> tuple[Type, dict[str, Any]]:
|
|
|
34
34
|
return python_type, field_metadata
|
|
35
35
|
|
|
36
36
|
|
|
37
|
-
def create_output_type_model(
|
|
37
|
+
def create_output_type_model(
|
|
38
|
+
flow: Flow, is_batch: bool = False
|
|
39
|
+
) -> Type[BaseModel]:
|
|
38
40
|
"""Dynamically create a Pydantic response model for a flow."""
|
|
39
|
-
fields = {}
|
|
41
|
+
fields: dict[str, tuple[Any, Any]] = {}
|
|
40
42
|
|
|
41
43
|
# Always include flow_id and status
|
|
42
44
|
fields["flow_id"] = (str, Field(description="ID of the executed flow"))
|
|
43
45
|
fields["status"] = (str, Field(description="Execution status"))
|
|
44
46
|
|
|
47
|
+
if is_batch:
|
|
48
|
+
# Include information about the number of results, errors, etc.
|
|
49
|
+
fields["num_inputs"] = (int, Field(description="Number of inputs."))
|
|
50
|
+
fields["num_results"] = (int, Field(description="Number of results."))
|
|
51
|
+
fields["num_errors"] = (int, Field(description="Number of errors."))
|
|
52
|
+
fields["errors"] = (
|
|
53
|
+
list[dict[Any, Any]],
|
|
54
|
+
Field(description="All inputs with their associated errors."),
|
|
55
|
+
)
|
|
56
|
+
|
|
45
57
|
# Add dynamic output fields
|
|
46
58
|
if flow.outputs:
|
|
47
59
|
output_fields = {}
|
|
48
60
|
for var in flow.outputs:
|
|
49
61
|
python_type, type_metadata = _get_variable_type(var)
|
|
62
|
+
|
|
63
|
+
# Make type optional for batch processing since rows might have missing values
|
|
64
|
+
if is_batch:
|
|
65
|
+
from typing import Union
|
|
66
|
+
|
|
67
|
+
python_type = Union[python_type, type(None)] # type: ignore
|
|
68
|
+
|
|
50
69
|
field_info = Field(
|
|
51
70
|
# TODO: grok the description from the variable if available
|
|
52
71
|
# description=f"Output for {var.id}",
|
|
@@ -56,15 +75,21 @@ def create_output_type_model(flow: Flow) -> Type[BaseModel]:
|
|
|
56
75
|
output_fields[var.id] = (python_type, field_info)
|
|
57
76
|
|
|
58
77
|
# Create nested outputs model
|
|
59
|
-
outputs_model = create_model(
|
|
78
|
+
outputs_model: Type[BaseModel] = create_model(
|
|
60
79
|
f"{flow.id}Outputs",
|
|
61
80
|
__base__=BaseModel,
|
|
62
81
|
**output_fields,
|
|
63
82
|
) # type: ignore
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
83
|
+
if is_batch:
|
|
84
|
+
fields["outputs"] = (
|
|
85
|
+
list[outputs_model], # type: ignore
|
|
86
|
+
Field(description="List of flow execution outputs"),
|
|
87
|
+
)
|
|
88
|
+
else:
|
|
89
|
+
fields["outputs"] = (
|
|
90
|
+
outputs_model,
|
|
91
|
+
Field(description="Flow execution outputs"),
|
|
92
|
+
)
|
|
68
93
|
else:
|
|
69
94
|
fields["outputs"] = (
|
|
70
95
|
dict[str, Any],
|
|
@@ -74,10 +99,9 @@ def create_output_type_model(flow: Flow) -> Type[BaseModel]:
|
|
|
74
99
|
return create_model(f"{flow.id}Response", __base__=BaseModel, **fields) # type: ignore
|
|
75
100
|
|
|
76
101
|
|
|
77
|
-
def create_input_type_model(flow: Flow) -> Type[BaseModel]:
|
|
102
|
+
def create_input_type_model(flow: Flow, is_batch: bool) -> Type[BaseModel]:
|
|
78
103
|
"""Dynamically create a Pydantic request model for a flow."""
|
|
79
|
-
if not flow.inputs:
|
|
80
|
-
# Return a simple model with no required fields
|
|
104
|
+
if not flow.inputs and not is_batch:
|
|
81
105
|
return create_model(
|
|
82
106
|
f"{flow.id}Request",
|
|
83
107
|
__base__=BaseModel,
|
|
@@ -94,4 +118,16 @@ def create_input_type_model(flow: Flow) -> Type[BaseModel]:
|
|
|
94
118
|
)
|
|
95
119
|
fields[var.id] = (python_type, field_info)
|
|
96
120
|
|
|
121
|
+
if is_batch:
|
|
122
|
+
# For batch processing, wrap inputs in a list
|
|
123
|
+
single_input_model: Type[BaseModel] = create_model(
|
|
124
|
+
f"{flow.id}SingleInput", __base__=BaseModel, **fields
|
|
125
|
+
) # type: ignore
|
|
126
|
+
fields = {
|
|
127
|
+
"inputs": (
|
|
128
|
+
list[single_input_model], # type: ignore
|
|
129
|
+
Field(description="List of inputs for batch processing"),
|
|
130
|
+
)
|
|
131
|
+
}
|
|
132
|
+
|
|
97
133
|
return create_model(f"{flow.id}Request", __base__=BaseModel, **fields) # type: ignore
|
|
@@ -1 +1 @@
|
|
|
1
|
-
<!DOCTYPE html><!--
|
|
1
|
+
<!DOCTYPE html><!--OT8QJQW3J70VbDWWfrEMT--><html lang="en"><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="preload" href="./_next/static/media/569ce4b8f30dc480-s.p.woff2" as="font" crossorigin="" type="font/woff2"/><link rel="preload" href="./_next/static/media/93f479601ee12b01-s.p.woff2" as="font" crossorigin="" type="font/woff2"/><link rel="stylesheet" href="./_next/static/css/a262c53826df929b.css" data-precedence="next"/><link rel="preload" as="script" fetchPriority="low" href="./_next/static/chunks/webpack-8289c17c67827f22.js"/><script src="./_next/static/chunks/4bd1b696-cf72ae8a39fa05aa.js" async=""></script><script src="./_next/static/chunks/964-ed4ab073db645007.js" async=""></script><script src="./_next/static/chunks/main-app-6fc6346bc8f7f163.js" async=""></script><meta name="robots" content="noindex"/><meta name="next-size-adjust" content=""/><title>404: This page could not be found.</title><title>Create Next App</title><meta name="description" content="Generated by create next app"/><link rel="icon" href="/ui/favicon.ico" type="image/x-icon" sizes="16x16"/><script src="./_next/static/chunks/polyfills-42372ed130431b0a.js" noModule=""></script></head><body class="__variable_6de60c __variable_152ec0 antialiased"><div hidden=""><!--$--><!--/$--></div><div style="font-family:system-ui,"Segoe UI",Roboto,Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji";height:100vh;text-align:center;display:flex;flex-direction:column;align-items:center;justify-content:center"><div><style>body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}</style><h1 class="next-error-h1" style="display:inline-block;margin:0 20px 0 0;padding:0 23px 0 0;font-size:24px;font-weight:500;vertical-align:top;line-height:49px">404</h1><div 
style="display:inline-block"><h2 style="font-size:14px;font-weight:400;line-height:49px;margin:0">This page could not be found.</h2></div></div></div><!--$--><!--/$--><script src="./_next/static/chunks/webpack-8289c17c67827f22.js" id="_R_" async=""></script><script>(self.__next_f=self.__next_f||[]).push([0])</script><script>self.__next_f.push([1,"1:\"$Sreact.fragment\"\n2:I[7555,[],\"\"]\n3:I[1295,[],\"\"]\n4:I[9665,[],\"OutletBoundary\"]\n6:I[4911,[],\"AsyncMetadataOutlet\"]\n8:I[9665,[],\"ViewportBoundary\"]\na:I[9665,[],\"MetadataBoundary\"]\nb:\"$Sreact.suspense\"\nd:I[8393,[],\"\"]\n:HL[\"./_next/static/media/569ce4b8f30dc480-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n:HL[\"./_next/static/media/93f479601ee12b01-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n:HL[\"./_next/static/css/a262c53826df929b.css\",\"style\"]\n"])</script><script>self.__next_f.push([1,"0:{\"P\":null,\"b\":\"OT8QJQW3J70VbDWWfrEMT\",\"p\":\".\",\"c\":[\"\",\"_not-found\",\"\"],\"i\":false,\"f\":[[[\"\",{\"children\":[\"/_not-found\",{\"children\":[\"__PAGE__\",{}]}]},\"$undefined\",\"$undefined\",true],[\"\",[\"$\",\"$1\",\"c\",{\"children\":[[[\"$\",\"link\",\"0\",{\"rel\":\"stylesheet\",\"href\":\"./_next/static/css/a262c53826df929b.css\",\"precedence\":\"next\",\"crossOrigin\":\"$undefined\",\"nonce\":\"$undefined\"}]],[\"$\",\"html\",null,{\"lang\":\"en\",\"children\":[\"$\",\"body\",null,{\"className\":\"__variable_6de60c __variable_152ec0 
antialiased\",\"children\":[\"$\",\"$L2\",null,{\"parallelRouterKey\":\"children\",\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L3\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":\"$undefined\",\"forbidden\":\"$undefined\",\"unauthorized\":\"$undefined\"}]}]}]]}],{\"children\":[\"/_not-found\",[\"$\",\"$1\",\"c\",{\"children\":[null,[\"$\",\"$L2\",null,{\"parallelRouterKey\":\"children\",\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L3\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":\"$undefined\",\"forbidden\":\"$undefined\",\"unauthorized\":\"$undefined\"}]]}],{\"children\":[\"__PAGE__\",[\"$\",\"$1\",\"c\",{\"children\":[[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":{\"fontFamily\":\"system-ui,\\\"Segoe UI\\\",Roboto,Helvetica,Arial,sans-serif,\\\"Apple Color Emoji\\\",\\\"Segoe UI Emoji\\\"\",\"height\":\"100vh\",\"textAlign\":\"center\",\"display\":\"flex\",\"flexDirection\":\"column\",\"alignItems\":\"center\",\"justifyContent\":\"center\"},\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":{\"display\":\"inline-block\",\"margin\":\"0 20px 0 0\",\"padding\":\"0 23px 0 
0\",\"fontSize\":24,\"fontWeight\":500,\"verticalAlign\":\"top\",\"lineHeight\":\"49px\"},\"children\":404}],[\"$\",\"div\",null,{\"style\":{\"display\":\"inline-block\"},\"children\":[\"$\",\"h2\",null,{\"style\":{\"fontSize\":14,\"fontWeight\":400,\"lineHeight\":\"49px\",\"margin\":0},\"children\":\"This page could not be found.\"}]}]]}]}]],null,[\"$\",\"$L4\",null,{\"children\":[\"$L5\",[\"$\",\"$L6\",null,{\"promise\":\"$@7\"}]]}]]}],{},null,false]},null,false]},null,false],[\"$\",\"$1\",\"h\",{\"children\":[[\"$\",\"meta\",null,{\"name\":\"robots\",\"content\":\"noindex\"}],[[\"$\",\"$L8\",null,{\"children\":\"$L9\"}],[\"$\",\"meta\",null,{\"name\":\"next-size-adjust\",\"content\":\"\"}]],[\"$\",\"$La\",null,{\"children\":[\"$\",\"div\",null,{\"hidden\":true,\"children\":[\"$\",\"$b\",null,{\"fallback\":null,\"children\":\"$Lc\"}]}]}]]}],false]],\"m\":\"$undefined\",\"G\":[\"$d\",[]],\"s\":false,\"S\":true}\n"])</script><script>self.__next_f.push([1,"9:[[\"$\",\"meta\",\"0\",{\"charSet\":\"utf-8\"}],[\"$\",\"meta\",\"1\",{\"name\":\"viewport\",\"content\":\"width=device-width, initial-scale=1\"}]]\n5:null\n"])</script><script>self.__next_f.push([1,"e:I[8175,[],\"IconMark\"]\n7:{\"metadata\":[[\"$\",\"title\",\"0\",{\"children\":\"Create Next App\"}],[\"$\",\"meta\",\"1\",{\"name\":\"description\",\"content\":\"Generated by create next app\"}],[\"$\",\"link\",\"2\",{\"rel\":\"icon\",\"href\":\"/ui/favicon.ico\",\"type\":\"image/x-icon\",\"sizes\":\"16x16\"}],[\"$\",\"$Le\",\"3\",{}]],\"error\":null,\"digest\":\"$undefined\"}\n"])</script><script>self.__next_f.push([1,"c:\"$7:metadata\"\n"])</script></body></html>
|
qtype/interpreter/ui/404.html
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
<!DOCTYPE html><!--
|
|
1
|
+
<!DOCTYPE html><!--OT8QJQW3J70VbDWWfrEMT--><html lang="en"><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="preload" href="./_next/static/media/569ce4b8f30dc480-s.p.woff2" as="font" crossorigin="" type="font/woff2"/><link rel="preload" href="./_next/static/media/93f479601ee12b01-s.p.woff2" as="font" crossorigin="" type="font/woff2"/><link rel="stylesheet" href="./_next/static/css/a262c53826df929b.css" data-precedence="next"/><link rel="preload" as="script" fetchPriority="low" href="./_next/static/chunks/webpack-8289c17c67827f22.js"/><script src="./_next/static/chunks/4bd1b696-cf72ae8a39fa05aa.js" async=""></script><script src="./_next/static/chunks/964-ed4ab073db645007.js" async=""></script><script src="./_next/static/chunks/main-app-6fc6346bc8f7f163.js" async=""></script><meta name="robots" content="noindex"/><meta name="next-size-adjust" content=""/><title>404: This page could not be found.</title><title>Create Next App</title><meta name="description" content="Generated by create next app"/><link rel="icon" href="/ui/favicon.ico" type="image/x-icon" sizes="16x16"/><script src="./_next/static/chunks/polyfills-42372ed130431b0a.js" noModule=""></script></head><body class="__variable_6de60c __variable_152ec0 antialiased"><div hidden=""><!--$--><!--/$--></div><div style="font-family:system-ui,"Segoe UI",Roboto,Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji";height:100vh;text-align:center;display:flex;flex-direction:column;align-items:center;justify-content:center"><div><style>body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}</style><h1 class="next-error-h1" style="display:inline-block;margin:0 20px 0 0;padding:0 23px 0 0;font-size:24px;font-weight:500;vertical-align:top;line-height:49px">404</h1><div 
style="display:inline-block"><h2 style="font-size:14px;font-weight:400;line-height:49px;margin:0">This page could not be found.</h2></div></div></div><!--$--><!--/$--><script src="./_next/static/chunks/webpack-8289c17c67827f22.js" id="_R_" async=""></script><script>(self.__next_f=self.__next_f||[]).push([0])</script><script>self.__next_f.push([1,"1:\"$Sreact.fragment\"\n2:I[7555,[],\"\"]\n3:I[1295,[],\"\"]\n4:I[9665,[],\"OutletBoundary\"]\n6:I[4911,[],\"AsyncMetadataOutlet\"]\n8:I[9665,[],\"ViewportBoundary\"]\na:I[9665,[],\"MetadataBoundary\"]\nb:\"$Sreact.suspense\"\nd:I[8393,[],\"\"]\n:HL[\"./_next/static/media/569ce4b8f30dc480-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n:HL[\"./_next/static/media/93f479601ee12b01-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n:HL[\"./_next/static/css/a262c53826df929b.css\",\"style\"]\n"])</script><script>self.__next_f.push([1,"0:{\"P\":null,\"b\":\"OT8QJQW3J70VbDWWfrEMT\",\"p\":\".\",\"c\":[\"\",\"_not-found\",\"\"],\"i\":false,\"f\":[[[\"\",{\"children\":[\"/_not-found\",{\"children\":[\"__PAGE__\",{}]}]},\"$undefined\",\"$undefined\",true],[\"\",[\"$\",\"$1\",\"c\",{\"children\":[[[\"$\",\"link\",\"0\",{\"rel\":\"stylesheet\",\"href\":\"./_next/static/css/a262c53826df929b.css\",\"precedence\":\"next\",\"crossOrigin\":\"$undefined\",\"nonce\":\"$undefined\"}]],[\"$\",\"html\",null,{\"lang\":\"en\",\"children\":[\"$\",\"body\",null,{\"className\":\"__variable_6de60c __variable_152ec0 
antialiased\",\"children\":[\"$\",\"$L2\",null,{\"parallelRouterKey\":\"children\",\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L3\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":\"$undefined\",\"forbidden\":\"$undefined\",\"unauthorized\":\"$undefined\"}]}]}]]}],{\"children\":[\"/_not-found\",[\"$\",\"$1\",\"c\",{\"children\":[null,[\"$\",\"$L2\",null,{\"parallelRouterKey\":\"children\",\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L3\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":\"$undefined\",\"forbidden\":\"$undefined\",\"unauthorized\":\"$undefined\"}]]}],{\"children\":[\"__PAGE__\",[\"$\",\"$1\",\"c\",{\"children\":[[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":{\"fontFamily\":\"system-ui,\\\"Segoe UI\\\",Roboto,Helvetica,Arial,sans-serif,\\\"Apple Color Emoji\\\",\\\"Segoe UI Emoji\\\"\",\"height\":\"100vh\",\"textAlign\":\"center\",\"display\":\"flex\",\"flexDirection\":\"column\",\"alignItems\":\"center\",\"justifyContent\":\"center\"},\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":{\"display\":\"inline-block\",\"margin\":\"0 20px 0 0\",\"padding\":\"0 23px 0 
0\",\"fontSize\":24,\"fontWeight\":500,\"verticalAlign\":\"top\",\"lineHeight\":\"49px\"},\"children\":404}],[\"$\",\"div\",null,{\"style\":{\"display\":\"inline-block\"},\"children\":[\"$\",\"h2\",null,{\"style\":{\"fontSize\":14,\"fontWeight\":400,\"lineHeight\":\"49px\",\"margin\":0},\"children\":\"This page could not be found.\"}]}]]}]}]],null,[\"$\",\"$L4\",null,{\"children\":[\"$L5\",[\"$\",\"$L6\",null,{\"promise\":\"$@7\"}]]}]]}],{},null,false]},null,false]},null,false],[\"$\",\"$1\",\"h\",{\"children\":[[\"$\",\"meta\",null,{\"name\":\"robots\",\"content\":\"noindex\"}],[[\"$\",\"$L8\",null,{\"children\":\"$L9\"}],[\"$\",\"meta\",null,{\"name\":\"next-size-adjust\",\"content\":\"\"}]],[\"$\",\"$La\",null,{\"children\":[\"$\",\"div\",null,{\"hidden\":true,\"children\":[\"$\",\"$b\",null,{\"fallback\":null,\"children\":\"$Lc\"}]}]}]]}],false]],\"m\":\"$undefined\",\"G\":[\"$d\",[]],\"s\":false,\"S\":true}\n"])</script><script>self.__next_f.push([1,"9:[[\"$\",\"meta\",\"0\",{\"charSet\":\"utf-8\"}],[\"$\",\"meta\",\"1\",{\"name\":\"viewport\",\"content\":\"width=device-width, initial-scale=1\"}]]\n5:null\n"])</script><script>self.__next_f.push([1,"e:I[8175,[],\"IconMark\"]\n7:{\"metadata\":[[\"$\",\"title\",\"0\",{\"children\":\"Create Next App\"}],[\"$\",\"meta\",\"1\",{\"name\":\"description\",\"content\":\"Generated by create next app\"}],[\"$\",\"link\",\"2\",{\"rel\":\"icon\",\"href\":\"/ui/favicon.ico\",\"type\":\"image/x-icon\",\"sizes\":\"16x16\"}],[\"$\",\"$Le\",\"3\",{}]],\"error\":null,\"digest\":\"$undefined\"}\n"])</script><script>self.__next_f.push([1,"c:\"$7:metadata\"\n"])</script></body></html>
|
qtype/interpreter/ui/index.html
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
<!DOCTYPE html><!--
|
|
1
|
+
<!DOCTYPE html><!--OT8QJQW3J70VbDWWfrEMT--><html lang="en"><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="preload" href="./_next/static/media/569ce4b8f30dc480-s.p.woff2" as="font" crossorigin="" type="font/woff2"/><link rel="preload" href="./_next/static/media/93f479601ee12b01-s.p.woff2" as="font" crossorigin="" type="font/woff2"/><link rel="stylesheet" href="./_next/static/css/a262c53826df929b.css" data-precedence="next"/><link rel="preload" as="script" fetchPriority="low" href="./_next/static/chunks/webpack-8289c17c67827f22.js"/><script src="./_next/static/chunks/4bd1b696-cf72ae8a39fa05aa.js" async=""></script><script src="./_next/static/chunks/964-ed4ab073db645007.js" async=""></script><script src="./_next/static/chunks/main-app-6fc6346bc8f7f163.js" async=""></script><script src="./_next/static/chunks/ba12c10f-22556063851a6df2.js" async=""></script><script src="./_next/static/chunks/736-7fc606e244fedcb1.js" async=""></script><script src="./_next/static/chunks/app/page-c72e847e888e549d.js" async=""></script><meta name="next-size-adjust" content=""/><title>Create Next App</title><meta name="description" content="Generated by create next app"/><link rel="icon" href="/ui/favicon.ico" type="image/x-icon" sizes="16x16"/><script src="./_next/static/chunks/polyfills-42372ed130431b0a.js" noModule=""></script></head><body class="__variable_6de60c __variable_152ec0 antialiased"><div hidden=""><!--$--><!--/$--></div><div class="font-sans min-h-screen p-6 sm:p-8"><main class="max-w-6xl mx-auto space-y-6"><div class="text-center border-b pb-4"><h1 class="text-4xl font-bold text-gray-900 dark:text-white mb-2">QType Frontend</h1></div><p class="text-muted-foreground text-sm">Loading flows...</p></main></div><!--$--><!--/$--><script src="./_next/static/chunks/webpack-8289c17c67827f22.js" id="_R_" 
async=""></script><script>(self.__next_f=self.__next_f||[]).push([0])</script><script>self.__next_f.push([1,"1:\"$Sreact.fragment\"\n2:I[7555,[],\"\"]\n3:I[1295,[],\"\"]\n4:I[894,[],\"ClientPageRoot\"]\n5:I[8785,[\"803\",\"static/chunks/ba12c10f-22556063851a6df2.js\",\"736\",\"static/chunks/736-7fc606e244fedcb1.js\",\"974\",\"static/chunks/app/page-c72e847e888e549d.js\"],\"default\"]\n8:I[9665,[],\"OutletBoundary\"]\na:I[4911,[],\"AsyncMetadataOutlet\"]\nc:I[9665,[],\"ViewportBoundary\"]\ne:I[9665,[],\"MetadataBoundary\"]\nf:\"$Sreact.suspense\"\n11:I[8393,[],\"\"]\n:HL[\"./_next/static/media/569ce4b8f30dc480-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n:HL[\"./_next/static/media/93f479601ee12b01-s.p.woff2\",\"font\",{\"crossOrigin\":\"\",\"type\":\"font/woff2\"}]\n:HL[\"./_next/static/css/a262c53826df929b.css\",\"style\"]\n"])</script><script>self.__next_f.push([1,"0:{\"P\":null,\"b\":\"OT8QJQW3J70VbDWWfrEMT\",\"p\":\".\",\"c\":[\"\",\"\"],\"i\":false,\"f\":[[[\"\",{\"children\":[\"__PAGE__\",{}]},\"$undefined\",\"$undefined\",true],[\"\",[\"$\",\"$1\",\"c\",{\"children\":[[[\"$\",\"link\",\"0\",{\"rel\":\"stylesheet\",\"href\":\"./_next/static/css/a262c53826df929b.css\",\"precedence\":\"next\",\"crossOrigin\":\"$undefined\",\"nonce\":\"$undefined\"}]],[\"$\",\"html\",null,{\"lang\":\"en\",\"children\":[\"$\",\"body\",null,{\"className\":\"__variable_6de60c __variable_152ec0 antialiased\",\"children\":[\"$\",\"$L2\",null,{\"parallelRouterKey\":\"children\",\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L3\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":[[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":{\"fontFamily\":\"system-ui,\\\"Segoe UI\\\",Roboto,Helvetica,Arial,sans-serif,\\\"Apple Color Emoji\\\",\\\"Segoe UI 
Emoji\\\"\",\"height\":\"100vh\",\"textAlign\":\"center\",\"display\":\"flex\",\"flexDirection\":\"column\",\"alignItems\":\"center\",\"justifyContent\":\"center\"},\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":{\"display\":\"inline-block\",\"margin\":\"0 20px 0 0\",\"padding\":\"0 23px 0 0\",\"fontSize\":24,\"fontWeight\":500,\"verticalAlign\":\"top\",\"lineHeight\":\"49px\"},\"children\":404}],[\"$\",\"div\",null,{\"style\":{\"display\":\"inline-block\"},\"children\":[\"$\",\"h2\",null,{\"style\":{\"fontSize\":14,\"fontWeight\":400,\"lineHeight\":\"49px\",\"margin\":0},\"children\":\"This page could not be found.\"}]}]]}]}]],[]],\"forbidden\":\"$undefined\",\"unauthorized\":\"$undefined\"}]}]}]]}],{\"children\":[\"__PAGE__\",[\"$\",\"$1\",\"c\",{\"children\":[[\"$\",\"$L4\",null,{\"Component\":\"$5\",\"searchParams\":{},\"params\":{},\"promises\":[\"$@6\",\"$@7\"]}],null,[\"$\",\"$L8\",null,{\"children\":[\"$L9\",[\"$\",\"$La\",null,{\"promise\":\"$@b\"}]]}]]}],{},null,false]},null,false],[\"$\",\"$1\",\"h\",{\"children\":[null,[[\"$\",\"$Lc\",null,{\"children\":\"$Ld\"}],[\"$\",\"meta\",null,{\"name\":\"next-size-adjust\",\"content\":\"\"}]],[\"$\",\"$Le\",null,{\"children\":[\"$\",\"div\",null,{\"hidden\":true,\"children\":[\"$\",\"$f\",null,{\"fallback\":null,\"children\":\"$L10\"}]}]}]]}],false]],\"m\":\"$undefined\",\"G\":[\"$11\",[]],\"s\":false,\"S\":true}\n"])</script><script>self.__next_f.push([1,"6:{}\n7:\"$0:f:0:1:2:children:1:props:children:0:props:params\"\n"])</script><script>self.__next_f.push([1,"d:[[\"$\",\"meta\",\"0\",{\"charSet\":\"utf-8\"}],[\"$\",\"meta\",\"1\",{\"name\":\"viewport\",\"content
\":\"width=device-width, initial-scale=1\"}]]\n9:null\n"])</script><script>self.__next_f.push([1,"12:I[8175,[],\"IconMark\"]\nb:{\"metadata\":[[\"$\",\"title\",\"0\",{\"children\":\"Create Next App\"}],[\"$\",\"meta\",\"1\",{\"name\":\"description\",\"content\":\"Generated by create next app\"}],[\"$\",\"link\",\"2\",{\"rel\":\"icon\",\"href\":\"/ui/favicon.ico\",\"type\":\"image/x-icon\",\"sizes\":\"16x16\"}],[\"$\",\"$L12\",\"3\",{}]],\"error\":null,\"digest\":\"$undefined\"}\n"])</script><script>self.__next_f.push([1,"10:\"$b:metadata\"\n"])</script></body></html>
|