@posthog/agent 1.12.0 → 1.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/_virtual/_commonjsHelpers.js +6 -0
- package/dist/_virtual/_commonjsHelpers.js.map +1 -0
- package/dist/_virtual/index.js +4 -0
- package/dist/_virtual/index.js.map +1 -0
- package/dist/node_modules/@ai-sdk/anthropic/dist/index.js +1154 -0
- package/dist/node_modules/@ai-sdk/anthropic/dist/index.js.map +1 -0
- package/dist/node_modules/@ai-sdk/provider/dist/index.js +296 -0
- package/dist/node_modules/@ai-sdk/provider/dist/index.js.map +1 -0
- package/dist/node_modules/@ai-sdk/provider-utils/dist/index.js +576 -0
- package/dist/node_modules/@ai-sdk/provider-utils/dist/index.js.map +1 -0
- package/dist/node_modules/@ai-sdk/ui-utils/dist/index.js +741 -0
- package/dist/node_modules/@ai-sdk/ui-utils/dist/index.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/api/context.js +112 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/api/context.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/api/diag.js +123 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/api/diag.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/api/metrics.js +62 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/api/metrics.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/api/propagation.js +91 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/api/propagation.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/api/trace.js +79 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/api/trace.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js +59 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js +99 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/baggage/utils.js +31 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/baggage/utils.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js +69 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/context/context.js +54 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/context/context.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/context-api.js +22 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/context-api.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js +104 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js +44 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/diag/types.js +43 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/diag/types.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/diag-api.js +27 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/diag-api.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js +62 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/internal/semver.js +121 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/internal/semver.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeter.js +167 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeter.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeterProvider.js +33 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeterProvider.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/metrics-api.js +22 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/metrics-api.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js +21 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js +35 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js +40 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/propagation-api.js +22 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/propagation-api.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js +70 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js +78 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js +34 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js +55 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js +56 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js +76 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js +27 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js +45 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/status.js +22 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/status.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js +25 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace-api.js +24 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/trace-api.js.map +1 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/version.js +20 -0
- package/dist/node_modules/@opentelemetry/api/build/esm/version.js.map +1 -0
- package/dist/node_modules/ai/dist/index.js +2870 -0
- package/dist/node_modules/ai/dist/index.js.map +1 -0
- package/dist/node_modules/nanoid/non-secure/index.js +13 -0
- package/dist/node_modules/nanoid/non-secure/index.js.map +1 -0
- package/dist/node_modules/secure-json-parse/index.js +133 -0
- package/dist/node_modules/secure-json-parse/index.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/Options.js +37 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/Options.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/Refs.js +26 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/Refs.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/errorMessages.js +17 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/errorMessages.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/getRelativePath.js +11 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/getRelativePath.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/index.js +8 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/index.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parseDef.js +66 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parseDef.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/any.js +21 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/any.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/array.js +30 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/array.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/bigint.js +53 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/bigint.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/boolean.js +8 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/boolean.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/branded.js +8 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/branded.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/catch.js +8 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/catch.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/date.js +50 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/date.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/default.js +11 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/default.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/effects.js +11 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/effects.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/enum.js +9 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/enum.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/intersection.js +56 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/intersection.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/literal.js +24 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/literal.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/map.js +30 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/map.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/nativeEnum.js +19 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/nativeEnum.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/never.js +15 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/never.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/null.js +13 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/null.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/nullable.js +37 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/nullable.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/number.js +56 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/number.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/object.js +76 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/object.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/optional.js +25 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/optional.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/pipeline.js +24 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/pipeline.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/promise.js +8 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/promise.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/readonly.js +8 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/readonly.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/record.js +65 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/record.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/set.js +24 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/set.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/string.js +350 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/string.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/tuple.js +36 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/tuple.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/undefined.js +10 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/undefined.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/union.js +84 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/union.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/unknown.js +8 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/parsers/unknown.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/selectParser.js +110 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/selectParser.js.map +1 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/zodToJsonSchema.js +90 -0
- package/dist/node_modules/zod-to-json-schema/dist/esm/zodToJsonSchema.js.map +1 -0
- package/dist/src/agent.d.ts +3 -0
- package/dist/src/agent.d.ts.map +1 -1
- package/dist/src/agent.js +93 -291
- package/dist/src/agent.js.map +1 -1
- package/dist/src/agents/planning.d.ts +1 -1
- package/dist/src/agents/planning.d.ts.map +1 -1
- package/dist/src/agents/planning.js +1 -2
- package/dist/src/agents/planning.js.map +1 -1
- package/dist/src/agents/research.d.ts +1 -1
- package/dist/src/agents/research.d.ts.map +1 -1
- package/dist/src/agents/research.js +3 -6
- package/dist/src/agents/research.js.map +1 -1
- package/dist/src/prompt-builder.d.ts.map +1 -1
- package/dist/src/prompt-builder.js +0 -1
- package/dist/src/prompt-builder.js.map +1 -1
- package/dist/src/structured-extraction.d.ts +2 -2
- package/dist/src/structured-extraction.d.ts.map +1 -1
- package/dist/src/structured-extraction.js +51 -110
- package/dist/src/structured-extraction.js.map +1 -1
- package/dist/src/workflow/config.d.ts +3 -0
- package/dist/src/workflow/config.d.ts.map +1 -0
- package/dist/src/workflow/config.js +43 -0
- package/dist/src/workflow/config.js.map +1 -0
- package/dist/src/workflow/steps/build.d.ts +3 -0
- package/dist/src/workflow/steps/build.d.ts.map +1 -0
- package/dist/src/workflow/steps/build.js +64 -0
- package/dist/src/workflow/steps/build.js.map +1 -0
- package/dist/src/workflow/steps/plan.d.ts +3 -0
- package/dist/src/workflow/steps/plan.d.ts.map +1 -0
- package/dist/src/workflow/steps/plan.js +86 -0
- package/dist/src/workflow/steps/plan.js.map +1 -0
- package/dist/src/workflow/steps/research.d.ts +3 -0
- package/dist/src/workflow/steps/research.d.ts.map +1 -0
- package/dist/src/workflow/steps/research.js +124 -0
- package/dist/src/workflow/steps/research.js.map +1 -0
- package/dist/src/workflow/types.d.ts +48 -0
- package/dist/src/workflow/types.d.ts.map +1 -0
- package/dist/src/workflow/utils.d.ts +12 -0
- package/dist/src/workflow/utils.d.ts.map +1 -0
- package/dist/src/workflow/utils.js +38 -0
- package/dist/src/workflow/utils.js.map +1 -0
- package/package.json +5 -2
- package/src/agent.ts +112 -321
- package/src/agents/planning.ts +1 -2
- package/src/agents/research.ts +3 -6
- package/src/prompt-builder.ts +0 -2
- package/src/structured-extraction.ts +58 -115
- package/src/workflow/config.ts +42 -0
- package/src/workflow/steps/build.ts +87 -0
- package/src/workflow/steps/plan.ts +112 -0
- package/src/workflow/steps/research.ts +156 -0
- package/src/workflow/types.ts +53 -0
- package/src/workflow/utils.ts +50 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sources":["../../../../node_modules/ai/dist/index.mjs"],"sourcesContent":["var __defProp = Object.defineProperty;\nvar __export = (target, all) => {\n for (var name17 in all)\n __defProp(target, name17, { get: all[name17], enumerable: true });\n};\n\n// core/index.ts\nimport { createIdGenerator as createIdGenerator5, generateId as generateId2 } from \"@ai-sdk/provider-utils\";\nimport {\n formatAssistantStreamPart,\n formatDataStreamPart as formatDataStreamPart3,\n jsonSchema as jsonSchema2,\n parseAssistantStreamPart,\n parseDataStreamPart,\n processDataStream,\n processTextStream,\n zodSchema\n} from \"@ai-sdk/ui-utils\";\n\n// core/data-stream/create-data-stream.ts\nimport { formatDataStreamPart } from \"@ai-sdk/ui-utils\";\nfunction createDataStream({\n execute,\n onError = () => \"An error occurred.\"\n // mask error messages for safety by default\n}) {\n let controller;\n const ongoingStreamPromises = [];\n const stream = new ReadableStream({\n start(controllerArg) {\n controller = controllerArg;\n }\n });\n function safeEnqueue(data) {\n try {\n controller.enqueue(data);\n } catch (error) {\n }\n }\n try {\n const result = execute({\n write(data) {\n safeEnqueue(data);\n },\n writeData(data) {\n safeEnqueue(formatDataStreamPart(\"data\", [data]));\n },\n writeMessageAnnotation(annotation) {\n safeEnqueue(formatDataStreamPart(\"message_annotations\", [annotation]));\n },\n writeSource(source) {\n safeEnqueue(formatDataStreamPart(\"source\", source));\n },\n merge(streamArg) {\n ongoingStreamPromises.push(\n (async () => {\n const reader = streamArg.getReader();\n while (true) {\n const { done, value } = await reader.read();\n if (done)\n break;\n safeEnqueue(value);\n }\n })().catch((error) => {\n safeEnqueue(formatDataStreamPart(\"error\", onError(error)));\n })\n );\n },\n onError\n });\n if (result) {\n ongoingStreamPromises.push(\n result.catch((error) => {\n safeEnqueue(formatDataStreamPart(\"error\", onError(error)));\n 
})\n );\n }\n } catch (error) {\n safeEnqueue(formatDataStreamPart(\"error\", onError(error)));\n }\n const waitForStreams = new Promise(async (resolve) => {\n while (ongoingStreamPromises.length > 0) {\n await ongoingStreamPromises.shift();\n }\n resolve();\n });\n waitForStreams.finally(() => {\n try {\n controller.close();\n } catch (error) {\n }\n });\n return stream;\n}\n\n// core/util/prepare-response-headers.ts\nfunction prepareResponseHeaders(headers, {\n contentType,\n dataStreamVersion\n}) {\n const responseHeaders = new Headers(headers != null ? headers : {});\n if (!responseHeaders.has(\"Content-Type\")) {\n responseHeaders.set(\"Content-Type\", contentType);\n }\n if (dataStreamVersion !== void 0) {\n responseHeaders.set(\"X-Vercel-AI-Data-Stream\", dataStreamVersion);\n }\n return responseHeaders;\n}\n\n// core/data-stream/create-data-stream-response.ts\nfunction createDataStreamResponse({\n status,\n statusText,\n headers,\n execute,\n onError\n}) {\n return new Response(\n createDataStream({ execute, onError }).pipeThrough(new TextEncoderStream()),\n {\n status,\n statusText,\n headers: prepareResponseHeaders(headers, {\n contentType: \"text/plain; charset=utf-8\",\n dataStreamVersion: \"v1\"\n })\n }\n );\n}\n\n// core/util/prepare-outgoing-http-headers.ts\nfunction prepareOutgoingHttpHeaders(headers, {\n contentType,\n dataStreamVersion\n}) {\n const outgoingHeaders = {};\n if (headers != null) {\n for (const [key, value] of Object.entries(headers)) {\n outgoingHeaders[key] = value;\n }\n }\n if (outgoingHeaders[\"Content-Type\"] == null) {\n outgoingHeaders[\"Content-Type\"] = contentType;\n }\n if (dataStreamVersion !== void 0) {\n outgoingHeaders[\"X-Vercel-AI-Data-Stream\"] = dataStreamVersion;\n }\n return outgoingHeaders;\n}\n\n// core/util/write-to-server-response.ts\nfunction writeToServerResponse({\n response,\n status,\n statusText,\n headers,\n stream\n}) {\n response.writeHead(status != null ? 
status : 200, statusText, headers);\n const reader = stream.getReader();\n const read = async () => {\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done)\n break;\n response.write(value);\n }\n } catch (error) {\n throw error;\n } finally {\n response.end();\n }\n };\n read();\n}\n\n// core/data-stream/pipe-data-stream-to-response.ts\nfunction pipeDataStreamToResponse(response, {\n status,\n statusText,\n headers,\n execute,\n onError\n}) {\n writeToServerResponse({\n response,\n status,\n statusText,\n headers: prepareOutgoingHttpHeaders(headers, {\n contentType: \"text/plain; charset=utf-8\",\n dataStreamVersion: \"v1\"\n }),\n stream: createDataStream({ execute, onError }).pipeThrough(\n new TextEncoderStream()\n )\n });\n}\n\n// errors/unsupported-model-version-error.ts\nimport { AISDKError } from \"@ai-sdk/provider\";\nvar UnsupportedModelVersionError = class extends AISDKError {\n constructor() {\n super({\n name: \"AI_UnsupportedModelVersionError\",\n message: `Unsupported model version. AI SDK 4 only supports models that implement specification version \"v1\". 
Please upgrade to AI SDK 5 to use this model.`\n });\n }\n};\n\n// errors/invalid-argument-error.ts\nimport { AISDKError as AISDKError2 } from \"@ai-sdk/provider\";\nvar name = \"AI_InvalidArgumentError\";\nvar marker = `vercel.ai.error.${name}`;\nvar symbol = Symbol.for(marker);\nvar _a;\nvar InvalidArgumentError = class extends AISDKError2 {\n constructor({\n parameter,\n value,\n message\n }) {\n super({\n name,\n message: `Invalid argument for parameter ${parameter}: ${message}`\n });\n this[_a] = true;\n this.parameter = parameter;\n this.value = value;\n }\n static isInstance(error) {\n return AISDKError2.hasMarker(error, marker);\n }\n};\n_a = symbol;\n\n// util/retry-with-exponential-backoff.ts\nimport { APICallError } from \"@ai-sdk/provider\";\nimport { delay, getErrorMessage, isAbortError } from \"@ai-sdk/provider-utils\";\n\n// util/retry-error.ts\nimport { AISDKError as AISDKError3 } from \"@ai-sdk/provider\";\nvar name2 = \"AI_RetryError\";\nvar marker2 = `vercel.ai.error.${name2}`;\nvar symbol2 = Symbol.for(marker2);\nvar _a2;\nvar RetryError = class extends AISDKError3 {\n constructor({\n message,\n reason,\n errors\n }) {\n super({ name: name2, message });\n this[_a2] = true;\n this.reason = reason;\n this.errors = errors;\n this.lastError = errors[errors.length - 1];\n }\n static isInstance(error) {\n return AISDKError3.hasMarker(error, marker2);\n }\n};\n_a2 = symbol2;\n\n// util/retry-with-exponential-backoff.ts\nvar retryWithExponentialBackoff = ({\n maxRetries = 2,\n initialDelayInMs = 2e3,\n backoffFactor = 2\n} = {}) => async (f) => _retryWithExponentialBackoff(f, {\n maxRetries,\n delayInMs: initialDelayInMs,\n backoffFactor\n});\nasync function _retryWithExponentialBackoff(f, {\n maxRetries,\n delayInMs,\n backoffFactor\n}, errors = []) {\n try {\n return await f();\n } catch (error) {\n if (isAbortError(error)) {\n throw error;\n }\n if (maxRetries === 0) {\n throw error;\n }\n const errorMessage = getErrorMessage(error);\n const 
newErrors = [...errors, error];\n const tryNumber = newErrors.length;\n if (tryNumber > maxRetries) {\n throw new RetryError({\n message: `Failed after ${tryNumber} attempts. Last error: ${errorMessage}`,\n reason: \"maxRetriesExceeded\",\n errors: newErrors\n });\n }\n if (error instanceof Error && APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {\n await delay(delayInMs);\n return _retryWithExponentialBackoff(\n f,\n { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },\n newErrors\n );\n }\n if (tryNumber === 1) {\n throw error;\n }\n throw new RetryError({\n message: `Failed after ${tryNumber} attempts with non-retryable error: '${errorMessage}'`,\n reason: \"errorNotRetryable\",\n errors: newErrors\n });\n }\n}\n\n// core/prompt/prepare-retries.ts\nfunction prepareRetries({\n maxRetries\n}) {\n if (maxRetries != null) {\n if (!Number.isInteger(maxRetries)) {\n throw new InvalidArgumentError({\n parameter: \"maxRetries\",\n value: maxRetries,\n message: \"maxRetries must be an integer\"\n });\n }\n if (maxRetries < 0) {\n throw new InvalidArgumentError({\n parameter: \"maxRetries\",\n value: maxRetries,\n message: \"maxRetries must be >= 0\"\n });\n }\n }\n const maxRetriesResult = maxRetries != null ? maxRetries : 2;\n return {\n maxRetries: maxRetriesResult,\n retry: retryWithExponentialBackoff({ maxRetries: maxRetriesResult })\n };\n}\n\n// core/telemetry/assemble-operation-name.ts\nfunction assembleOperationName({\n operationId,\n telemetry\n}) {\n return {\n // standardized operation and resource name:\n \"operation.name\": `${operationId}${(telemetry == null ? void 0 : telemetry.functionId) != null ? ` ${telemetry.functionId}` : \"\"}`,\n \"resource.name\": telemetry == null ? void 0 : telemetry.functionId,\n // detailed, AI SDK specific data:\n \"ai.operationId\": operationId,\n \"ai.telemetry.functionId\": telemetry == null ? 
// core/telemetry/get-base-telemetry-attributes.ts

/**
 * Collects the span attributes shared by every AI SDK operation: model
 * identity, call settings, user telemetry metadata, and request headers
 * (undefined header values are dropped).
 */
function getBaseTelemetryAttributes({ model, settings, telemetry, headers }) {
  const attributes = {
    "ai.model.provider": model.provider,
    "ai.model.id": model.modelId
  };
  // settings:
  for (const [key, value] of Object.entries(settings)) {
    attributes[`ai.settings.${key}`] = value;
  }
  // add metadata as attributes:
  for (const [key, value] of Object.entries(telemetry?.metadata ?? {})) {
    attributes[`ai.telemetry.metadata.${key}`] = value;
  }
  // request headers:
  for (const [key, value] of Object.entries(headers ?? {})) {
    if (value !== undefined) {
      attributes[`ai.request.headers.${key}`] = value;
    }
  }
  return attributes;
}

// core/telemetry/noop-tracer.ts

// Inert span context / span / tracer used when telemetry is disabled.
const noopSpanContext = {
  traceId: "",
  spanId: "",
  traceFlags: 0
};
const noopSpan = {
  spanContext() {
    return noopSpanContext;
  },
  setAttribute() {
    return this;
  },
  setAttributes() {
    return this;
  },
  addEvent() {
    return this;
  },
  addLink() {
    return this;
  },
  addLinks() {
    return this;
  },
  setStatus() {
    return this;
  },
  updateName() {
    return this;
  },
  end() {
    return this;
  },
  isRecording() {
    return false;
  },
  recordException() {
    return this;
  }
};
const noopTracer = {
  startSpan() {
    return noopSpan;
  },
  // Mirrors the OpenTelemetry overloads: the first function argument is the
  // callback; it is invoked immediately with the no-op span.
  startActiveSpan(_name, arg1, arg2, arg3) {
    const callback = [arg1, arg2, arg3].find(
      (candidate) => typeof candidate === "function"
    );
    return callback ? callback(noopSpan) : undefined;
  }
};
// core/telemetry/record-span.ts

/**
 * Runs `fn` inside an active span. The span is ended when the callback
 * completes (unless endWhenDone is false) and always ended on error, after
 * the error has been recorded on it. The error is rethrown unchanged.
 */
function recordSpan({ name: spanName, tracer, attributes, fn, endWhenDone = true }) {
  return tracer.startActiveSpan(spanName, { attributes }, async (span) => {
    try {
      const result = await fn(span);
      if (endWhenDone) {
        span.end();
      }
      return result;
    } catch (error) {
      try {
        recordErrorOnSpan(span, error);
      } finally {
        // always end the span on failure, even if recording the error throws
        span.end();
      }
      throw error;
    }
  });
}

/** Marks a span as errored; Error instances also get an exception event. */
function recordErrorOnSpan(span, error) {
  if (!(error instanceof Error)) {
    span.setStatus({ code: SpanStatusCode.ERROR });
    return;
  }
  span.recordException({
    name: error.name,
    message: error.message,
    stack: error.stack
  });
  span.setStatus({ code: SpanStatusCode.ERROR, message: error.message });
}

// core/telemetry/select-telemetry-attributes.ts

/**
 * Filters attribute candidates according to the telemetry settings.
 * Plain values pass through; `{ input: () => ... }` / `{ output: () => ... }`
 * wrappers are resolved lazily and honor recordInputs / recordOutputs.
 * Returns an empty object when telemetry is not enabled.
 */
function selectTelemetryAttributes({ telemetry, attributes }) {
  if (telemetry?.isEnabled !== true) {
    return {};
  }
  const selected = {};
  for (const [key, value] of Object.entries(attributes)) {
    if (value === undefined) {
      continue;
    }
    if (typeof value === "object" && "input" in value && typeof value.input === "function") {
      if (telemetry?.recordInputs === false) {
        continue;
      }
      const resolved = value.input();
      if (resolved !== undefined) {
        selected[key] = resolved;
      }
      continue;
    }
    if (typeof value === "object" && "output" in value && typeof value.output === "function") {
      if (telemetry?.recordOutputs === false) {
        continue;
      }
      const resolved = value.output();
      if (resolved !== undefined) {
        selected[key] = resolved;
      }
      continue;
    }
    selected[key] = value;
  }
  return selected;
}
// core/embed/embed.ts

/** Result wrapper returned by `embed` for a single embedded value. */
var DefaultEmbedResult = class {
  constructor(options) {
    this.value = options.value;
    this.embedding = options.embedding;
    this.usage = options.usage;
    this.rawResponse = options.rawResponse;
  }
};

// core/util/split-array.ts

/**
 * Splits `array` into consecutive chunks of at most `chunkSize` elements.
 * The final chunk may be shorter.
 *
 * @throws {Error} when chunkSize is not greater than 0.
 */
function splitArray(array, chunkSize) {
  if (chunkSize <= 0) {
    throw new Error("chunkSize must be greater than 0");
  }
  const result = [];
  for (let i = 0; i < array.length; i += chunkSize) {
    result.push(array.slice(i, i + chunkSize));
  }
  return result;
}

// core/embed/embed-many.ts

/**
 * Embeds several values with the given embedding model.
 *
 * Honors `model.maxEmbeddingsPerCall`: when the model declares no limit, a
 * single `doEmbed` call embeds all values; otherwise the values are embedded
 * in sequential chunks and the token usage is summed. All provider calls are
 * wrapped in retry logic and telemetry spans.
 *
 * NOTE: the duplicated single-call / chunked `doEmbed` bodies of the original
 * implementation are unified into the `embedChunk` helper below; behavior is
 * unchanged.
 *
 * @throws {UnsupportedModelVersionError} for string model ids or non-v1 models.
 */
async function embedMany({
  model,
  values,
  maxRetries: maxRetriesArg,
  abortSignal,
  headers,
  experimental_telemetry: telemetry
}) {
  if (typeof model === "string" || model.specificationVersion !== "v1") {
    throw new UnsupportedModelVersionError();
  }
  const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });
  const baseTelemetryAttributes = getBaseTelemetryAttributes({
    model,
    telemetry,
    headers,
    settings: { maxRetries }
  });
  const tracer = getTracer(telemetry);
  // One retried, traced doEmbed call for a batch of values; shared by the
  // single-call and chunked code paths below.
  const embedChunk = (chunk) =>
    retry(() =>
      recordSpan({
        name: "ai.embedMany.doEmbed",
        attributes: selectTelemetryAttributes({
          telemetry,
          attributes: {
            ...assembleOperationName({
              operationId: "ai.embedMany.doEmbed",
              telemetry
            }),
            ...baseTelemetryAttributes,
            // specific settings that only make sense on the outer level:
            "ai.values": {
              input: () => chunk.map((value) => JSON.stringify(value))
            }
          }
        }),
        tracer,
        fn: async (doEmbedSpan) => {
          const modelResponse = await model.doEmbed({
            values: chunk,
            abortSignal,
            headers
          });
          const embeddings = modelResponse.embeddings;
          // NaN marks "usage not reported by the provider":
          const usage = modelResponse.usage ?? { tokens: NaN };
          doEmbedSpan.setAttributes(
            selectTelemetryAttributes({
              telemetry,
              attributes: {
                "ai.embeddings": {
                  output: () => embeddings.map((embedding) => JSON.stringify(embedding))
                },
                "ai.usage.tokens": usage.tokens
              }
            })
          );
          return { embeddings, usage };
        }
      })
    );
  return recordSpan({
    name: "ai.embedMany",
    attributes: selectTelemetryAttributes({
      telemetry,
      attributes: {
        ...assembleOperationName({ operationId: "ai.embedMany", telemetry }),
        ...baseTelemetryAttributes,
        // specific settings that only make sense on the outer level:
        "ai.values": {
          input: () => values.map((value) => JSON.stringify(value))
        }
      }
    }),
    tracer,
    fn: async (span) => {
      // Records final embeddings/usage on the outer span.
      const setOutputAttributes = (embeddings, tokens) => {
        span.setAttributes(
          selectTelemetryAttributes({
            telemetry,
            attributes: {
              "ai.embeddings": {
                output: () => embeddings.map((embedding) => JSON.stringify(embedding))
              },
              "ai.usage.tokens": tokens
            }
          })
        );
      };
      const maxEmbeddingsPerCall = model.maxEmbeddingsPerCall;
      if (maxEmbeddingsPerCall == null) {
        // no batch limit: embed everything with a single provider call.
        const { embeddings, usage } = await embedChunk(values);
        setOutputAttributes(embeddings, usage.tokens);
        return new DefaultEmbedManyResult({ values, embeddings, usage });
      }
      // batch limit: embed sequential chunks and aggregate token usage.
      const embeddings = [];
      let tokens = 0;
      for (const chunk of splitArray(values, maxEmbeddingsPerCall)) {
        const { embeddings: chunkEmbeddings, usage } = await embedChunk(chunk);
        embeddings.push(...chunkEmbeddings);
        tokens += usage.tokens;
      }
      setOutputAttributes(embeddings, tokens);
      return new DefaultEmbedManyResult({
        values,
        embeddings,
        usage: { tokens }
      });
    }
  });
}

/** Result wrapper returned by `embedMany`. */
var DefaultEmbedManyResult = class {
  constructor(options) {
    this.values = options.values;
    this.embeddings = options.embeddings;
    this.usage = options.usage;
  }
};
// core/generate-text/generated-file.ts

/**
 * Generated file holding its payload either as base64 or as bytes; the
 * missing representation is derived lazily and cached on first access.
 */
var DefaultGeneratedFile = class {
  constructor({ data, mimeType }) {
    // exactly one representation is populated up front:
    if (data instanceof Uint8Array) {
      this.base64Data = undefined;
      this.uint8ArrayData = data;
    } else {
      this.base64Data = data;
      this.uint8ArrayData = undefined;
    }
    this.mimeType = mimeType;
  }
  // lazy conversion with caching to avoid unnecessary conversion overhead:
  get base64() {
    if (this.base64Data == null) {
      this.base64Data = convertUint8ArrayToBase64(this.uint8ArrayData);
    }
    return this.base64Data;
  }
  // lazy conversion with caching to avoid unnecessary conversion overhead:
  get uint8Array() {
    if (this.uint8ArrayData == null) {
      this.uint8ArrayData = convertBase64ToUint8Array(this.base64Data);
    }
    return this.uint8ArrayData;
  }
};

/** Same as DefaultGeneratedFile, tagged with type "file". */
var DefaultGeneratedFileWithType = class extends DefaultGeneratedFile {
  constructor(options) {
    super(options);
    this.type = "file";
  }
};

// core/util/detect-mimetype.ts

// Magic-number tables: each entry carries the byte prefix and the prefix the
// same bytes produce when base64-encoded.
var imageMimeTypeSignatures = [
  { mimeType: "image/gif", bytesPrefix: [71, 73, 70], base64Prefix: "R0lG" },
  { mimeType: "image/png", bytesPrefix: [137, 80, 78, 71], base64Prefix: "iVBORw" },
  { mimeType: "image/jpeg", bytesPrefix: [255, 216], base64Prefix: "/9j/" },
  { mimeType: "image/webp", bytesPrefix: [82, 73, 70, 70], base64Prefix: "UklGRg" },
  { mimeType: "image/bmp", bytesPrefix: [66, 77], base64Prefix: "Qk" },
  { mimeType: "image/tiff", bytesPrefix: [73, 73, 42, 0], base64Prefix: "SUkqAA" },
  { mimeType: "image/tiff", bytesPrefix: [77, 77, 0, 42], base64Prefix: "TU0AKg" },
  {
    mimeType: "image/avif",
    bytesPrefix: [0, 0, 0, 32, 102, 116, 121, 112, 97, 118, 105, 102],
    base64Prefix: "AAAAIGZ0eXBhdmlm"
  },
  {
    mimeType: "image/heic",
    bytesPrefix: [0, 0, 0, 32, 102, 116, 121, 112, 104, 101, 105, 99],
    base64Prefix: "AAAAIGZ0eXBoZWlj"
  }
];
var audioMimeTypeSignatures = [
  { mimeType: "audio/mpeg", bytesPrefix: [255, 251], base64Prefix: "//s=" },
  { mimeType: "audio/wav", bytesPrefix: [82, 73, 70, 70], base64Prefix: "UklGR" },
  { mimeType: "audio/ogg", bytesPrefix: [79, 103, 103, 83], base64Prefix: "T2dnUw" },
  { mimeType: "audio/flac", bytesPrefix: [102, 76, 97, 67], base64Prefix: "ZkxhQw" },
  { mimeType: "audio/aac", bytesPrefix: [64, 21, 0, 0], base64Prefix: "QBUA" },
  { mimeType: "audio/mp4", bytesPrefix: [102, 116, 121, 112], base64Prefix: "ZnR5cA" }
];

/** Drops a leading ID3v2 tag so the audio magic number becomes visible. */
var stripID3 = (data) => {
  const bytes = typeof data === "string" ? convertBase64ToUint8Array2(data) : data;
  // ID3v2 size: four 7-bit ("synchsafe") bytes at offsets 6-9; header is 10 bytes.
  const id3Size =
    ((bytes[6] & 127) << 21) |
    ((bytes[7] & 127) << 14) |
    ((bytes[8] & 127) << 7) |
    (bytes[9] & 127);
  return bytes.slice(id3Size + 10);
};

function stripID3TagsIfPresent(data) {
  const startsWithId3 =
    typeof data === "string"
      ? data.startsWith("SUQz") // base64 encoding of "ID3"
      : data.length > 10 &&
        data[0] === 73 && // 'I'
        data[1] === 68 && // 'D'
        data[2] === 51; // '3'
  return startsWithId3 ? stripID3(data) : data;
}

/**
 * Returns the mime type whose signature matches the (base64 or byte) data,
 * or undefined when no signature matches.
 */
function detectMimeType({ data, signatures }) {
  const processedData = stripID3TagsIfPresent(data);
  const isBase64 = typeof processedData === "string";
  for (const { mimeType, bytesPrefix, base64Prefix } of signatures) {
    const matches = isBase64
      ? processedData.startsWith(base64Prefix)
      : processedData.length >= bytesPrefix.length &&
        bytesPrefix.every((byte, index) => processedData[index] === byte);
    if (matches) {
      return mimeType;
    }
  }
  return undefined;
}
// core/generate-image/generate-image.ts (result type)

/** Result wrapper returned by `generateImage`. */
var DefaultGenerateImageResult = class {
  constructor(options) {
    this.images = options.images;
    this.warnings = options.warnings;
    this.responses = options.responses;
  }
  // convenience accessor for the first generated image
  get image() {
    return this.images[0];
  }
};

// util/download.ts

/**
 * Fetches a URL and returns its bytes plus the content-type header (if any).
 * All failures — non-2xx status or network errors — surface as DownloadError.
 */
async function download({ url }) {
  const urlText = url.toString();
  try {
    const response = await fetch(urlText);
    if (!response.ok) {
      throw new DownloadError({
        url: urlText,
        statusCode: response.status,
        statusText: response.statusText
      });
    }
    const body = await response.arrayBuffer();
    return {
      data: new Uint8Array(body),
      mimeType: response.headers.get("content-type") ?? undefined
    };
  } catch (error) {
    // pass our own error through unchanged; wrap everything else
    if (DownloadError.isInstance(error)) {
      throw error;
    }
    throw new DownloadError({ url: urlText, cause: error });
  }
}
// core/prompt/data-content.ts

/**
 * Normalizes data content (base64 string, Uint8Array, or ArrayBuffer) to a
 * base64 string.
 */
function convertDataContentToBase64String(content) {
  if (typeof content === "string") {
    return content;
  }
  const bytes = content instanceof ArrayBuffer ? new Uint8Array(content) : content;
  return convertUint8ArrayToBase642(bytes);
}

/**
 * Normalizes data content to a Uint8Array. Strings are treated as base64.
 *
 * @throws {InvalidDataContentError} for undecodable strings or unsupported types.
 */
function convertDataContentToUint8Array(content) {
  if (content instanceof Uint8Array) {
    return content;
  }
  if (typeof content === "string") {
    try {
      return convertBase64ToUint8Array3(content);
    } catch (error) {
      throw new InvalidDataContentError({
        message: "Invalid data content. Content string is not a base64-encoded media.",
        content,
        cause: error
      });
    }
  }
  if (content instanceof ArrayBuffer) {
    return new Uint8Array(content);
  }
  throw new InvalidDataContentError({ content });
}

/** Decodes a Uint8Array as UTF-8 text. */
function convertUint8ArrayToText(uint8Array) {
  try {
    return new TextDecoder().decode(uint8Array);
  } catch {
    throw new Error("Error decoding Uint8Array to text");
  }
}

// core/prompt/split-data-url.ts

/**
 * Splits a data URL into its mime type and base64 payload; returns
 * undefined fields when the input does not parse as a data URL.
 */
function splitDataUrl(dataUrl) {
  try {
    const [header, base64Content] = dataUrl.split(",");
    const mimeType = header.split(";")[0].split(":")[1];
    return { mimeType, base64Content };
  } catch {
    return { mimeType: undefined, base64Content: undefined };
  }
}
// core/prompt/convert-to-language-model-prompt.ts

/**
 * Converts a single core message into the language-model wire format.
 * String content becomes a single text part; empty text parts are dropped.
 * `providerOptions` takes precedence over the deprecated
 * `experimental_providerMetadata` alias everywhere.
 *
 * @throws {InvalidMessageRoleError} for unknown roles.
 */
function convertToLanguageModelMessage(message, downloadedAssets) {
  const metadataOf = (source) =>
    source.providerOptions ?? source.experimental_providerMetadata;
  const role = message.role;
  switch (role) {
    case "system":
      return {
        role: "system",
        content: message.content,
        providerMetadata: metadataOf(message)
      };
    case "user": {
      if (typeof message.content === "string") {
        return {
          role: "user",
          content: [{ type: "text", text: message.content }],
          providerMetadata: metadataOf(message)
        };
      }
      const parts = message.content
        .map((part) => convertPartToLanguageModelPart(part, downloadedAssets))
        .filter((part) => part.type !== "text" || part.text !== "");
      return {
        role: "user",
        content: parts,
        providerMetadata: metadataOf(message)
      };
    }
    case "assistant": {
      if (typeof message.content === "string") {
        return {
          role: "assistant",
          content: [{ type: "text", text: message.content }],
          providerMetadata: metadataOf(message)
        };
      }
      const parts = message.content
        // remove empty text parts:
        .filter((part) => part.type !== "text" || part.text !== "")
        .map((part) => {
          const providerMetadata = metadataOf(part);
          switch (part.type) {
            case "file":
              return {
                type: "file",
                // URLs pass through; raw data is normalized to base64:
                data: part.data instanceof URL ? part.data : convertDataContentToBase64String(part.data),
                filename: part.filename,
                mimeType: part.mimeType,
                providerMetadata
              };
            case "reasoning":
              return {
                type: "reasoning",
                text: part.text,
                signature: part.signature,
                providerMetadata
              };
            case "redacted-reasoning":
              return {
                type: "redacted-reasoning",
                data: part.data,
                providerMetadata
              };
            case "text":
              return {
                type: "text",
                text: part.text,
                providerMetadata
              };
            case "tool-call":
              return {
                type: "tool-call",
                toolCallId: part.toolCallId,
                toolName: part.toolName,
                args: part.args,
                providerMetadata
              };
          }
        });
      return {
        role: "assistant",
        content: parts,
        providerMetadata: metadataOf(message)
      };
    }
    case "tool":
      return {
        role: "tool",
        content: message.content.map((part) => ({
          type: "tool-result",
          toolCallId: part.toolCallId,
          toolName: part.toolName,
          result: part.result,
          content: part.experimental_content,
          isError: part.isError,
          providerMetadata: metadataOf(part)
        })),
        providerMetadata: metadataOf(message)
      };
    default: {
      const _exhaustiveCheck = role;
      throw new InvalidMessageRoleError({ role: _exhaustiveCheck });
    }
  }
}

/**
 * Collects the image/file URLs in user messages that the model cannot fetch
 * itself, downloads them in parallel, and returns a map keyed by URL string.
 */
async function downloadAssets(messages, downloadImplementation, modelSupportsImageUrls, modelSupportsUrl) {
  const candidateUrls = [];
  for (const message of messages) {
    if (message.role !== "user" || !Array.isArray(message.content)) {
      continue;
    }
    for (const part of message.content) {
      if (part.type !== "image" && part.type !== "file") {
        continue;
      }
      // image URLs the model can consume directly are not downloaded:
      if (part.type === "image" && modelSupportsImageUrls === true) {
        continue;
      }
      let source = part.type === "image" ? part.image : part.data;
      // support string urls:
      if (
        typeof source === "string" &&
        (source.startsWith("http:") || source.startsWith("https:"))
      ) {
        source = new URL(source);
      }
      if (source instanceof URL && !modelSupportsUrl(source)) {
        candidateUrls.push(source);
      }
    }
  }
  const downloaded = await Promise.all(
    candidateUrls.map(async (url) => [url.toString(), await downloadImplementation({ url })])
  );
  return Object.fromEntries(downloaded);
}
// core/prompt/prepare-call-settings.ts

/**
 * Validates generation call settings and fills in defaults.
 * Numeric settings must be numbers; maxTokens a positive integer; seed an
 * integer. Empty stopSequences arrays are normalized to undefined.
 *
 * @throws {InvalidArgumentError} on any invalid setting.
 */
function prepareCallSettings({
  maxTokens,
  temperature,
  topP,
  topK,
  presencePenalty,
  frequencyPenalty,
  stopSequences,
  seed
}) {
  // shared check for the plain-number settings; message text matches the
  // historical per-parameter messages exactly.
  const requireNumber = (parameter, value) => {
    if (value != null && typeof value !== "number") {
      throw new InvalidArgumentError({
        parameter,
        value,
        message: `${parameter} must be a number`
      });
    }
  };
  if (maxTokens != null) {
    if (!Number.isInteger(maxTokens)) {
      throw new InvalidArgumentError({
        parameter: "maxTokens",
        value: maxTokens,
        message: "maxTokens must be an integer"
      });
    }
    if (maxTokens < 1) {
      throw new InvalidArgumentError({
        parameter: "maxTokens",
        value: maxTokens,
        message: "maxTokens must be >= 1"
      });
    }
  }
  requireNumber("temperature", temperature);
  requireNumber("topP", topP);
  requireNumber("topK", topK);
  requireNumber("presencePenalty", presencePenalty);
  requireNumber("frequencyPenalty", frequencyPenalty);
  if (seed != null && !Number.isInteger(seed)) {
    throw new InvalidArgumentError({
      parameter: "seed",
      value: seed,
      message: "seed must be an integer"
    });
  }
  return {
    maxTokens,
    // TODO v5 remove default 0 for temperature
    temperature: temperature ?? 0,
    topP,
    topK,
    presencePenalty,
    frequencyPenalty,
    stopSequences: stopSequences != null && stopSequences.length > 0 ? stopSequences : undefined,
    seed
  };
}

// core/prompt/attachments-to-parts.ts

/**
 * Converts experimental message attachments into message parts.
 * http(s) URLs become image or file parts; data URLs are decoded into
 * image, text, or file parts depending on their content type.
 *
 * @throws {Error} for invalid URLs, unsupported protocols, or attachments
 *   that need an explicit content type but lack one.
 */
function attachmentsToParts(attachments) {
  const parts = [];
  for (const attachment of attachments) {
    let url;
    try {
      url = new URL(attachment.url);
    } catch {
      throw new Error(`Invalid URL: ${attachment.url}`);
    }
    switch (url.protocol) {
      case "http:":
      case "https:": {
        if (attachment.contentType?.startsWith("image/")) {
          parts.push({ type: "image", image: url });
          break;
        }
        if (!attachment.contentType) {
          throw new Error(
            "If the attachment is not an image, it must specify a content type"
          );
        }
        parts.push({
          type: "file",
          data: url,
          mimeType: attachment.contentType
        });
        break;
      }
      case "data:": {
        let mimeType;
        let base64Content;
        try {
          const [header, encoded] = attachment.url.split(",");
          mimeType = header.split(";")[0].split(":")[1];
          base64Content = encoded;
        } catch {
          throw new Error(`Error processing data URL: ${attachment.url}`);
        }
        if (mimeType == null || base64Content == null) {
          throw new Error(`Invalid data URL format: ${attachment.url}`);
        }
        if (attachment.contentType?.startsWith("image/")) {
          parts.push({
            type: "image",
            image: convertDataContentToUint8Array(base64Content)
          });
        } else if (attachment.contentType?.startsWith("text/")) {
          parts.push({
            type: "text",
            text: convertUint8ArrayToText(
              convertDataContentToUint8Array(base64Content)
            )
          });
        } else {
          if (!attachment.contentType) {
            throw new Error(
              "If the attachment is not an image or text, it must specify a content type"
            );
          }
          parts.push({
            type: "file",
            data: base64Content,
            mimeType: attachment.contentType
          });
        }
        break;
      }
      default:
        throw new Error(`Unsupported URL protocol: ${url.protocol}`);
    }
  }
  return parts;
}
AISDKError9.hasMarker(error, marker8);\n }\n};\n_a8 = symbol8;\n\n// core/prompt/convert-to-core-messages.ts\nfunction convertToCoreMessages(messages, options) {\n var _a17, _b;\n const tools = (_a17 = options == null ? void 0 : options.tools) != null ? _a17 : {};\n const coreMessages = [];\n for (let i = 0; i < messages.length; i++) {\n const message = messages[i];\n const isLastMessage = i === messages.length - 1;\n const { role, content, experimental_attachments } = message;\n switch (role) {\n case \"system\": {\n coreMessages.push({\n role: \"system\",\n content\n });\n break;\n }\n case \"user\": {\n if (message.parts == null) {\n coreMessages.push({\n role: \"user\",\n content: experimental_attachments ? [\n { type: \"text\", text: content },\n ...attachmentsToParts(experimental_attachments)\n ] : content\n });\n } else {\n const textParts = message.parts.filter((part) => part.type === \"text\").map((part) => ({\n type: \"text\",\n text: part.text\n }));\n coreMessages.push({\n role: \"user\",\n content: experimental_attachments ? 
[...textParts, ...attachmentsToParts(experimental_attachments)] : textParts\n });\n }\n break;\n }\n case \"assistant\": {\n if (message.parts != null) {\n let processBlock2 = function() {\n const content2 = [];\n for (const part of block) {\n switch (part.type) {\n case \"file\":\n case \"text\": {\n content2.push(part);\n break;\n }\n case \"reasoning\": {\n for (const detail of part.details) {\n switch (detail.type) {\n case \"text\":\n content2.push({\n type: \"reasoning\",\n text: detail.text,\n signature: detail.signature\n });\n break;\n case \"redacted\":\n content2.push({\n type: \"redacted-reasoning\",\n data: detail.data\n });\n break;\n }\n }\n break;\n }\n case \"tool-invocation\":\n content2.push({\n type: \"tool-call\",\n toolCallId: part.toolInvocation.toolCallId,\n toolName: part.toolInvocation.toolName,\n args: part.toolInvocation.args\n });\n break;\n default: {\n const _exhaustiveCheck = part;\n throw new Error(`Unsupported part: ${_exhaustiveCheck}`);\n }\n }\n }\n coreMessages.push({\n role: \"assistant\",\n content: content2\n });\n const stepInvocations = block.filter(\n (part) => part.type === \"tool-invocation\"\n ).map((part) => part.toolInvocation);\n if (stepInvocations.length > 0) {\n coreMessages.push({\n role: \"tool\",\n content: stepInvocations.map(\n (toolInvocation) => {\n if (!(\"result\" in toolInvocation)) {\n throw new MessageConversionError({\n originalMessage: message,\n message: \"ToolInvocation must have a result: \" + JSON.stringify(toolInvocation)\n });\n }\n const { toolCallId, toolName, result } = toolInvocation;\n const tool2 = tools[toolName];\n return (tool2 == null ? void 0 : tool2.experimental_toToolResultContent) != null ? 
{\n type: \"tool-result\",\n toolCallId,\n toolName,\n result: tool2.experimental_toToolResultContent(result),\n experimental_content: tool2.experimental_toToolResultContent(result)\n } : {\n type: \"tool-result\",\n toolCallId,\n toolName,\n result\n };\n }\n )\n });\n }\n block = [];\n blockHasToolInvocations = false;\n currentStep++;\n };\n var processBlock = processBlock2;\n let currentStep = 0;\n let blockHasToolInvocations = false;\n let block = [];\n for (const part of message.parts) {\n switch (part.type) {\n case \"text\": {\n if (blockHasToolInvocations) {\n processBlock2();\n }\n block.push(part);\n break;\n }\n case \"file\":\n case \"reasoning\": {\n block.push(part);\n break;\n }\n case \"tool-invocation\": {\n if (((_b = part.toolInvocation.step) != null ? _b : 0) !== currentStep) {\n processBlock2();\n }\n block.push(part);\n blockHasToolInvocations = true;\n break;\n }\n }\n }\n processBlock2();\n break;\n }\n const toolInvocations = message.toolInvocations;\n if (toolInvocations == null || toolInvocations.length === 0) {\n coreMessages.push({ role: \"assistant\", content });\n break;\n }\n const maxStep = toolInvocations.reduce((max, toolInvocation) => {\n var _a18;\n return Math.max(max, (_a18 = toolInvocation.step) != null ? _a18 : 0);\n }, 0);\n for (let i2 = 0; i2 <= maxStep; i2++) {\n const stepInvocations = toolInvocations.filter(\n (toolInvocation) => {\n var _a18;\n return ((_a18 = toolInvocation.step) != null ? _a18 : 0) === i2;\n }\n );\n if (stepInvocations.length === 0) {\n continue;\n }\n coreMessages.push({\n role: \"assistant\",\n content: [\n ...isLastMessage && content && i2 === 0 ? 
[{ type: \"text\", text: content }] : [],\n ...stepInvocations.map(\n ({ toolCallId, toolName, args }) => ({\n type: \"tool-call\",\n toolCallId,\n toolName,\n args\n })\n )\n ]\n });\n coreMessages.push({\n role: \"tool\",\n content: stepInvocations.map((toolInvocation) => {\n if (!(\"result\" in toolInvocation)) {\n throw new MessageConversionError({\n originalMessage: message,\n message: \"ToolInvocation must have a result: \" + JSON.stringify(toolInvocation)\n });\n }\n const { toolCallId, toolName, result } = toolInvocation;\n const tool2 = tools[toolName];\n return (tool2 == null ? void 0 : tool2.experimental_toToolResultContent) != null ? {\n type: \"tool-result\",\n toolCallId,\n toolName,\n result: tool2.experimental_toToolResultContent(result),\n experimental_content: tool2.experimental_toToolResultContent(result)\n } : {\n type: \"tool-result\",\n toolCallId,\n toolName,\n result\n };\n })\n });\n }\n if (content && !isLastMessage) {\n coreMessages.push({ role: \"assistant\", content });\n }\n break;\n }\n case \"data\": {\n break;\n }\n default: {\n const _exhaustiveCheck = role;\n throw new MessageConversionError({\n originalMessage: message,\n message: `Unsupported role: ${_exhaustiveCheck}`\n });\n }\n }\n }\n return coreMessages;\n}\n\n// core/prompt/message.ts\nimport { z as z6 } from \"zod\";\n\n// core/types/provider-metadata.ts\nimport { z as z3 } from \"zod\";\n\n// core/types/json-value.ts\nimport { z as z2 } from \"zod\";\nvar jsonValueSchema = z2.lazy(\n () => z2.union([\n z2.null(),\n z2.string(),\n z2.number(),\n z2.boolean(),\n z2.record(z2.string(), jsonValueSchema),\n z2.array(jsonValueSchema)\n ])\n);\n\n// core/types/provider-metadata.ts\nvar providerMetadataSchema = z3.record(\n z3.string(),\n z3.record(z3.string(), jsonValueSchema)\n);\n\n// core/prompt/content-part.ts\nimport { z as z5 } from \"zod\";\n\n// core/prompt/tool-result-content.ts\nimport { z as z4 } from \"zod\";\nvar toolResultContentSchema = z4.array(\n z4.union([\n 
z4.object({ type: z4.literal(\"text\"), text: z4.string() }),\n z4.object({\n type: z4.literal(\"image\"),\n data: z4.string(),\n mimeType: z4.string().optional()\n })\n ])\n);\n\n// core/prompt/content-part.ts\nvar textPartSchema = z5.object({\n type: z5.literal(\"text\"),\n text: z5.string(),\n providerOptions: providerMetadataSchema.optional(),\n experimental_providerMetadata: providerMetadataSchema.optional()\n});\nvar imagePartSchema = z5.object({\n type: z5.literal(\"image\"),\n image: z5.union([dataContentSchema, z5.instanceof(URL)]),\n mimeType: z5.string().optional(),\n providerOptions: providerMetadataSchema.optional(),\n experimental_providerMetadata: providerMetadataSchema.optional()\n});\nvar filePartSchema = z5.object({\n type: z5.literal(\"file\"),\n data: z5.union([dataContentSchema, z5.instanceof(URL)]),\n filename: z5.string().optional(),\n mimeType: z5.string(),\n providerOptions: providerMetadataSchema.optional(),\n experimental_providerMetadata: providerMetadataSchema.optional()\n});\nvar reasoningPartSchema = z5.object({\n type: z5.literal(\"reasoning\"),\n text: z5.string(),\n providerOptions: providerMetadataSchema.optional(),\n experimental_providerMetadata: providerMetadataSchema.optional()\n});\nvar redactedReasoningPartSchema = z5.object({\n type: z5.literal(\"redacted-reasoning\"),\n data: z5.string(),\n providerOptions: providerMetadataSchema.optional(),\n experimental_providerMetadata: providerMetadataSchema.optional()\n});\nvar toolCallPartSchema = z5.object({\n type: z5.literal(\"tool-call\"),\n toolCallId: z5.string(),\n toolName: z5.string(),\n args: z5.unknown(),\n providerOptions: providerMetadataSchema.optional(),\n experimental_providerMetadata: providerMetadataSchema.optional()\n});\nvar toolResultPartSchema = z5.object({\n type: z5.literal(\"tool-result\"),\n toolCallId: z5.string(),\n toolName: z5.string(),\n result: z5.unknown(),\n content: toolResultContentSchema.optional(),\n isError: z5.boolean().optional(),\n 
providerOptions: providerMetadataSchema.optional(),\n experimental_providerMetadata: providerMetadataSchema.optional()\n});\n\n// core/prompt/message.ts\nvar coreSystemMessageSchema = z6.object({\n role: z6.literal(\"system\"),\n content: z6.string(),\n providerOptions: providerMetadataSchema.optional(),\n experimental_providerMetadata: providerMetadataSchema.optional()\n});\nvar coreUserMessageSchema = z6.object({\n role: z6.literal(\"user\"),\n content: z6.union([\n z6.string(),\n z6.array(z6.union([textPartSchema, imagePartSchema, filePartSchema]))\n ]),\n providerOptions: providerMetadataSchema.optional(),\n experimental_providerMetadata: providerMetadataSchema.optional()\n});\nvar coreAssistantMessageSchema = z6.object({\n role: z6.literal(\"assistant\"),\n content: z6.union([\n z6.string(),\n z6.array(\n z6.union([\n textPartSchema,\n filePartSchema,\n reasoningPartSchema,\n redactedReasoningPartSchema,\n toolCallPartSchema\n ])\n )\n ]),\n providerOptions: providerMetadataSchema.optional(),\n experimental_providerMetadata: providerMetadataSchema.optional()\n});\nvar coreToolMessageSchema = z6.object({\n role: z6.literal(\"tool\"),\n content: z6.array(toolResultPartSchema),\n providerOptions: providerMetadataSchema.optional(),\n experimental_providerMetadata: providerMetadataSchema.optional()\n});\nvar coreMessageSchema = z6.union([\n coreSystemMessageSchema,\n coreUserMessageSchema,\n coreAssistantMessageSchema,\n coreToolMessageSchema\n]);\n\n// core/prompt/standardize-prompt.ts\nfunction standardizePrompt({\n prompt,\n tools\n}) {\n if (prompt.prompt == null && prompt.messages == null) {\n throw new InvalidPromptError({\n prompt,\n message: \"prompt or messages must be defined\"\n });\n }\n if (prompt.prompt != null && prompt.messages != null) {\n throw new InvalidPromptError({\n prompt,\n message: \"prompt and messages cannot be defined at the same time\"\n });\n }\n if (prompt.system != null && typeof prompt.system !== \"string\") {\n throw new 
InvalidPromptError({\n prompt,\n message: \"system must be a string\"\n });\n }\n if (prompt.prompt != null) {\n if (typeof prompt.prompt !== \"string\") {\n throw new InvalidPromptError({\n prompt,\n message: \"prompt must be a string\"\n });\n }\n return {\n type: \"prompt\",\n system: prompt.system,\n messages: [\n {\n role: \"user\",\n content: prompt.prompt\n }\n ]\n };\n }\n if (prompt.messages != null) {\n const promptType = detectPromptType(prompt.messages);\n const messages = promptType === \"ui-messages\" ? convertToCoreMessages(prompt.messages, {\n tools\n }) : prompt.messages;\n if (messages.length === 0) {\n throw new InvalidPromptError({\n prompt,\n message: \"messages must not be empty\"\n });\n }\n const validationResult = safeValidateTypes({\n value: messages,\n schema: z7.array(coreMessageSchema)\n });\n if (!validationResult.success) {\n throw new InvalidPromptError({\n prompt,\n message: [\n \"message must be a CoreMessage or a UI message\",\n `Validation error: ${validationResult.error.message}`\n ].join(\"\\n\"),\n cause: validationResult.error\n });\n }\n return {\n type: \"messages\",\n messages,\n system: prompt.system\n };\n }\n throw new Error(\"unreachable\");\n}\nfunction detectPromptType(prompt) {\n if (!Array.isArray(prompt)) {\n throw new InvalidPromptError({\n prompt,\n message: [\n \"messages must be an array of CoreMessage or UIMessage\",\n `Received non-array value: ${JSON.stringify(prompt)}`\n ].join(\"\\n\"),\n cause: prompt\n });\n }\n if (prompt.length === 0) {\n return \"messages\";\n }\n const characteristics = prompt.map(detectSingleMessageCharacteristics);\n if (characteristics.some((c) => c === \"has-ui-specific-parts\")) {\n return \"ui-messages\";\n }\n const nonMessageIndex = characteristics.findIndex(\n (c) => c !== \"has-core-specific-parts\" && c !== \"message\"\n );\n if (nonMessageIndex === -1) {\n return \"messages\";\n }\n throw new InvalidPromptError({\n prompt,\n message: [\n \"messages must be an array of 
CoreMessage or UIMessage\",\n `Received message of type: \"${characteristics[nonMessageIndex]}\" at index ${nonMessageIndex}`,\n `messages[${nonMessageIndex}]: ${JSON.stringify(prompt[nonMessageIndex])}`\n ].join(\"\\n\"),\n cause: prompt\n });\n}\nfunction detectSingleMessageCharacteristics(message) {\n if (typeof message === \"object\" && message !== null && (message.role === \"function\" || // UI-only role\n message.role === \"data\" || // UI-only role\n \"toolInvocations\" in message || // UI-specific field\n \"parts\" in message || // UI-specific field\n \"experimental_attachments\" in message)) {\n return \"has-ui-specific-parts\";\n } else if (typeof message === \"object\" && message !== null && \"content\" in message && (Array.isArray(message.content) || // Core messages can have array content\n \"experimental_providerMetadata\" in message || \"providerOptions\" in message)) {\n return \"has-core-specific-parts\";\n } else if (typeof message === \"object\" && message !== null && \"role\" in message && \"content\" in message && typeof message.content === \"string\" && [\"system\", \"user\", \"assistant\", \"tool\"].includes(message.role)) {\n return \"message\";\n } else {\n return \"other\";\n }\n}\n\n// core/types/usage.ts\nfunction calculateLanguageModelUsage({\n promptTokens,\n completionTokens\n}) {\n return {\n promptTokens,\n completionTokens,\n totalTokens: promptTokens + completionTokens\n };\n}\nfunction addLanguageModelUsage(usage1, usage2) {\n return {\n promptTokens: usage1.promptTokens + usage2.promptTokens,\n completionTokens: usage1.completionTokens + usage2.completionTokens,\n totalTokens: usage1.totalTokens + usage2.totalTokens\n };\n}\n\n// core/generate-object/inject-json-instruction.ts\nvar DEFAULT_SCHEMA_PREFIX = \"JSON schema:\";\nvar DEFAULT_SCHEMA_SUFFIX = \"You MUST answer with a JSON object that matches the JSON schema above.\";\nvar DEFAULT_GENERIC_SUFFIX = \"You MUST answer with JSON.\";\nfunction injectJsonInstruction({\n 
prompt,\n schema,\n schemaPrefix = schema != null ? DEFAULT_SCHEMA_PREFIX : void 0,\n schemaSuffix = schema != null ? DEFAULT_SCHEMA_SUFFIX : DEFAULT_GENERIC_SUFFIX\n}) {\n return [\n prompt != null && prompt.length > 0 ? prompt : void 0,\n prompt != null && prompt.length > 0 ? \"\" : void 0,\n // add a newline if prompt is not null\n schemaPrefix,\n schema != null ? JSON.stringify(schema) : void 0,\n schemaSuffix\n ].filter((line) => line != null).join(\"\\n\");\n}\n\n// core/generate-object/output-strategy.ts\nimport {\n isJSONArray,\n isJSONObject,\n TypeValidationError,\n UnsupportedFunctionalityError\n} from \"@ai-sdk/provider\";\nimport { safeValidateTypes as safeValidateTypes2 } from \"@ai-sdk/provider-utils\";\nimport { asSchema } from \"@ai-sdk/ui-utils\";\n\n// core/util/async-iterable-stream.ts\nfunction createAsyncIterableStream(source) {\n const stream = source.pipeThrough(new TransformStream());\n stream[Symbol.asyncIterator] = () => {\n const reader = stream.getReader();\n return {\n async next() {\n const { done, value } = await reader.read();\n return done ? { done: true, value: void 0 } : { done: false, value };\n }\n };\n };\n return stream;\n}\n\n// core/generate-object/output-strategy.ts\nvar noSchemaOutputStrategy = {\n type: \"no-schema\",\n jsonSchema: void 0,\n validatePartialResult({ value, textDelta }) {\n return { success: true, value: { partial: value, textDelta } };\n },\n validateFinalResult(value, context) {\n return value === void 0 ? 
{\n success: false,\n error: new NoObjectGeneratedError({\n message: \"No object generated: response did not match schema.\",\n text: context.text,\n response: context.response,\n usage: context.usage,\n finishReason: context.finishReason\n })\n } : { success: true, value };\n },\n createElementStream() {\n throw new UnsupportedFunctionalityError({\n functionality: \"element streams in no-schema mode\"\n });\n }\n};\nvar objectOutputStrategy = (schema) => ({\n type: \"object\",\n jsonSchema: schema.jsonSchema,\n validatePartialResult({ value, textDelta }) {\n return {\n success: true,\n value: {\n // Note: currently no validation of partial results:\n partial: value,\n textDelta\n }\n };\n },\n validateFinalResult(value) {\n return safeValidateTypes2({ value, schema });\n },\n createElementStream() {\n throw new UnsupportedFunctionalityError({\n functionality: \"element streams in object mode\"\n });\n }\n});\nvar arrayOutputStrategy = (schema) => {\n const { $schema, ...itemSchema } = schema.jsonSchema;\n return {\n type: \"enum\",\n // wrap in object that contains array of elements, since most LLMs will not\n // be able to generate an array directly:\n // possible future optimization: use arrays directly when model supports grammar-guided generation\n jsonSchema: {\n $schema: \"http://json-schema.org/draft-07/schema#\",\n type: \"object\",\n properties: {\n elements: { type: \"array\", items: itemSchema }\n },\n required: [\"elements\"],\n additionalProperties: false\n },\n validatePartialResult({ value, latestObject, isFirstDelta, isFinalDelta }) {\n var _a17;\n if (!isJSONObject(value) || !isJSONArray(value.elements)) {\n return {\n success: false,\n error: new TypeValidationError({\n value,\n cause: \"value must be an object that contains an array of elements\"\n })\n };\n }\n const inputArray = value.elements;\n const resultArray = [];\n for (let i = 0; i < inputArray.length; i++) {\n const element = inputArray[i];\n const result = safeValidateTypes2({ value: 
element, schema });\n if (i === inputArray.length - 1 && !isFinalDelta) {\n continue;\n }\n if (!result.success) {\n return result;\n }\n resultArray.push(result.value);\n }\n const publishedElementCount = (_a17 = latestObject == null ? void 0 : latestObject.length) != null ? _a17 : 0;\n let textDelta = \"\";\n if (isFirstDelta) {\n textDelta += \"[\";\n }\n if (publishedElementCount > 0) {\n textDelta += \",\";\n }\n textDelta += resultArray.slice(publishedElementCount).map((element) => JSON.stringify(element)).join(\",\");\n if (isFinalDelta) {\n textDelta += \"]\";\n }\n return {\n success: true,\n value: {\n partial: resultArray,\n textDelta\n }\n };\n },\n validateFinalResult(value) {\n if (!isJSONObject(value) || !isJSONArray(value.elements)) {\n return {\n success: false,\n error: new TypeValidationError({\n value,\n cause: \"value must be an object that contains an array of elements\"\n })\n };\n }\n const inputArray = value.elements;\n for (const element of inputArray) {\n const result = safeValidateTypes2({ value: element, schema });\n if (!result.success) {\n return result;\n }\n }\n return { success: true, value: inputArray };\n },\n createElementStream(originalStream) {\n let publishedElements = 0;\n return createAsyncIterableStream(\n originalStream.pipeThrough(\n new TransformStream({\n transform(chunk, controller) {\n switch (chunk.type) {\n case \"object\": {\n const array = chunk.object;\n for (; publishedElements < array.length; publishedElements++) {\n controller.enqueue(array[publishedElements]);\n }\n break;\n }\n case \"text-delta\":\n case \"finish\":\n case \"error\":\n break;\n default: {\n const _exhaustiveCheck = chunk;\n throw new Error(\n `Unsupported chunk type: ${_exhaustiveCheck}`\n );\n }\n }\n }\n })\n )\n );\n }\n };\n};\nvar enumOutputStrategy = (enumValues) => {\n return {\n type: \"enum\",\n // wrap in object that contains result, since most LLMs will not\n // be able to generate an enum value directly:\n // possible future 
optimization: use enums directly when model supports top-level enums\n jsonSchema: {\n $schema: \"http://json-schema.org/draft-07/schema#\",\n type: \"object\",\n properties: {\n result: { type: \"string\", enum: enumValues }\n },\n required: [\"result\"],\n additionalProperties: false\n },\n validateFinalResult(value) {\n if (!isJSONObject(value) || typeof value.result !== \"string\") {\n return {\n success: false,\n error: new TypeValidationError({\n value,\n cause: 'value must be an object that contains a string in the \"result\" property.'\n })\n };\n }\n const result = value.result;\n return enumValues.includes(result) ? { success: true, value: result } : {\n success: false,\n error: new TypeValidationError({\n value,\n cause: \"value must be a string in the enum\"\n })\n };\n },\n validatePartialResult() {\n throw new UnsupportedFunctionalityError({\n functionality: \"partial results in enum mode\"\n });\n },\n createElementStream() {\n throw new UnsupportedFunctionalityError({\n functionality: \"element streams in enum mode\"\n });\n }\n };\n};\nfunction getOutputStrategy({\n output,\n schema,\n enumValues\n}) {\n switch (output) {\n case \"object\":\n return objectOutputStrategy(asSchema(schema));\n case \"array\":\n return arrayOutputStrategy(asSchema(schema));\n case \"enum\":\n return enumOutputStrategy(enumValues);\n case \"no-schema\":\n return noSchemaOutputStrategy;\n default: {\n const _exhaustiveCheck = output;\n throw new Error(`Unsupported output: ${_exhaustiveCheck}`);\n }\n }\n}\n\n// core/generate-object/validate-object-generation-input.ts\nfunction validateObjectGenerationInput({\n output,\n mode,\n schema,\n schemaName,\n schemaDescription,\n enumValues\n}) {\n if (output != null && output !== \"object\" && output !== \"array\" && output !== \"enum\" && output !== \"no-schema\") {\n throw new InvalidArgumentError({\n parameter: \"output\",\n value: output,\n message: \"Invalid output type.\"\n });\n }\n if (output === \"no-schema\") {\n if 
(mode === \"auto\" || mode === \"tool\") {\n throw new InvalidArgumentError({\n parameter: \"mode\",\n value: mode,\n message: 'Mode must be \"json\" for no-schema output.'\n });\n }\n if (schema != null) {\n throw new InvalidArgumentError({\n parameter: \"schema\",\n value: schema,\n message: \"Schema is not supported for no-schema output.\"\n });\n }\n if (schemaDescription != null) {\n throw new InvalidArgumentError({\n parameter: \"schemaDescription\",\n value: schemaDescription,\n message: \"Schema description is not supported for no-schema output.\"\n });\n }\n if (schemaName != null) {\n throw new InvalidArgumentError({\n parameter: \"schemaName\",\n value: schemaName,\n message: \"Schema name is not supported for no-schema output.\"\n });\n }\n if (enumValues != null) {\n throw new InvalidArgumentError({\n parameter: \"enumValues\",\n value: enumValues,\n message: \"Enum values are not supported for no-schema output.\"\n });\n }\n }\n if (output === \"object\") {\n if (schema == null) {\n throw new InvalidArgumentError({\n parameter: \"schema\",\n value: schema,\n message: \"Schema is required for object output.\"\n });\n }\n if (enumValues != null) {\n throw new InvalidArgumentError({\n parameter: \"enumValues\",\n value: enumValues,\n message: \"Enum values are not supported for object output.\"\n });\n }\n }\n if (output === \"array\") {\n if (schema == null) {\n throw new InvalidArgumentError({\n parameter: \"schema\",\n value: schema,\n message: \"Element schema is required for array output.\"\n });\n }\n if (enumValues != null) {\n throw new InvalidArgumentError({\n parameter: \"enumValues\",\n value: enumValues,\n message: \"Enum values are not supported for array output.\"\n });\n }\n }\n if (output === \"enum\") {\n if (schema != null) {\n throw new InvalidArgumentError({\n parameter: \"schema\",\n value: schema,\n message: \"Schema is not supported for enum output.\"\n });\n }\n if (schemaDescription != null) {\n throw new InvalidArgumentError({\n 
parameter: \"schemaDescription\",\n value: schemaDescription,\n message: \"Schema description is not supported for enum output.\"\n });\n }\n if (schemaName != null) {\n throw new InvalidArgumentError({\n parameter: \"schemaName\",\n value: schemaName,\n message: \"Schema name is not supported for enum output.\"\n });\n }\n if (enumValues == null) {\n throw new InvalidArgumentError({\n parameter: \"enumValues\",\n value: enumValues,\n message: \"Enum values are required for enum output.\"\n });\n }\n for (const value of enumValues) {\n if (typeof value !== \"string\") {\n throw new InvalidArgumentError({\n parameter: \"enumValues\",\n value,\n message: \"Enum values must be strings.\"\n });\n }\n }\n }\n}\n\n// core/prompt/stringify-for-telemetry.ts\nfunction stringifyForTelemetry(prompt) {\n const processedPrompt = prompt.map((message) => {\n return {\n ...message,\n content: typeof message.content === \"string\" ? message.content : message.content.map(processPart)\n };\n });\n return JSON.stringify(processedPrompt);\n}\nfunction processPart(part) {\n if (part.type === \"image\") {\n return {\n ...part,\n image: part.image instanceof Uint8Array ? 
convertDataContentToBase64String(part.image) : part.image\n };\n }\n return part;\n}\n\n// core/generate-object/generate-object.ts\nvar originalGenerateId = createIdGenerator({ prefix: \"aiobj\", size: 24 });\nasync function generateObject({\n model,\n enum: enumValues,\n // rename bc enum is reserved by typescript\n schema: inputSchema,\n schemaName,\n schemaDescription,\n mode,\n output = \"object\",\n system,\n prompt,\n messages,\n maxRetries: maxRetriesArg,\n abortSignal,\n headers,\n experimental_repairText: repairText,\n experimental_telemetry: telemetry,\n experimental_providerMetadata,\n providerOptions = experimental_providerMetadata,\n _internal: {\n generateId: generateId3 = originalGenerateId,\n currentDate = () => /* @__PURE__ */ new Date()\n } = {},\n ...settings\n}) {\n if (typeof model === \"string\" || model.specificationVersion !== \"v1\") {\n throw new UnsupportedModelVersionError();\n }\n validateObjectGenerationInput({\n output,\n mode,\n schema: inputSchema,\n schemaName,\n schemaDescription,\n enumValues\n });\n const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });\n const outputStrategy = getOutputStrategy({\n output,\n schema: inputSchema,\n enumValues\n });\n if (outputStrategy.type === \"no-schema\" && mode === void 0) {\n mode = \"json\";\n }\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { ...settings, maxRetries }\n });\n const tracer = getTracer(telemetry);\n return recordSpan({\n name: \"ai.generateObject\",\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: \"ai.generateObject\",\n telemetry\n }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n \"ai.prompt\": {\n input: () => JSON.stringify({ system, prompt, messages })\n },\n \"ai.schema\": outputStrategy.jsonSchema != null ? 
{ input: () => JSON.stringify(outputStrategy.jsonSchema) } : void 0,\n \"ai.schema.name\": schemaName,\n \"ai.schema.description\": schemaDescription,\n \"ai.settings.output\": outputStrategy.type,\n \"ai.settings.mode\": mode\n }\n }),\n tracer,\n fn: async (span) => {\n var _a17, _b, _c, _d;\n if (mode === \"auto\" || mode == null) {\n mode = model.defaultObjectGenerationMode;\n }\n let result;\n let finishReason;\n let usage;\n let warnings;\n let rawResponse;\n let response;\n let request;\n let logprobs;\n let resultProviderMetadata;\n switch (mode) {\n case \"json\": {\n const standardizedPrompt = standardizePrompt({\n prompt: {\n system: outputStrategy.jsonSchema == null ? injectJsonInstruction({ prompt: system }) : model.supportsStructuredOutputs ? system : injectJsonInstruction({\n prompt: system,\n schema: outputStrategy.jsonSchema\n }),\n prompt,\n messages\n },\n tools: void 0\n });\n const promptMessages = await convertToLanguageModelPrompt({\n prompt: standardizedPrompt,\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: (_a17 = model.supportsUrl) == null ? 
void 0 : _a17.bind(model)\n // support 'this' context\n });\n const generateResult = await retry(\n () => recordSpan({\n name: \"ai.generateObject.doGenerate\",\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: \"ai.generateObject.doGenerate\",\n telemetry\n }),\n ...baseTelemetryAttributes,\n \"ai.prompt.format\": {\n input: () => standardizedPrompt.type\n },\n \"ai.prompt.messages\": {\n input: () => JSON.stringify(promptMessages)\n },\n \"ai.settings.mode\": mode,\n // standardized gen-ai llm span attributes:\n \"gen_ai.system\": model.provider,\n \"gen_ai.request.model\": model.modelId,\n \"gen_ai.request.frequency_penalty\": settings.frequencyPenalty,\n \"gen_ai.request.max_tokens\": settings.maxTokens,\n \"gen_ai.request.presence_penalty\": settings.presencePenalty,\n \"gen_ai.request.temperature\": settings.temperature,\n \"gen_ai.request.top_k\": settings.topK,\n \"gen_ai.request.top_p\": settings.topP\n }\n }),\n tracer,\n fn: async (span2) => {\n var _a18, _b2, _c2, _d2, _e, _f;\n const result2 = await model.doGenerate({\n mode: {\n type: \"object-json\",\n schema: outputStrategy.jsonSchema,\n name: schemaName,\n description: schemaDescription\n },\n ...prepareCallSettings(settings),\n inputFormat: standardizedPrompt.type,\n prompt: promptMessages,\n providerMetadata: providerOptions,\n abortSignal,\n headers\n });\n const responseData = {\n id: (_b2 = (_a18 = result2.response) == null ? void 0 : _a18.id) != null ? _b2 : generateId3(),\n timestamp: (_d2 = (_c2 = result2.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),\n modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? 
_f : model.modelId\n };\n if (result2.text === void 0) {\n throw new NoObjectGeneratedError({\n message: \"No object generated: the model did not return a response.\",\n response: responseData,\n usage: calculateLanguageModelUsage(result2.usage),\n finishReason: result2.finishReason\n });\n }\n span2.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n \"ai.response.finishReason\": result2.finishReason,\n \"ai.response.object\": { output: () => result2.text },\n \"ai.response.id\": responseData.id,\n \"ai.response.model\": responseData.modelId,\n \"ai.response.timestamp\": responseData.timestamp.toISOString(),\n \"ai.response.providerMetadata\": JSON.stringify(\n result2.providerMetadata\n ),\n \"ai.usage.promptTokens\": result2.usage.promptTokens,\n \"ai.usage.completionTokens\": result2.usage.completionTokens,\n // standardized gen-ai llm span attributes:\n \"gen_ai.response.finish_reasons\": [result2.finishReason],\n \"gen_ai.response.id\": responseData.id,\n \"gen_ai.response.model\": responseData.modelId,\n \"gen_ai.usage.prompt_tokens\": result2.usage.promptTokens,\n \"gen_ai.usage.completion_tokens\": result2.usage.completionTokens\n }\n })\n );\n return { ...result2, objectText: result2.text, responseData };\n }\n })\n );\n result = generateResult.objectText;\n finishReason = generateResult.finishReason;\n usage = generateResult.usage;\n warnings = generateResult.warnings;\n rawResponse = generateResult.rawResponse;\n logprobs = generateResult.logprobs;\n resultProviderMetadata = generateResult.providerMetadata;\n request = (_b = generateResult.request) != null ? 
_b : {};\n response = generateResult.responseData;\n break;\n }\n case \"tool\": {\n const standardizedPrompt = standardizePrompt({\n prompt: { system, prompt, messages },\n tools: void 0\n });\n const promptMessages = await convertToLanguageModelPrompt({\n prompt: standardizedPrompt,\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: (_c = model.supportsUrl) == null ? void 0 : _c.bind(model)\n // support 'this' context,\n });\n const inputFormat = standardizedPrompt.type;\n const generateResult = await retry(\n () => recordSpan({\n name: \"ai.generateObject.doGenerate\",\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: \"ai.generateObject.doGenerate\",\n telemetry\n }),\n ...baseTelemetryAttributes,\n \"ai.prompt.format\": {\n input: () => inputFormat\n },\n \"ai.prompt.messages\": {\n input: () => stringifyForTelemetry(promptMessages)\n },\n \"ai.settings.mode\": mode,\n // standardized gen-ai llm span attributes:\n \"gen_ai.system\": model.provider,\n \"gen_ai.request.model\": model.modelId,\n \"gen_ai.request.frequency_penalty\": settings.frequencyPenalty,\n \"gen_ai.request.max_tokens\": settings.maxTokens,\n \"gen_ai.request.presence_penalty\": settings.presencePenalty,\n \"gen_ai.request.temperature\": settings.temperature,\n \"gen_ai.request.top_k\": settings.topK,\n \"gen_ai.request.top_p\": settings.topP\n }\n }),\n tracer,\n fn: async (span2) => {\n var _a18, _b2, _c2, _d2, _e, _f, _g, _h;\n const result2 = await model.doGenerate({\n mode: {\n type: \"object-tool\",\n tool: {\n type: \"function\",\n name: schemaName != null ? schemaName : \"json\",\n description: schemaDescription != null ? 
schemaDescription : \"Respond with a JSON object.\",\n parameters: outputStrategy.jsonSchema\n }\n },\n ...prepareCallSettings(settings),\n inputFormat,\n prompt: promptMessages,\n providerMetadata: providerOptions,\n abortSignal,\n headers\n });\n const objectText = (_b2 = (_a18 = result2.toolCalls) == null ? void 0 : _a18[0]) == null ? void 0 : _b2.args;\n const responseData = {\n id: (_d2 = (_c2 = result2.response) == null ? void 0 : _c2.id) != null ? _d2 : generateId3(),\n timestamp: (_f = (_e = result2.response) == null ? void 0 : _e.timestamp) != null ? _f : currentDate(),\n modelId: (_h = (_g = result2.response) == null ? void 0 : _g.modelId) != null ? _h : model.modelId\n };\n if (objectText === void 0) {\n throw new NoObjectGeneratedError({\n message: \"No object generated: the tool was not called.\",\n response: responseData,\n usage: calculateLanguageModelUsage(result2.usage),\n finishReason: result2.finishReason\n });\n }\n span2.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n \"ai.response.finishReason\": result2.finishReason,\n \"ai.response.object\": { output: () => objectText },\n \"ai.response.id\": responseData.id,\n \"ai.response.model\": responseData.modelId,\n \"ai.response.timestamp\": responseData.timestamp.toISOString(),\n \"ai.response.providerMetadata\": JSON.stringify(\n result2.providerMetadata\n ),\n \"ai.usage.promptTokens\": result2.usage.promptTokens,\n \"ai.usage.completionTokens\": result2.usage.completionTokens,\n // standardized gen-ai llm span attributes:\n \"gen_ai.response.finish_reasons\": [result2.finishReason],\n \"gen_ai.response.id\": responseData.id,\n \"gen_ai.response.model\": responseData.modelId,\n \"gen_ai.usage.input_tokens\": result2.usage.promptTokens,\n \"gen_ai.usage.output_tokens\": result2.usage.completionTokens\n }\n })\n );\n return { ...result2, objectText, responseData };\n }\n })\n );\n result = generateResult.objectText;\n finishReason = generateResult.finishReason;\n usage = 
generateResult.usage;\n warnings = generateResult.warnings;\n rawResponse = generateResult.rawResponse;\n logprobs = generateResult.logprobs;\n resultProviderMetadata = generateResult.providerMetadata;\n request = (_d = generateResult.request) != null ? _d : {};\n response = generateResult.responseData;\n break;\n }\n case void 0: {\n throw new Error(\n \"Model does not have a default object generation mode.\"\n );\n }\n default: {\n const _exhaustiveCheck = mode;\n throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);\n }\n }\n function processResult(result2) {\n const parseResult = safeParseJSON({ text: result2 });\n if (!parseResult.success) {\n throw new NoObjectGeneratedError({\n message: \"No object generated: could not parse the response.\",\n cause: parseResult.error,\n text: result2,\n response,\n usage: calculateLanguageModelUsage(usage),\n finishReason\n });\n }\n const validationResult = outputStrategy.validateFinalResult(\n parseResult.value,\n {\n text: result2,\n response,\n usage: calculateLanguageModelUsage(usage)\n }\n );\n if (!validationResult.success) {\n throw new NoObjectGeneratedError({\n message: \"No object generated: response did not match schema.\",\n cause: validationResult.error,\n text: result2,\n response,\n usage: calculateLanguageModelUsage(usage),\n finishReason\n });\n }\n return validationResult.value;\n }\n let object2;\n try {\n object2 = processResult(result);\n } catch (error) {\n if (repairText != null && NoObjectGeneratedError.isInstance(error) && (JSONParseError.isInstance(error.cause) || TypeValidationError2.isInstance(error.cause))) {\n const repairedText = await repairText({\n text: result,\n error: error.cause\n });\n if (repairedText === null) {\n throw error;\n }\n object2 = processResult(repairedText);\n } else {\n throw error;\n }\n }\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n \"ai.response.finishReason\": finishReason,\n \"ai.response.object\": {\n output: () => 
JSON.stringify(object2)\n },\n \"ai.usage.promptTokens\": usage.promptTokens,\n \"ai.usage.completionTokens\": usage.completionTokens\n }\n })\n );\n return new DefaultGenerateObjectResult({\n object: object2,\n finishReason,\n usage: calculateLanguageModelUsage(usage),\n warnings,\n request,\n response: {\n ...response,\n headers: rawResponse == null ? void 0 : rawResponse.headers,\n body: rawResponse == null ? void 0 : rawResponse.body\n },\n logprobs,\n providerMetadata: resultProviderMetadata\n });\n }\n });\n}\nvar DefaultGenerateObjectResult = class {\n constructor(options) {\n this.object = options.object;\n this.finishReason = options.finishReason;\n this.usage = options.usage;\n this.warnings = options.warnings;\n this.providerMetadata = options.providerMetadata;\n this.experimental_providerMetadata = options.providerMetadata;\n this.response = options.response;\n this.request = options.request;\n this.logprobs = options.logprobs;\n }\n toJsonResponse(init) {\n var _a17;\n return new Response(JSON.stringify(this.object), {\n status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,\n headers: prepareResponseHeaders(init == null ? 
void 0 : init.headers, {\n contentType: \"application/json; charset=utf-8\"\n })\n });\n }\n};\n\n// core/generate-object/stream-object.ts\nimport { createIdGenerator as createIdGenerator2 } from \"@ai-sdk/provider-utils\";\nimport {\n isDeepEqualData,\n parsePartialJson\n} from \"@ai-sdk/ui-utils\";\n\n// util/delayed-promise.ts\nvar DelayedPromise = class {\n constructor() {\n this.status = { type: \"pending\" };\n this._resolve = void 0;\n this._reject = void 0;\n }\n get value() {\n if (this.promise) {\n return this.promise;\n }\n this.promise = new Promise((resolve, reject) => {\n if (this.status.type === \"resolved\") {\n resolve(this.status.value);\n } else if (this.status.type === \"rejected\") {\n reject(this.status.error);\n }\n this._resolve = resolve;\n this._reject = reject;\n });\n return this.promise;\n }\n resolve(value) {\n var _a17;\n this.status = { type: \"resolved\", value };\n if (this.promise) {\n (_a17 = this._resolve) == null ? void 0 : _a17.call(this, value);\n }\n }\n reject(error) {\n var _a17;\n this.status = { type: \"rejected\", error };\n if (this.promise) {\n (_a17 = this._reject) == null ? void 0 : _a17.call(this, error);\n }\n }\n};\n\n// util/create-resolvable-promise.ts\nfunction createResolvablePromise() {\n let resolve;\n let reject;\n const promise = new Promise((res, rej) => {\n resolve = res;\n reject = rej;\n });\n return {\n promise,\n resolve,\n reject\n };\n}\n\n// core/util/create-stitchable-stream.ts\nfunction createStitchableStream() {\n let innerStreamReaders = [];\n let controller = null;\n let isClosed = false;\n let waitForNewStream = createResolvablePromise();\n const processPull = async () => {\n if (isClosed && innerStreamReaders.length === 0) {\n controller == null ? 
void 0 : controller.close();\n return;\n }\n if (innerStreamReaders.length === 0) {\n waitForNewStream = createResolvablePromise();\n await waitForNewStream.promise;\n return processPull();\n }\n try {\n const { value, done } = await innerStreamReaders[0].read();\n if (done) {\n innerStreamReaders.shift();\n if (innerStreamReaders.length > 0) {\n await processPull();\n } else if (isClosed) {\n controller == null ? void 0 : controller.close();\n }\n } else {\n controller == null ? void 0 : controller.enqueue(value);\n }\n } catch (error) {\n controller == null ? void 0 : controller.error(error);\n innerStreamReaders.shift();\n if (isClosed && innerStreamReaders.length === 0) {\n controller == null ? void 0 : controller.close();\n }\n }\n };\n return {\n stream: new ReadableStream({\n start(controllerParam) {\n controller = controllerParam;\n },\n pull: processPull,\n async cancel() {\n for (const reader of innerStreamReaders) {\n await reader.cancel();\n }\n innerStreamReaders = [];\n isClosed = true;\n }\n }),\n addStream: (innerStream) => {\n if (isClosed) {\n throw new Error(\"Cannot add inner stream: outer stream is closed\");\n }\n innerStreamReaders.push(innerStream.getReader());\n waitForNewStream.resolve();\n },\n /**\n * Gracefully close the outer stream. This will let the inner streams\n * finish processing and then close the outer stream.\n */\n close: () => {\n isClosed = true;\n waitForNewStream.resolve();\n if (innerStreamReaders.length === 0) {\n controller == null ? void 0 : controller.close();\n }\n },\n /**\n * Immediately close the outer stream. This will cancel all inner streams\n * and close the outer stream.\n */\n terminate: () => {\n isClosed = true;\n waitForNewStream.resolve();\n innerStreamReaders.forEach((reader) => reader.cancel());\n innerStreamReaders = [];\n controller == null ? void 0 : controller.close();\n }\n };\n}\n\n// core/util/now.ts\nfunction now() {\n var _a17, _b;\n return (_b = (_a17 = globalThis == null ? 
void 0 : globalThis.performance) == null ? void 0 : _a17.now()) != null ? _b : Date.now();\n}\n\n// core/generate-object/stream-object.ts\nvar originalGenerateId2 = createIdGenerator2({ prefix: \"aiobj\", size: 24 });\nfunction streamObject({\n model,\n schema: inputSchema,\n schemaName,\n schemaDescription,\n mode,\n output = \"object\",\n system,\n prompt,\n messages,\n maxRetries,\n abortSignal,\n headers,\n experimental_telemetry: telemetry,\n experimental_providerMetadata,\n providerOptions = experimental_providerMetadata,\n onError,\n onFinish,\n _internal: {\n generateId: generateId3 = originalGenerateId2,\n currentDate = () => /* @__PURE__ */ new Date(),\n now: now2 = now\n } = {},\n ...settings\n}) {\n if (typeof model === \"string\" || model.specificationVersion !== \"v1\") {\n throw new UnsupportedModelVersionError();\n }\n validateObjectGenerationInput({\n output,\n mode,\n schema: inputSchema,\n schemaName,\n schemaDescription\n });\n const outputStrategy = getOutputStrategy({ output, schema: inputSchema });\n if (outputStrategy.type === \"no-schema\" && mode === void 0) {\n mode = \"json\";\n }\n return new DefaultStreamObjectResult({\n model,\n telemetry,\n headers,\n settings,\n maxRetries,\n abortSignal,\n outputStrategy,\n system,\n prompt,\n messages,\n schemaName,\n schemaDescription,\n providerOptions,\n mode,\n onError,\n onFinish,\n generateId: generateId3,\n currentDate,\n now: now2\n });\n}\nvar DefaultStreamObjectResult = class {\n constructor({\n model,\n headers,\n telemetry,\n settings,\n maxRetries: maxRetriesArg,\n abortSignal,\n outputStrategy,\n system,\n prompt,\n messages,\n schemaName,\n schemaDescription,\n providerOptions,\n mode,\n onError,\n onFinish,\n generateId: generateId3,\n currentDate,\n now: now2\n }) {\n this.objectPromise = new DelayedPromise();\n this.usagePromise = new DelayedPromise();\n this.providerMetadataPromise = new DelayedPromise();\n this.warningsPromise = new DelayedPromise();\n this.requestPromise = new 
DelayedPromise();\n this.responsePromise = new DelayedPromise();\n const { maxRetries, retry } = prepareRetries({\n maxRetries: maxRetriesArg\n });\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { ...settings, maxRetries }\n });\n const tracer = getTracer(telemetry);\n const self = this;\n const stitchableStream = createStitchableStream();\n const eventProcessor = new TransformStream({\n transform(chunk, controller) {\n controller.enqueue(chunk);\n if (chunk.type === \"error\") {\n onError == null ? void 0 : onError({ error: chunk.error });\n }\n }\n });\n this.baseStream = stitchableStream.stream.pipeThrough(eventProcessor);\n recordSpan({\n name: \"ai.streamObject\",\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: \"ai.streamObject\",\n telemetry\n }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n \"ai.prompt\": {\n input: () => JSON.stringify({ system, prompt, messages })\n },\n \"ai.schema\": outputStrategy.jsonSchema != null ? { input: () => JSON.stringify(outputStrategy.jsonSchema) } : void 0,\n \"ai.schema.name\": schemaName,\n \"ai.schema.description\": schemaDescription,\n \"ai.settings.output\": outputStrategy.type,\n \"ai.settings.mode\": mode\n }\n }),\n tracer,\n endWhenDone: false,\n fn: async (rootSpan) => {\n var _a17, _b;\n if (mode === \"auto\" || mode == null) {\n mode = model.defaultObjectGenerationMode;\n }\n let callOptions;\n let transformer;\n switch (mode) {\n case \"json\": {\n const standardizedPrompt = standardizePrompt({\n prompt: {\n system: outputStrategy.jsonSchema == null ? injectJsonInstruction({ prompt: system }) : model.supportsStructuredOutputs ? 
system : injectJsonInstruction({\n prompt: system,\n schema: outputStrategy.jsonSchema\n }),\n prompt,\n messages\n },\n tools: void 0\n });\n callOptions = {\n mode: {\n type: \"object-json\",\n schema: outputStrategy.jsonSchema,\n name: schemaName,\n description: schemaDescription\n },\n ...prepareCallSettings(settings),\n inputFormat: standardizedPrompt.type,\n prompt: await convertToLanguageModelPrompt({\n prompt: standardizedPrompt,\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: (_a17 = model.supportsUrl) == null ? void 0 : _a17.bind(model)\n // support 'this' context\n }),\n providerMetadata: providerOptions,\n abortSignal,\n headers\n };\n transformer = {\n transform: (chunk, controller) => {\n switch (chunk.type) {\n case \"text-delta\":\n controller.enqueue(chunk.textDelta);\n break;\n case \"response-metadata\":\n case \"finish\":\n case \"error\":\n controller.enqueue(chunk);\n break;\n }\n }\n };\n break;\n }\n case \"tool\": {\n const standardizedPrompt = standardizePrompt({\n prompt: { system, prompt, messages },\n tools: void 0\n });\n callOptions = {\n mode: {\n type: \"object-tool\",\n tool: {\n type: \"function\",\n name: schemaName != null ? schemaName : \"json\",\n description: schemaDescription != null ? schemaDescription : \"Respond with a JSON object.\",\n parameters: outputStrategy.jsonSchema\n }\n },\n ...prepareCallSettings(settings),\n inputFormat: standardizedPrompt.type,\n prompt: await convertToLanguageModelPrompt({\n prompt: standardizedPrompt,\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: (_b = model.supportsUrl) == null ? 
void 0 : _b.bind(model)\n // support 'this' context,\n }),\n providerMetadata: providerOptions,\n abortSignal,\n headers\n };\n transformer = {\n transform(chunk, controller) {\n switch (chunk.type) {\n case \"tool-call-delta\":\n controller.enqueue(chunk.argsTextDelta);\n break;\n case \"response-metadata\":\n case \"finish\":\n case \"error\":\n controller.enqueue(chunk);\n break;\n }\n }\n };\n break;\n }\n case void 0: {\n throw new Error(\n \"Model does not have a default object generation mode.\"\n );\n }\n default: {\n const _exhaustiveCheck = mode;\n throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);\n }\n }\n const {\n result: { stream, warnings, rawResponse, request },\n doStreamSpan,\n startTimestampMs\n } = await retry(\n () => recordSpan({\n name: \"ai.streamObject.doStream\",\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: \"ai.streamObject.doStream\",\n telemetry\n }),\n ...baseTelemetryAttributes,\n \"ai.prompt.format\": {\n input: () => callOptions.inputFormat\n },\n \"ai.prompt.messages\": {\n input: () => stringifyForTelemetry(callOptions.prompt)\n },\n \"ai.settings.mode\": mode,\n // standardized gen-ai llm span attributes:\n \"gen_ai.system\": model.provider,\n \"gen_ai.request.model\": model.modelId,\n \"gen_ai.request.frequency_penalty\": settings.frequencyPenalty,\n \"gen_ai.request.max_tokens\": settings.maxTokens,\n \"gen_ai.request.presence_penalty\": settings.presencePenalty,\n \"gen_ai.request.temperature\": settings.temperature,\n \"gen_ai.request.top_k\": settings.topK,\n \"gen_ai.request.top_p\": settings.topP\n }\n }),\n tracer,\n endWhenDone: false,\n fn: async (doStreamSpan2) => ({\n startTimestampMs: now2(),\n doStreamSpan: doStreamSpan2,\n result: await model.doStream(callOptions)\n })\n })\n );\n self.requestPromise.resolve(request != null ? 
request : {});\n let usage;\n let finishReason;\n let providerMetadata;\n let object2;\n let error;\n let accumulatedText = \"\";\n let textDelta = \"\";\n let response = {\n id: generateId3(),\n timestamp: currentDate(),\n modelId: model.modelId\n };\n let latestObjectJson = void 0;\n let latestObject = void 0;\n let isFirstChunk = true;\n let isFirstDelta = true;\n const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(\n new TransformStream({\n async transform(chunk, controller) {\n var _a18, _b2, _c;\n if (isFirstChunk) {\n const msToFirstChunk = now2() - startTimestampMs;\n isFirstChunk = false;\n doStreamSpan.addEvent(\"ai.stream.firstChunk\", {\n \"ai.stream.msToFirstChunk\": msToFirstChunk\n });\n doStreamSpan.setAttributes({\n \"ai.stream.msToFirstChunk\": msToFirstChunk\n });\n }\n if (typeof chunk === \"string\") {\n accumulatedText += chunk;\n textDelta += chunk;\n const { value: currentObjectJson, state: parseState } = parsePartialJson(accumulatedText);\n if (currentObjectJson !== void 0 && !isDeepEqualData(latestObjectJson, currentObjectJson)) {\n const validationResult = outputStrategy.validatePartialResult({\n value: currentObjectJson,\n textDelta,\n latestObject,\n isFirstDelta,\n isFinalDelta: parseState === \"successful-parse\"\n });\n if (validationResult.success && !isDeepEqualData(\n latestObject,\n validationResult.value.partial\n )) {\n latestObjectJson = currentObjectJson;\n latestObject = validationResult.value.partial;\n controller.enqueue({\n type: \"object\",\n object: latestObject\n });\n controller.enqueue({\n type: \"text-delta\",\n textDelta: validationResult.value.textDelta\n });\n textDelta = \"\";\n isFirstDelta = false;\n }\n }\n return;\n }\n switch (chunk.type) {\n case \"response-metadata\": {\n response = {\n id: (_a18 = chunk.id) != null ? _a18 : response.id,\n timestamp: (_b2 = chunk.timestamp) != null ? _b2 : response.timestamp,\n modelId: (_c = chunk.modelId) != null ? 
_c : response.modelId\n };\n break;\n }\n case \"finish\": {\n if (textDelta !== \"\") {\n controller.enqueue({ type: \"text-delta\", textDelta });\n }\n finishReason = chunk.finishReason;\n usage = calculateLanguageModelUsage(chunk.usage);\n providerMetadata = chunk.providerMetadata;\n controller.enqueue({ ...chunk, usage, response });\n self.usagePromise.resolve(usage);\n self.providerMetadataPromise.resolve(providerMetadata);\n self.responsePromise.resolve({\n ...response,\n headers: rawResponse == null ? void 0 : rawResponse.headers\n });\n const validationResult = outputStrategy.validateFinalResult(\n latestObjectJson,\n {\n text: accumulatedText,\n response,\n usage\n }\n );\n if (validationResult.success) {\n object2 = validationResult.value;\n self.objectPromise.resolve(object2);\n } else {\n error = new NoObjectGeneratedError({\n message: \"No object generated: response did not match schema.\",\n cause: validationResult.error,\n text: accumulatedText,\n response,\n usage,\n finishReason\n });\n self.objectPromise.reject(error);\n }\n break;\n }\n default: {\n controller.enqueue(chunk);\n break;\n }\n }\n },\n // invoke onFinish callback and resolve toolResults promise when the stream is about to close:\n async flush(controller) {\n try {\n const finalUsage = usage != null ? 
usage : {\n promptTokens: NaN,\n completionTokens: NaN,\n totalTokens: NaN\n };\n doStreamSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n \"ai.response.finishReason\": finishReason,\n \"ai.response.object\": {\n output: () => JSON.stringify(object2)\n },\n \"ai.response.id\": response.id,\n \"ai.response.model\": response.modelId,\n \"ai.response.timestamp\": response.timestamp.toISOString(),\n \"ai.response.providerMetadata\": JSON.stringify(providerMetadata),\n \"ai.usage.promptTokens\": finalUsage.promptTokens,\n \"ai.usage.completionTokens\": finalUsage.completionTokens,\n // standardized gen-ai llm span attributes:\n \"gen_ai.response.finish_reasons\": [finishReason],\n \"gen_ai.response.id\": response.id,\n \"gen_ai.response.model\": response.modelId,\n \"gen_ai.usage.input_tokens\": finalUsage.promptTokens,\n \"gen_ai.usage.output_tokens\": finalUsage.completionTokens\n }\n })\n );\n doStreamSpan.end();\n rootSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n \"ai.usage.promptTokens\": finalUsage.promptTokens,\n \"ai.usage.completionTokens\": finalUsage.completionTokens,\n \"ai.response.object\": {\n output: () => JSON.stringify(object2)\n },\n \"ai.response.providerMetadata\": JSON.stringify(providerMetadata)\n }\n })\n );\n await (onFinish == null ? void 0 : onFinish({\n usage: finalUsage,\n object: object2,\n error,\n response: {\n ...response,\n headers: rawResponse == null ? 
void 0 : rawResponse.headers\n },\n warnings,\n providerMetadata,\n experimental_providerMetadata: providerMetadata\n }));\n } catch (error2) {\n controller.enqueue({ type: \"error\", error: error2 });\n } finally {\n rootSpan.end();\n }\n }\n })\n );\n stitchableStream.addStream(transformedStream);\n }\n }).catch((error) => {\n stitchableStream.addStream(\n new ReadableStream({\n start(controller) {\n controller.enqueue({ type: \"error\", error });\n controller.close();\n }\n })\n );\n }).finally(() => {\n stitchableStream.close();\n });\n this.outputStrategy = outputStrategy;\n }\n get object() {\n return this.objectPromise.value;\n }\n get usage() {\n return this.usagePromise.value;\n }\n get experimental_providerMetadata() {\n return this.providerMetadataPromise.value;\n }\n get providerMetadata() {\n return this.providerMetadataPromise.value;\n }\n get warnings() {\n return this.warningsPromise.value;\n }\n get request() {\n return this.requestPromise.value;\n }\n get response() {\n return this.responsePromise.value;\n }\n get partialObjectStream() {\n return createAsyncIterableStream(\n this.baseStream.pipeThrough(\n new TransformStream({\n transform(chunk, controller) {\n switch (chunk.type) {\n case \"object\":\n controller.enqueue(chunk.object);\n break;\n case \"text-delta\":\n case \"finish\":\n case \"error\":\n break;\n default: {\n const _exhaustiveCheck = chunk;\n throw new Error(`Unsupported chunk type: ${_exhaustiveCheck}`);\n }\n }\n }\n })\n )\n );\n }\n get elementStream() {\n return this.outputStrategy.createElementStream(this.baseStream);\n }\n get textStream() {\n return createAsyncIterableStream(\n this.baseStream.pipeThrough(\n new TransformStream({\n transform(chunk, controller) {\n switch (chunk.type) {\n case \"text-delta\":\n controller.enqueue(chunk.textDelta);\n break;\n case \"object\":\n case \"finish\":\n case \"error\":\n break;\n default: {\n const _exhaustiveCheck = chunk;\n throw new Error(`Unsupported chunk type: 
${_exhaustiveCheck}`);\n }\n }\n }\n })\n )\n );\n }\n get fullStream() {\n return createAsyncIterableStream(this.baseStream);\n }\n pipeTextStreamToResponse(response, init) {\n writeToServerResponse({\n response,\n status: init == null ? void 0 : init.status,\n statusText: init == null ? void 0 : init.statusText,\n headers: prepareOutgoingHttpHeaders(init == null ? void 0 : init.headers, {\n contentType: \"text/plain; charset=utf-8\"\n }),\n stream: this.textStream.pipeThrough(new TextEncoderStream())\n });\n }\n toTextStreamResponse(init) {\n var _a17;\n return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {\n status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,\n headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {\n contentType: \"text/plain; charset=utf-8\"\n })\n });\n }\n};\n\n// core/generate-text/generate-text.ts\nimport { createIdGenerator as createIdGenerator3 } from \"@ai-sdk/provider-utils\";\n\n// errors/no-output-specified-error.ts\nimport { AISDKError as AISDKError10 } from \"@ai-sdk/provider\";\nvar name9 = \"AI_NoOutputSpecifiedError\";\nvar marker9 = `vercel.ai.error.${name9}`;\nvar symbol9 = Symbol.for(marker9);\nvar _a9;\nvar NoOutputSpecifiedError = class extends AISDKError10 {\n // used in isInstance\n constructor({ message = \"No output specified.\" } = {}) {\n super({ name: name9, message });\n this[_a9] = true;\n }\n static isInstance(error) {\n return AISDKError10.hasMarker(error, marker9);\n }\n};\n_a9 = symbol9;\n\n// errors/tool-execution-error.ts\nimport { AISDKError as AISDKError11, getErrorMessage as getErrorMessage2 } from \"@ai-sdk/provider\";\nvar name10 = \"AI_ToolExecutionError\";\nvar marker10 = `vercel.ai.error.${name10}`;\nvar symbol10 = Symbol.for(marker10);\nvar _a10;\nvar ToolExecutionError = class extends AISDKError11 {\n constructor({\n toolArgs,\n toolName,\n toolCallId,\n cause,\n message = `Error executing tool ${toolName}: ${getErrorMessage2(cause)}`\n }) 
{\n super({ name: name10, message, cause });\n this[_a10] = true;\n this.toolArgs = toolArgs;\n this.toolName = toolName;\n this.toolCallId = toolCallId;\n }\n static isInstance(error) {\n return AISDKError11.hasMarker(error, marker10);\n }\n};\n_a10 = symbol10;\n\n// core/prompt/prepare-tools-and-tool-choice.ts\nimport { asSchema as asSchema2 } from \"@ai-sdk/ui-utils\";\n\n// core/util/is-non-empty-object.ts\nfunction isNonEmptyObject(object2) {\n return object2 != null && Object.keys(object2).length > 0;\n}\n\n// core/prompt/prepare-tools-and-tool-choice.ts\nfunction prepareToolsAndToolChoice({\n tools,\n toolChoice,\n activeTools\n}) {\n if (!isNonEmptyObject(tools)) {\n return {\n tools: void 0,\n toolChoice: void 0\n };\n }\n const filteredTools = activeTools != null ? Object.entries(tools).filter(\n ([name17]) => activeTools.includes(name17)\n ) : Object.entries(tools);\n return {\n tools: filteredTools.map(([name17, tool2]) => {\n const toolType = tool2.type;\n switch (toolType) {\n case void 0:\n case \"function\":\n return {\n type: \"function\",\n name: name17,\n description: tool2.description,\n parameters: asSchema2(tool2.parameters).jsonSchema\n };\n case \"provider-defined\":\n return {\n type: \"provider-defined\",\n name: name17,\n id: tool2.id,\n args: tool2.args\n };\n default: {\n const exhaustiveCheck = toolType;\n throw new Error(`Unsupported tool type: ${exhaustiveCheck}`);\n }\n }\n }),\n toolChoice: toolChoice == null ? { type: \"auto\" } : typeof toolChoice === \"string\" ? { type: toolChoice } : { type: \"tool\", toolName: toolChoice.toolName }\n };\n}\n\n// core/util/split-on-last-whitespace.ts\nvar lastWhitespaceRegexp = /^([\\s\\S]*?)(\\s+)(\\S*)$/;\nfunction splitOnLastWhitespace(text2) {\n const match = text2.match(lastWhitespaceRegexp);\n return match ? 
{ prefix: match[1], whitespace: match[2], suffix: match[3] } : void 0;\n}\n\n// core/util/remove-text-after-last-whitespace.ts\nfunction removeTextAfterLastWhitespace(text2) {\n const match = splitOnLastWhitespace(text2);\n return match ? match.prefix + match.whitespace : text2;\n}\n\n// core/generate-text/parse-tool-call.ts\nimport { safeParseJSON as safeParseJSON2, safeValidateTypes as safeValidateTypes3 } from \"@ai-sdk/provider-utils\";\nimport { asSchema as asSchema3 } from \"@ai-sdk/ui-utils\";\n\n// errors/invalid-tool-arguments-error.ts\nimport { AISDKError as AISDKError12, getErrorMessage as getErrorMessage3 } from \"@ai-sdk/provider\";\nvar name11 = \"AI_InvalidToolArgumentsError\";\nvar marker11 = `vercel.ai.error.${name11}`;\nvar symbol11 = Symbol.for(marker11);\nvar _a11;\nvar InvalidToolArgumentsError = class extends AISDKError12 {\n constructor({\n toolArgs,\n toolName,\n cause,\n message = `Invalid arguments for tool ${toolName}: ${getErrorMessage3(\n cause\n )}`\n }) {\n super({ name: name11, message, cause });\n this[_a11] = true;\n this.toolArgs = toolArgs;\n this.toolName = toolName;\n }\n static isInstance(error) {\n return AISDKError12.hasMarker(error, marker11);\n }\n};\n_a11 = symbol11;\n\n// errors/no-such-tool-error.ts\nimport { AISDKError as AISDKError13 } from \"@ai-sdk/provider\";\nvar name12 = \"AI_NoSuchToolError\";\nvar marker12 = `vercel.ai.error.${name12}`;\nvar symbol12 = Symbol.for(marker12);\nvar _a12;\nvar NoSuchToolError = class extends AISDKError13 {\n constructor({\n toolName,\n availableTools = void 0,\n message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? 
\"No tools are available.\" : `Available tools: ${availableTools.join(\", \")}.`}`\n }) {\n super({ name: name12, message });\n this[_a12] = true;\n this.toolName = toolName;\n this.availableTools = availableTools;\n }\n static isInstance(error) {\n return AISDKError13.hasMarker(error, marker12);\n }\n};\n_a12 = symbol12;\n\n// errors/tool-call-repair-error.ts\nimport { AISDKError as AISDKError14, getErrorMessage as getErrorMessage4 } from \"@ai-sdk/provider\";\nvar name13 = \"AI_ToolCallRepairError\";\nvar marker13 = `vercel.ai.error.${name13}`;\nvar symbol13 = Symbol.for(marker13);\nvar _a13;\nvar ToolCallRepairError = class extends AISDKError14 {\n constructor({\n cause,\n originalError,\n message = `Error repairing tool call: ${getErrorMessage4(cause)}`\n }) {\n super({ name: name13, message, cause });\n this[_a13] = true;\n this.originalError = originalError;\n }\n static isInstance(error) {\n return AISDKError14.hasMarker(error, marker13);\n }\n};\n_a13 = symbol13;\n\n// core/generate-text/parse-tool-call.ts\nasync function parseToolCall({\n toolCall,\n tools,\n repairToolCall,\n system,\n messages\n}) {\n if (tools == null) {\n throw new NoSuchToolError({ toolName: toolCall.toolName });\n }\n try {\n return await doParseToolCall({ toolCall, tools });\n } catch (error) {\n if (repairToolCall == null || !(NoSuchToolError.isInstance(error) || InvalidToolArgumentsError.isInstance(error))) {\n throw error;\n }\n let repairedToolCall = null;\n try {\n repairedToolCall = await repairToolCall({\n toolCall,\n tools,\n parameterSchema: ({ toolName }) => asSchema3(tools[toolName].parameters).jsonSchema,\n system,\n messages,\n error\n });\n } catch (repairError) {\n throw new ToolCallRepairError({\n cause: repairError,\n originalError: error\n });\n }\n if (repairedToolCall == null) {\n throw error;\n }\n return await doParseToolCall({ toolCall: repairedToolCall, tools });\n }\n}\nasync function doParseToolCall({\n toolCall,\n tools\n}) {\n const toolName = 
toolCall.toolName;\n const tool2 = tools[toolName];\n if (tool2 == null) {\n throw new NoSuchToolError({\n toolName: toolCall.toolName,\n availableTools: Object.keys(tools)\n });\n }\n const schema = asSchema3(tool2.parameters);\n const parseResult = toolCall.args.trim() === \"\" ? safeValidateTypes3({ value: {}, schema }) : safeParseJSON2({ text: toolCall.args, schema });\n if (parseResult.success === false) {\n throw new InvalidToolArgumentsError({\n toolName,\n toolArgs: toolCall.args,\n cause: parseResult.error\n });\n }\n return {\n type: \"tool-call\",\n toolCallId: toolCall.toolCallId,\n toolName,\n args: parseResult.value\n };\n}\n\n// core/generate-text/reasoning-detail.ts\nfunction asReasoningText(reasoning) {\n const reasoningText = reasoning.filter((part) => part.type === \"text\").map((part) => part.text).join(\"\");\n return reasoningText.length > 0 ? reasoningText : void 0;\n}\n\n// core/generate-text/to-response-messages.ts\nfunction toResponseMessages({\n text: text2 = \"\",\n files,\n reasoning,\n tools,\n toolCalls,\n toolResults,\n messageId,\n generateMessageId\n}) {\n const responseMessages = [];\n const content = [];\n if (reasoning.length > 0) {\n content.push(\n ...reasoning.map(\n (part) => part.type === \"text\" ? { ...part, type: \"reasoning\" } : { ...part, type: \"redacted-reasoning\" }\n )\n );\n }\n if (files.length > 0) {\n content.push(\n ...files.map((file) => ({\n type: \"file\",\n data: file.base64,\n mimeType: file.mimeType\n }))\n );\n }\n if (text2.length > 0) {\n content.push({ type: \"text\", text: text2 });\n }\n if (toolCalls.length > 0) {\n content.push(...toolCalls);\n }\n if (content.length > 0) {\n responseMessages.push({\n role: \"assistant\",\n content,\n id: messageId\n });\n }\n if (toolResults.length > 0) {\n responseMessages.push({\n role: \"tool\",\n id: generateMessageId(),\n content: toolResults.map((toolResult) => {\n const tool2 = tools[toolResult.toolName];\n return (tool2 == null ? 
void 0 : tool2.experimental_toToolResultContent) != null ? {\n type: \"tool-result\",\n toolCallId: toolResult.toolCallId,\n toolName: toolResult.toolName,\n result: tool2.experimental_toToolResultContent(toolResult.result),\n experimental_content: tool2.experimental_toToolResultContent(\n toolResult.result\n )\n } : {\n type: \"tool-result\",\n toolCallId: toolResult.toolCallId,\n toolName: toolResult.toolName,\n result: toolResult.result\n };\n })\n });\n }\n return responseMessages;\n}\n\n// core/generate-text/generate-text.ts\nvar originalGenerateId3 = createIdGenerator3({\n prefix: \"aitxt\",\n size: 24\n});\nvar originalGenerateMessageId = createIdGenerator3({\n prefix: \"msg\",\n size: 24\n});\nasync function generateText({\n model,\n tools,\n toolChoice,\n system,\n prompt,\n messages,\n maxRetries: maxRetriesArg,\n abortSignal,\n headers,\n maxSteps = 1,\n experimental_generateMessageId: generateMessageId = originalGenerateMessageId,\n experimental_output: output,\n experimental_continueSteps: continueSteps = false,\n experimental_telemetry: telemetry,\n experimental_providerMetadata,\n providerOptions = experimental_providerMetadata,\n experimental_activeTools: activeTools,\n experimental_prepareStep: prepareStep,\n experimental_repairToolCall: repairToolCall,\n _internal: {\n generateId: generateId3 = originalGenerateId3,\n currentDate = () => /* @__PURE__ */ new Date()\n } = {},\n onStepFinish,\n ...settings\n}) {\n var _a17;\n if (typeof model === \"string\" || model.specificationVersion !== \"v1\") {\n throw new UnsupportedModelVersionError();\n }\n if (maxSteps < 1) {\n throw new InvalidArgumentError({\n parameter: \"maxSteps\",\n value: maxSteps,\n message: \"maxSteps must be at least 1\"\n });\n }\n const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { ...settings, maxRetries }\n });\n const initialPrompt = 
standardizePrompt({\n prompt: {\n system: (_a17 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a17 : system,\n prompt,\n messages\n },\n tools\n });\n const tracer = getTracer(telemetry);\n return recordSpan({\n name: \"ai.generateText\",\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: \"ai.generateText\",\n telemetry\n }),\n ...baseTelemetryAttributes,\n // model:\n \"ai.model.provider\": model.provider,\n \"ai.model.id\": model.modelId,\n // specific settings that only make sense on the outer level:\n \"ai.prompt\": {\n input: () => JSON.stringify({ system, prompt, messages })\n },\n \"ai.settings.maxSteps\": maxSteps\n }\n }),\n tracer,\n fn: async (span) => {\n var _a18, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;\n const callSettings = prepareCallSettings(settings);\n let currentModelResponse;\n let currentToolCalls = [];\n let currentToolResults = [];\n let currentReasoningDetails = [];\n let stepCount = 0;\n const responseMessages = [];\n let text2 = \"\";\n const sources = [];\n const steps = [];\n let usage = {\n completionTokens: 0,\n promptTokens: 0,\n totalTokens: 0\n };\n let stepType = \"initial\";\n do {\n const promptFormat = stepCount === 0 ? initialPrompt.type : \"messages\";\n const stepInputMessages = [\n ...initialPrompt.messages,\n ...responseMessages\n ];\n const prepareStepResult = await (prepareStep == null ? void 0 : prepareStep({\n model,\n steps,\n maxSteps,\n stepNumber: stepCount\n }));\n const stepToolChoice = (_a18 = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _a18 : toolChoice;\n const stepActiveTools = (_b = prepareStepResult == null ? void 0 : prepareStepResult.experimental_activeTools) != null ? _b : activeTools;\n const stepModel = (_c = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? 
_c : model;\n const promptMessages = await convertToLanguageModelPrompt({\n prompt: {\n type: promptFormat,\n system: initialPrompt.system,\n messages: stepInputMessages\n },\n modelSupportsImageUrls: stepModel.supportsImageUrls,\n modelSupportsUrl: (_d = stepModel.supportsUrl) == null ? void 0 : _d.bind(stepModel)\n // support 'this' context\n });\n const mode = {\n type: \"regular\",\n ...prepareToolsAndToolChoice({\n tools,\n toolChoice: stepToolChoice,\n activeTools: stepActiveTools\n })\n };\n currentModelResponse = await retry(\n () => recordSpan({\n name: \"ai.generateText.doGenerate\",\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: \"ai.generateText.doGenerate\",\n telemetry\n }),\n ...baseTelemetryAttributes,\n // model:\n \"ai.model.provider\": stepModel.provider,\n \"ai.model.id\": stepModel.modelId,\n // prompt:\n \"ai.prompt.format\": { input: () => promptFormat },\n \"ai.prompt.messages\": {\n input: () => stringifyForTelemetry(promptMessages)\n },\n \"ai.prompt.tools\": {\n // convert the language model level tools:\n input: () => {\n var _a19;\n return (_a19 = mode.tools) == null ? void 0 : _a19.map((tool2) => JSON.stringify(tool2));\n }\n },\n \"ai.prompt.toolChoice\": {\n input: () => mode.toolChoice != null ? 
JSON.stringify(mode.toolChoice) : void 0\n },\n // standardized gen-ai llm span attributes:\n \"gen_ai.system\": stepModel.provider,\n \"gen_ai.request.model\": stepModel.modelId,\n \"gen_ai.request.frequency_penalty\": settings.frequencyPenalty,\n \"gen_ai.request.max_tokens\": settings.maxTokens,\n \"gen_ai.request.presence_penalty\": settings.presencePenalty,\n \"gen_ai.request.stop_sequences\": settings.stopSequences,\n \"gen_ai.request.temperature\": settings.temperature,\n \"gen_ai.request.top_k\": settings.topK,\n \"gen_ai.request.top_p\": settings.topP\n }\n }),\n tracer,\n fn: async (span2) => {\n var _a19, _b2, _c2, _d2, _e2, _f2;\n const result = await stepModel.doGenerate({\n mode,\n ...callSettings,\n inputFormat: promptFormat,\n responseFormat: output == null ? void 0 : output.responseFormat({ model }),\n prompt: promptMessages,\n providerMetadata: providerOptions,\n abortSignal,\n headers\n });\n const responseData = {\n id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),\n timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),\n modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? 
_f2 : stepModel.modelId\n };\n span2.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n \"ai.response.finishReason\": result.finishReason,\n \"ai.response.text\": {\n output: () => result.text\n },\n \"ai.response.toolCalls\": {\n output: () => JSON.stringify(result.toolCalls)\n },\n \"ai.response.id\": responseData.id,\n \"ai.response.model\": responseData.modelId,\n \"ai.response.timestamp\": responseData.timestamp.toISOString(),\n \"ai.response.providerMetadata\": JSON.stringify(\n result.providerMetadata\n ),\n \"ai.usage.promptTokens\": result.usage.promptTokens,\n \"ai.usage.completionTokens\": result.usage.completionTokens,\n // standardized gen-ai llm span attributes:\n \"gen_ai.response.finish_reasons\": [result.finishReason],\n \"gen_ai.response.id\": responseData.id,\n \"gen_ai.response.model\": responseData.modelId,\n \"gen_ai.usage.input_tokens\": result.usage.promptTokens,\n \"gen_ai.usage.output_tokens\": result.usage.completionTokens\n }\n })\n );\n return { ...result, response: responseData };\n }\n })\n );\n currentToolCalls = await Promise.all(\n ((_e = currentModelResponse.toolCalls) != null ? _e : []).map(\n (toolCall) => parseToolCall({\n toolCall,\n tools,\n repairToolCall,\n system,\n messages: stepInputMessages\n })\n )\n );\n currentToolResults = tools == null ? 
[] : await executeTools({\n toolCalls: currentToolCalls,\n tools,\n tracer,\n telemetry,\n messages: stepInputMessages,\n abortSignal\n });\n const currentUsage = calculateLanguageModelUsage(\n currentModelResponse.usage\n );\n usage = addLanguageModelUsage(usage, currentUsage);\n let nextStepType = \"done\";\n if (++stepCount < maxSteps) {\n if (continueSteps && currentModelResponse.finishReason === \"length\" && // only use continue when there are no tool calls:\n currentToolCalls.length === 0) {\n nextStepType = \"continue\";\n } else if (\n // there are tool calls:\n currentToolCalls.length > 0 && // all current tool calls have results:\n currentToolResults.length === currentToolCalls.length\n ) {\n nextStepType = \"tool-result\";\n }\n }\n const originalText = (_f = currentModelResponse.text) != null ? _f : \"\";\n const stepTextLeadingWhitespaceTrimmed = stepType === \"continue\" && // only for continue steps\n text2.trimEnd() !== text2 ? originalText.trimStart() : originalText;\n const stepText = nextStepType === \"continue\" ? removeTextAfterLastWhitespace(stepTextLeadingWhitespaceTrimmed) : stepTextLeadingWhitespaceTrimmed;\n text2 = nextStepType === \"continue\" || stepType === \"continue\" ? text2 + stepText : stepText;\n currentReasoningDetails = asReasoningDetails(\n currentModelResponse.reasoning\n );\n sources.push(...(_g = currentModelResponse.sources) != null ? _g : []);\n if (stepType === \"continue\") {\n const lastMessage = responseMessages[responseMessages.length - 1];\n if (typeof lastMessage.content === \"string\") {\n lastMessage.content += stepText;\n } else {\n lastMessage.content.push({\n text: stepText,\n type: \"text\"\n });\n }\n } else {\n responseMessages.push(\n ...toResponseMessages({\n text: text2,\n files: asFiles(currentModelResponse.files),\n reasoning: asReasoningDetails(currentModelResponse.reasoning),\n tools: tools != null ? 
tools : {},\n toolCalls: currentToolCalls,\n toolResults: currentToolResults,\n messageId: generateMessageId(),\n generateMessageId\n })\n );\n }\n const currentStepResult = {\n stepType,\n text: stepText,\n // TODO v5: rename reasoning to reasoningText (and use reasoning for composite array)\n reasoning: asReasoningText(currentReasoningDetails),\n reasoningDetails: currentReasoningDetails,\n files: asFiles(currentModelResponse.files),\n sources: (_h = currentModelResponse.sources) != null ? _h : [],\n toolCalls: currentToolCalls,\n toolResults: currentToolResults,\n finishReason: currentModelResponse.finishReason,\n usage: currentUsage,\n warnings: currentModelResponse.warnings,\n logprobs: currentModelResponse.logprobs,\n request: (_i = currentModelResponse.request) != null ? _i : {},\n response: {\n ...currentModelResponse.response,\n headers: (_j = currentModelResponse.rawResponse) == null ? void 0 : _j.headers,\n body: (_k = currentModelResponse.rawResponse) == null ? void 0 : _k.body,\n // deep clone msgs to avoid mutating past messages in multi-step:\n messages: structuredClone(responseMessages)\n },\n providerMetadata: currentModelResponse.providerMetadata,\n experimental_providerMetadata: currentModelResponse.providerMetadata,\n isContinued: nextStepType === \"continue\"\n };\n steps.push(currentStepResult);\n await (onStepFinish == null ? 
void 0 : onStepFinish(currentStepResult));\n stepType = nextStepType;\n } while (stepType !== \"done\");\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n \"ai.response.finishReason\": currentModelResponse.finishReason,\n \"ai.response.text\": {\n output: () => currentModelResponse.text\n },\n \"ai.response.toolCalls\": {\n output: () => JSON.stringify(currentModelResponse.toolCalls)\n },\n \"ai.usage.promptTokens\": currentModelResponse.usage.promptTokens,\n \"ai.usage.completionTokens\": currentModelResponse.usage.completionTokens,\n \"ai.response.providerMetadata\": JSON.stringify(\n currentModelResponse.providerMetadata\n )\n }\n })\n );\n return new DefaultGenerateTextResult({\n text: text2,\n files: asFiles(currentModelResponse.files),\n reasoning: asReasoningText(currentReasoningDetails),\n reasoningDetails: currentReasoningDetails,\n sources,\n outputResolver: () => {\n if (output == null) {\n throw new NoOutputSpecifiedError();\n }\n return output.parseOutput(\n { text: text2 },\n {\n response: currentModelResponse.response,\n usage,\n finishReason: currentModelResponse.finishReason\n }\n );\n },\n toolCalls: currentToolCalls,\n toolResults: currentToolResults,\n finishReason: currentModelResponse.finishReason,\n usage,\n warnings: currentModelResponse.warnings,\n request: (_l = currentModelResponse.request) != null ? _l : {},\n response: {\n ...currentModelResponse.response,\n headers: (_m = currentModelResponse.rawResponse) == null ? void 0 : _m.headers,\n body: (_n = currentModelResponse.rawResponse) == null ? 
void 0 : _n.body,\n messages: responseMessages\n },\n logprobs: currentModelResponse.logprobs,\n steps,\n providerMetadata: currentModelResponse.providerMetadata\n });\n }\n });\n}\nasync function executeTools({\n toolCalls,\n tools,\n tracer,\n telemetry,\n messages,\n abortSignal\n}) {\n const toolResults = await Promise.all(\n toolCalls.map(async ({ toolCallId, toolName, args }) => {\n const tool2 = tools[toolName];\n if ((tool2 == null ? void 0 : tool2.execute) == null) {\n return void 0;\n }\n const result = await recordSpan({\n name: \"ai.toolCall\",\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: \"ai.toolCall\",\n telemetry\n }),\n \"ai.toolCall.name\": toolName,\n \"ai.toolCall.id\": toolCallId,\n \"ai.toolCall.args\": {\n output: () => JSON.stringify(args)\n }\n }\n }),\n tracer,\n fn: async (span) => {\n try {\n const result2 = await tool2.execute(args, {\n toolCallId,\n messages,\n abortSignal\n });\n try {\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n \"ai.toolCall.result\": {\n output: () => JSON.stringify(result2)\n }\n }\n })\n );\n } catch (ignored) {\n }\n return result2;\n } catch (error) {\n recordErrorOnSpan(span, error);\n throw new ToolExecutionError({\n toolCallId,\n toolName,\n toolArgs: args,\n cause: error\n });\n }\n }\n });\n return {\n type: \"tool-result\",\n toolCallId,\n toolName,\n args,\n result\n };\n })\n );\n return toolResults.filter(\n (result) => result != null\n );\n}\nvar DefaultGenerateTextResult = class {\n constructor(options) {\n this.text = options.text;\n this.files = options.files;\n this.reasoning = options.reasoning;\n this.reasoningDetails = options.reasoningDetails;\n this.toolCalls = options.toolCalls;\n this.toolResults = options.toolResults;\n this.finishReason = options.finishReason;\n this.usage = options.usage;\n this.warnings = options.warnings;\n this.request = options.request;\n this.response = 
options.response;\n this.steps = options.steps;\n this.experimental_providerMetadata = options.providerMetadata;\n this.providerMetadata = options.providerMetadata;\n this.logprobs = options.logprobs;\n this.outputResolver = options.outputResolver;\n this.sources = options.sources;\n }\n get experimental_output() {\n return this.outputResolver();\n }\n};\nfunction asReasoningDetails(reasoning) {\n if (reasoning == null) {\n return [];\n }\n if (typeof reasoning === \"string\") {\n return [{ type: \"text\", text: reasoning }];\n }\n return reasoning;\n}\nfunction asFiles(files) {\n var _a17;\n return (_a17 = files == null ? void 0 : files.map((file) => new DefaultGeneratedFile(file))) != null ? _a17 : [];\n}\n\n// core/generate-text/output.ts\nvar output_exports = {};\n__export(output_exports, {\n object: () => object,\n text: () => text\n});\nimport { safeParseJSON as safeParseJSON3, safeValidateTypes as safeValidateTypes4 } from \"@ai-sdk/provider-utils\";\nimport {\n asSchema as asSchema4,\n parsePartialJson as parsePartialJson2\n} from \"@ai-sdk/ui-utils\";\n\n// errors/index.ts\nimport {\n AISDKError as AISDKError17,\n APICallError as APICallError2,\n EmptyResponseBodyError,\n InvalidPromptError as InvalidPromptError2,\n InvalidResponseDataError,\n JSONParseError as JSONParseError2,\n LoadAPIKeyError,\n NoContentGeneratedError,\n NoSuchModelError,\n TypeValidationError as TypeValidationError3,\n UnsupportedFunctionalityError as UnsupportedFunctionalityError2\n} from \"@ai-sdk/provider\";\n\n// errors/invalid-stream-part-error.ts\nimport { AISDKError as AISDKError15 } from \"@ai-sdk/provider\";\nvar name14 = \"AI_InvalidStreamPartError\";\nvar marker14 = `vercel.ai.error.${name14}`;\nvar symbol14 = Symbol.for(marker14);\nvar _a14;\nvar InvalidStreamPartError = class extends AISDKError15 {\n constructor({\n chunk,\n message\n }) {\n super({ name: name14, message });\n this[_a14] = true;\n this.chunk = chunk;\n }\n static isInstance(error) {\n return 
AISDKError15.hasMarker(error, marker14);\n }\n};\n_a14 = symbol14;\n\n// errors/mcp-client-error.ts\nimport { AISDKError as AISDKError16 } from \"@ai-sdk/provider\";\nvar name15 = \"AI_MCPClientError\";\nvar marker15 = `vercel.ai.error.${name15}`;\nvar symbol15 = Symbol.for(marker15);\nvar _a15;\nvar MCPClientError = class extends AISDKError16 {\n constructor({\n name: name17 = \"MCPClientError\",\n message,\n cause\n }) {\n super({ name: name17, message, cause });\n this[_a15] = true;\n }\n static isInstance(error) {\n return AISDKError16.hasMarker(error, marker15);\n }\n};\n_a15 = symbol15;\n\n// core/generate-text/output.ts\nvar text = () => ({\n type: \"text\",\n responseFormat: () => ({ type: \"text\" }),\n injectIntoSystemPrompt({ system }) {\n return system;\n },\n parsePartial({ text: text2 }) {\n return { partial: text2 };\n },\n parseOutput({ text: text2 }) {\n return text2;\n }\n});\nvar object = ({\n schema: inputSchema\n}) => {\n const schema = asSchema4(inputSchema);\n return {\n type: \"object\",\n responseFormat: ({ model }) => ({\n type: \"json\",\n schema: model.supportsStructuredOutputs ? schema.jsonSchema : void 0\n }),\n injectIntoSystemPrompt({ system, model }) {\n return model.supportsStructuredOutputs ? 
system : injectJsonInstruction({\n prompt: system,\n schema: schema.jsonSchema\n });\n },\n parsePartial({ text: text2 }) {\n const result = parsePartialJson2(text2);\n switch (result.state) {\n case \"failed-parse\":\n case \"undefined-input\":\n return void 0;\n case \"repaired-parse\":\n case \"successful-parse\":\n return {\n // Note: currently no validation of partial results:\n partial: result.value\n };\n default: {\n const _exhaustiveCheck = result.state;\n throw new Error(`Unsupported parse state: ${_exhaustiveCheck}`);\n }\n }\n },\n parseOutput({ text: text2 }, context) {\n const parseResult = safeParseJSON3({ text: text2 });\n if (!parseResult.success) {\n throw new NoObjectGeneratedError({\n message: \"No object generated: could not parse the response.\",\n cause: parseResult.error,\n text: text2,\n response: context.response,\n usage: context.usage,\n finishReason: context.finishReason\n });\n }\n const validationResult = safeValidateTypes4({\n value: parseResult.value,\n schema\n });\n if (!validationResult.success) {\n throw new NoObjectGeneratedError({\n message: \"No object generated: response did not match schema.\",\n cause: validationResult.error,\n text: text2,\n response: context.response,\n usage: context.usage,\n finishReason: context.finishReason\n });\n }\n return validationResult.value;\n }\n };\n};\n\n// core/generate-text/smooth-stream.ts\nimport { delay as originalDelay } from \"@ai-sdk/provider-utils\";\nimport { InvalidArgumentError as InvalidArgumentError2 } from \"@ai-sdk/provider\";\nvar CHUNKING_REGEXPS = {\n word: /\\S+\\s+/m,\n line: /\\n+/m\n};\nfunction smoothStream({\n delayInMs = 10,\n chunking = \"word\",\n _internal: { delay: delay2 = originalDelay } = {}\n} = {}) {\n let detectChunk;\n if (typeof chunking === \"function\") {\n detectChunk = (buffer) => {\n const match = chunking(buffer);\n if (match == null) {\n return null;\n }\n if (!match.length) {\n throw new Error(`Chunking function must return a non-empty 
string.`);\n }\n if (!buffer.startsWith(match)) {\n throw new Error(\n `Chunking function must return a match that is a prefix of the buffer. Received: \"${match}\" expected to start with \"${buffer}\"`\n );\n }\n return match;\n };\n } else {\n const chunkingRegex = typeof chunking === \"string\" ? CHUNKING_REGEXPS[chunking] : chunking;\n if (chunkingRegex == null) {\n throw new InvalidArgumentError2({\n argument: \"chunking\",\n message: `Chunking must be \"word\" or \"line\" or a RegExp. Received: ${chunking}`\n });\n }\n detectChunk = (buffer) => {\n const match = chunkingRegex.exec(buffer);\n if (!match) {\n return null;\n }\n return buffer.slice(0, match.index) + (match == null ? void 0 : match[0]);\n };\n }\n return () => {\n let buffer = \"\";\n return new TransformStream({\n async transform(chunk, controller) {\n if (chunk.type !== \"text-delta\") {\n if (buffer.length > 0) {\n controller.enqueue({ type: \"text-delta\", textDelta: buffer });\n buffer = \"\";\n }\n controller.enqueue(chunk);\n return;\n }\n buffer += chunk.textDelta;\n let match;\n while ((match = detectChunk(buffer)) != null) {\n controller.enqueue({ type: \"text-delta\", textDelta: match });\n buffer = buffer.slice(match.length);\n await delay2(delayInMs);\n }\n }\n });\n };\n}\n\n// core/generate-text/stream-text.ts\nimport { AISDKError as AISDKError18 } from \"@ai-sdk/provider\";\nimport { createIdGenerator as createIdGenerator4 } from \"@ai-sdk/provider-utils\";\nimport { formatDataStreamPart as formatDataStreamPart2 } from \"@ai-sdk/ui-utils\";\n\n// util/as-array.ts\nfunction asArray(value) {\n return value === void 0 ? [] : Array.isArray(value) ? value : [value];\n}\n\n// util/consume-stream.ts\nasync function consumeStream({\n stream,\n onError\n}) {\n const reader = stream.getReader();\n try {\n while (true) {\n const { done } = await reader.read();\n if (done)\n break;\n }\n } catch (error) {\n onError == null ? 
void 0 : onError(error);\n } finally {\n reader.releaseLock();\n }\n}\n\n// core/util/merge-streams.ts\nfunction mergeStreams(stream1, stream2) {\n const reader1 = stream1.getReader();\n const reader2 = stream2.getReader();\n let lastRead1 = void 0;\n let lastRead2 = void 0;\n let stream1Done = false;\n let stream2Done = false;\n async function readStream1(controller) {\n try {\n if (lastRead1 == null) {\n lastRead1 = reader1.read();\n }\n const result = await lastRead1;\n lastRead1 = void 0;\n if (!result.done) {\n controller.enqueue(result.value);\n } else {\n controller.close();\n }\n } catch (error) {\n controller.error(error);\n }\n }\n async function readStream2(controller) {\n try {\n if (lastRead2 == null) {\n lastRead2 = reader2.read();\n }\n const result = await lastRead2;\n lastRead2 = void 0;\n if (!result.done) {\n controller.enqueue(result.value);\n } else {\n controller.close();\n }\n } catch (error) {\n controller.error(error);\n }\n }\n return new ReadableStream({\n async pull(controller) {\n try {\n if (stream1Done) {\n await readStream2(controller);\n return;\n }\n if (stream2Done) {\n await readStream1(controller);\n return;\n }\n if (lastRead1 == null) {\n lastRead1 = reader1.read();\n }\n if (lastRead2 == null) {\n lastRead2 = reader2.read();\n }\n const { result, reader } = await Promise.race([\n lastRead1.then((result2) => ({ result: result2, reader: reader1 })),\n lastRead2.then((result2) => ({ result: result2, reader: reader2 }))\n ]);\n if (!result.done) {\n controller.enqueue(result.value);\n }\n if (reader === reader1) {\n lastRead1 = void 0;\n if (result.done) {\n await readStream2(controller);\n stream1Done = true;\n }\n } else {\n lastRead2 = void 0;\n if (result.done) {\n stream2Done = true;\n await readStream1(controller);\n }\n }\n } catch (error) {\n controller.error(error);\n }\n },\n cancel() {\n reader1.cancel();\n reader2.cancel();\n }\n });\n}\n\n// core/generate-text/run-tools-transformation.ts\nimport { generateId } from 
\"@ai-sdk/ui-utils\";\nfunction runToolsTransformation({\n tools,\n generatorStream,\n toolCallStreaming,\n tracer,\n telemetry,\n system,\n messages,\n abortSignal,\n repairToolCall\n}) {\n let toolResultsStreamController = null;\n const toolResultsStream = new ReadableStream({\n start(controller) {\n toolResultsStreamController = controller;\n }\n });\n const activeToolCalls = {};\n const outstandingToolResults = /* @__PURE__ */ new Set();\n let canClose = false;\n let finishChunk = void 0;\n function attemptClose() {\n if (canClose && outstandingToolResults.size === 0) {\n if (finishChunk != null) {\n toolResultsStreamController.enqueue(finishChunk);\n }\n toolResultsStreamController.close();\n }\n }\n const forwardStream = new TransformStream({\n async transform(chunk, controller) {\n const chunkType = chunk.type;\n switch (chunkType) {\n case \"text-delta\":\n case \"reasoning\":\n case \"reasoning-signature\":\n case \"redacted-reasoning\":\n case \"source\":\n case \"response-metadata\":\n case \"error\": {\n controller.enqueue(chunk);\n break;\n }\n case \"file\": {\n controller.enqueue(\n new DefaultGeneratedFileWithType({\n data: chunk.data,\n mimeType: chunk.mimeType\n })\n );\n break;\n }\n case \"tool-call-delta\": {\n if (toolCallStreaming) {\n if (!activeToolCalls[chunk.toolCallId]) {\n controller.enqueue({\n type: \"tool-call-streaming-start\",\n toolCallId: chunk.toolCallId,\n toolName: chunk.toolName\n });\n activeToolCalls[chunk.toolCallId] = true;\n }\n controller.enqueue({\n type: \"tool-call-delta\",\n toolCallId: chunk.toolCallId,\n toolName: chunk.toolName,\n argsTextDelta: chunk.argsTextDelta\n });\n }\n break;\n }\n case \"tool-call\": {\n try {\n const toolCall = await parseToolCall({\n toolCall: chunk,\n tools,\n repairToolCall,\n system,\n messages\n });\n controller.enqueue(toolCall);\n const tool2 = tools[toolCall.toolName];\n if (tool2.execute != null) {\n const toolExecutionId = generateId();\n 
outstandingToolResults.add(toolExecutionId);\n recordSpan({\n name: \"ai.toolCall\",\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: \"ai.toolCall\",\n telemetry\n }),\n \"ai.toolCall.name\": toolCall.toolName,\n \"ai.toolCall.id\": toolCall.toolCallId,\n \"ai.toolCall.args\": {\n output: () => JSON.stringify(toolCall.args)\n }\n }\n }),\n tracer,\n fn: async (span) => tool2.execute(toolCall.args, {\n toolCallId: toolCall.toolCallId,\n messages,\n abortSignal\n }).then(\n (result) => {\n toolResultsStreamController.enqueue({\n ...toolCall,\n type: \"tool-result\",\n result\n });\n outstandingToolResults.delete(toolExecutionId);\n attemptClose();\n try {\n span.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n \"ai.toolCall.result\": {\n output: () => JSON.stringify(result)\n }\n }\n })\n );\n } catch (ignored) {\n }\n },\n (error) => {\n recordErrorOnSpan(span, error);\n toolResultsStreamController.enqueue({\n type: \"error\",\n error: new ToolExecutionError({\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n toolArgs: toolCall.args,\n cause: error\n })\n });\n outstandingToolResults.delete(toolExecutionId);\n attemptClose();\n }\n )\n });\n }\n } catch (error) {\n toolResultsStreamController.enqueue({\n type: \"error\",\n error\n });\n }\n break;\n }\n case \"finish\": {\n finishChunk = {\n type: \"finish\",\n finishReason: chunk.finishReason,\n logprobs: chunk.logprobs,\n usage: calculateLanguageModelUsage(chunk.usage),\n experimental_providerMetadata: chunk.providerMetadata\n };\n break;\n }\n default: {\n const _exhaustiveCheck = chunkType;\n throw new Error(`Unhandled chunk type: ${_exhaustiveCheck}`);\n }\n }\n },\n flush() {\n canClose = true;\n attemptClose();\n }\n });\n return new ReadableStream({\n async start(controller) {\n return Promise.all([\n generatorStream.pipeThrough(forwardStream).pipeTo(\n new WritableStream({\n write(chunk) {\n 
controller.enqueue(chunk);\n },\n close() {\n }\n })\n ),\n toolResultsStream.pipeTo(\n new WritableStream({\n write(chunk) {\n controller.enqueue(chunk);\n },\n close() {\n controller.close();\n }\n })\n )\n ]);\n }\n });\n}\n\n// core/generate-text/stream-text.ts\nvar originalGenerateId4 = createIdGenerator4({\n prefix: \"aitxt\",\n size: 24\n});\nvar originalGenerateMessageId2 = createIdGenerator4({\n prefix: \"msg\",\n size: 24\n});\nfunction streamText({\n model,\n tools,\n toolChoice,\n system,\n prompt,\n messages,\n maxRetries,\n abortSignal,\n headers,\n maxSteps = 1,\n experimental_generateMessageId: generateMessageId = originalGenerateMessageId2,\n experimental_output: output,\n experimental_continueSteps: continueSteps = false,\n experimental_telemetry: telemetry,\n experimental_providerMetadata,\n providerOptions = experimental_providerMetadata,\n experimental_toolCallStreaming = false,\n toolCallStreaming = experimental_toolCallStreaming,\n experimental_activeTools: activeTools,\n experimental_repairToolCall: repairToolCall,\n experimental_transform: transform,\n onChunk,\n onError,\n onFinish,\n onStepFinish,\n _internal: {\n now: now2 = now,\n generateId: generateId3 = originalGenerateId4,\n currentDate = () => /* @__PURE__ */ new Date()\n } = {},\n ...settings\n}) {\n if (typeof model === \"string\" || model.specificationVersion !== \"v1\") {\n throw new UnsupportedModelVersionError();\n }\n return new DefaultStreamTextResult({\n model,\n telemetry,\n headers,\n settings,\n maxRetries,\n abortSignal,\n system,\n prompt,\n messages,\n tools,\n toolChoice,\n toolCallStreaming,\n transforms: asArray(transform),\n activeTools,\n repairToolCall,\n maxSteps,\n output,\n continueSteps,\n providerOptions,\n onChunk,\n onError,\n onFinish,\n onStepFinish,\n now: now2,\n currentDate,\n generateId: generateId3,\n generateMessageId\n });\n}\nfunction createOutputTransformStream(output) {\n if (!output) {\n return new TransformStream({\n transform(chunk, 
controller) {\n controller.enqueue({ part: chunk, partialOutput: void 0 });\n }\n });\n }\n let text2 = \"\";\n let textChunk = \"\";\n let lastPublishedJson = \"\";\n function publishTextChunk({\n controller,\n partialOutput = void 0\n }) {\n controller.enqueue({\n part: { type: \"text-delta\", textDelta: textChunk },\n partialOutput\n });\n textChunk = \"\";\n }\n return new TransformStream({\n transform(chunk, controller) {\n if (chunk.type === \"step-finish\") {\n publishTextChunk({ controller });\n }\n if (chunk.type !== \"text-delta\") {\n controller.enqueue({ part: chunk, partialOutput: void 0 });\n return;\n }\n text2 += chunk.textDelta;\n textChunk += chunk.textDelta;\n const result = output.parsePartial({ text: text2 });\n if (result != null) {\n const currentJson = JSON.stringify(result.partial);\n if (currentJson !== lastPublishedJson) {\n publishTextChunk({ controller, partialOutput: result.partial });\n lastPublishedJson = currentJson;\n }\n }\n },\n flush(controller) {\n if (textChunk.length > 0) {\n publishTextChunk({ controller });\n }\n }\n });\n}\nvar DefaultStreamTextResult = class {\n constructor({\n model,\n telemetry,\n headers,\n settings,\n maxRetries: maxRetriesArg,\n abortSignal,\n system,\n prompt,\n messages,\n tools,\n toolChoice,\n toolCallStreaming,\n transforms,\n activeTools,\n repairToolCall,\n maxSteps,\n output,\n continueSteps,\n providerOptions,\n now: now2,\n currentDate,\n generateId: generateId3,\n generateMessageId,\n onChunk,\n onError,\n onFinish,\n onStepFinish\n }) {\n this.warningsPromise = new DelayedPromise();\n this.usagePromise = new DelayedPromise();\n this.finishReasonPromise = new DelayedPromise();\n this.providerMetadataPromise = new DelayedPromise();\n this.textPromise = new DelayedPromise();\n this.reasoningPromise = new DelayedPromise();\n this.reasoningDetailsPromise = new DelayedPromise();\n this.sourcesPromise = new DelayedPromise();\n this.filesPromise = new DelayedPromise();\n this.toolCallsPromise = 
new DelayedPromise();\n this.toolResultsPromise = new DelayedPromise();\n this.requestPromise = new DelayedPromise();\n this.responsePromise = new DelayedPromise();\n this.stepsPromise = new DelayedPromise();\n var _a17;\n if (maxSteps < 1) {\n throw new InvalidArgumentError({\n parameter: \"maxSteps\",\n value: maxSteps,\n message: \"maxSteps must be at least 1\"\n });\n }\n this.output = output;\n let recordedStepText = \"\";\n let recordedContinuationText = \"\";\n let recordedFullText = \"\";\n let stepReasoning = [];\n let stepFiles = [];\n let activeReasoningText = void 0;\n let recordedStepSources = [];\n const recordedSources = [];\n const recordedResponse = {\n id: generateId3(),\n timestamp: currentDate(),\n modelId: model.modelId,\n messages: []\n };\n let recordedToolCalls = [];\n let recordedToolResults = [];\n let recordedFinishReason = void 0;\n let recordedUsage = void 0;\n let stepType = \"initial\";\n const recordedSteps = [];\n let rootSpan;\n const eventProcessor = new TransformStream({\n async transform(chunk, controller) {\n controller.enqueue(chunk);\n const { part } = chunk;\n if (part.type === \"text-delta\" || part.type === \"reasoning\" || part.type === \"source\" || part.type === \"tool-call\" || part.type === \"tool-result\" || part.type === \"tool-call-streaming-start\" || part.type === \"tool-call-delta\") {\n await (onChunk == null ? void 0 : onChunk({ chunk: part }));\n }\n if (part.type === \"error\") {\n await (onError == null ? 
void 0 : onError({ error: part.error }));\n }\n if (part.type === \"text-delta\") {\n recordedStepText += part.textDelta;\n recordedContinuationText += part.textDelta;\n recordedFullText += part.textDelta;\n }\n if (part.type === \"reasoning\") {\n if (activeReasoningText == null) {\n activeReasoningText = { type: \"text\", text: part.textDelta };\n stepReasoning.push(activeReasoningText);\n } else {\n activeReasoningText.text += part.textDelta;\n }\n }\n if (part.type === \"reasoning-signature\") {\n if (activeReasoningText == null) {\n throw new AISDKError18({\n name: \"InvalidStreamPart\",\n message: \"reasoning-signature without reasoning\"\n });\n }\n activeReasoningText.signature = part.signature;\n activeReasoningText = void 0;\n }\n if (part.type === \"redacted-reasoning\") {\n stepReasoning.push({ type: \"redacted\", data: part.data });\n }\n if (part.type === \"file\") {\n stepFiles.push(part);\n }\n if (part.type === \"source\") {\n recordedSources.push(part.source);\n recordedStepSources.push(part.source);\n }\n if (part.type === \"tool-call\") {\n recordedToolCalls.push(part);\n }\n if (part.type === \"tool-result\") {\n recordedToolResults.push(part);\n }\n if (part.type === \"step-finish\") {\n const stepMessages = toResponseMessages({\n text: recordedContinuationText,\n files: stepFiles,\n reasoning: stepReasoning,\n tools: tools != null ? 
tools : {},\n toolCalls: recordedToolCalls,\n toolResults: recordedToolResults,\n messageId: part.messageId,\n generateMessageId\n });\n const currentStep = recordedSteps.length;\n let nextStepType = \"done\";\n if (currentStep + 1 < maxSteps) {\n if (continueSteps && part.finishReason === \"length\" && // only use continue when there are no tool calls:\n recordedToolCalls.length === 0) {\n nextStepType = \"continue\";\n } else if (\n // there are tool calls:\n recordedToolCalls.length > 0 && // all current tool calls have results:\n recordedToolResults.length === recordedToolCalls.length\n ) {\n nextStepType = \"tool-result\";\n }\n }\n const currentStepResult = {\n stepType,\n text: recordedStepText,\n reasoning: asReasoningText(stepReasoning),\n reasoningDetails: stepReasoning,\n files: stepFiles,\n sources: recordedStepSources,\n toolCalls: recordedToolCalls,\n toolResults: recordedToolResults,\n finishReason: part.finishReason,\n usage: part.usage,\n warnings: part.warnings,\n logprobs: part.logprobs,\n request: part.request,\n response: {\n ...part.response,\n messages: [...recordedResponse.messages, ...stepMessages]\n },\n providerMetadata: part.experimental_providerMetadata,\n experimental_providerMetadata: part.experimental_providerMetadata,\n isContinued: part.isContinued\n };\n await (onStepFinish == null ? 
void 0 : onStepFinish(currentStepResult));\n recordedSteps.push(currentStepResult);\n recordedToolCalls = [];\n recordedToolResults = [];\n recordedStepText = \"\";\n recordedStepSources = [];\n stepReasoning = [];\n stepFiles = [];\n activeReasoningText = void 0;\n if (nextStepType !== \"done\") {\n stepType = nextStepType;\n }\n if (nextStepType !== \"continue\") {\n recordedResponse.messages.push(...stepMessages);\n recordedContinuationText = \"\";\n }\n }\n if (part.type === \"finish\") {\n recordedResponse.id = part.response.id;\n recordedResponse.timestamp = part.response.timestamp;\n recordedResponse.modelId = part.response.modelId;\n recordedResponse.headers = part.response.headers;\n recordedUsage = part.usage;\n recordedFinishReason = part.finishReason;\n }\n },\n async flush(controller) {\n var _a18;\n try {\n if (recordedSteps.length === 0) {\n return;\n }\n const lastStep = recordedSteps[recordedSteps.length - 1];\n self.warningsPromise.resolve(lastStep.warnings);\n self.requestPromise.resolve(lastStep.request);\n self.responsePromise.resolve(lastStep.response);\n self.toolCallsPromise.resolve(lastStep.toolCalls);\n self.toolResultsPromise.resolve(lastStep.toolResults);\n self.providerMetadataPromise.resolve(\n lastStep.experimental_providerMetadata\n );\n self.reasoningPromise.resolve(lastStep.reasoning);\n self.reasoningDetailsPromise.resolve(lastStep.reasoningDetails);\n const finishReason = recordedFinishReason != null ? recordedFinishReason : \"unknown\";\n const usage = recordedUsage != null ? recordedUsage : {\n completionTokens: NaN,\n promptTokens: NaN,\n totalTokens: NaN\n };\n self.finishReasonPromise.resolve(finishReason);\n self.usagePromise.resolve(usage);\n self.textPromise.resolve(recordedFullText);\n self.sourcesPromise.resolve(recordedSources);\n self.filesPromise.resolve(lastStep.files);\n self.stepsPromise.resolve(recordedSteps);\n await (onFinish == null ? 
void 0 : onFinish({\n finishReason,\n logprobs: void 0,\n usage,\n text: recordedFullText,\n reasoning: lastStep.reasoning,\n reasoningDetails: lastStep.reasoningDetails,\n files: lastStep.files,\n sources: lastStep.sources,\n toolCalls: lastStep.toolCalls,\n toolResults: lastStep.toolResults,\n request: (_a18 = lastStep.request) != null ? _a18 : {},\n response: lastStep.response,\n warnings: lastStep.warnings,\n providerMetadata: lastStep.providerMetadata,\n experimental_providerMetadata: lastStep.experimental_providerMetadata,\n steps: recordedSteps\n }));\n rootSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n \"ai.response.finishReason\": finishReason,\n \"ai.response.text\": { output: () => recordedFullText },\n \"ai.response.toolCalls\": {\n output: () => {\n var _a19;\n return ((_a19 = lastStep.toolCalls) == null ? void 0 : _a19.length) ? JSON.stringify(lastStep.toolCalls) : void 0;\n }\n },\n \"ai.usage.promptTokens\": usage.promptTokens,\n \"ai.usage.completionTokens\": usage.completionTokens,\n \"ai.response.providerMetadata\": JSON.stringify(\n lastStep.providerMetadata\n )\n }\n })\n );\n } catch (error) {\n controller.error(error);\n } finally {\n rootSpan.end();\n }\n }\n });\n const stitchableStream = createStitchableStream();\n this.addStream = stitchableStream.addStream;\n this.closeStream = stitchableStream.close;\n let stream = stitchableStream.stream;\n for (const transform of transforms) {\n stream = stream.pipeThrough(\n transform({\n tools,\n stopStream() {\n stitchableStream.terminate();\n }\n })\n );\n }\n this.baseStream = stream.pipeThrough(createOutputTransformStream(output)).pipeThrough(eventProcessor);\n const { maxRetries, retry } = prepareRetries({\n maxRetries: maxRetriesArg\n });\n const tracer = getTracer(telemetry);\n const baseTelemetryAttributes = getBaseTelemetryAttributes({\n model,\n telemetry,\n headers,\n settings: { ...settings, maxRetries }\n });\n const initialPrompt = 
standardizePrompt({\n prompt: {\n system: (_a17 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a17 : system,\n prompt,\n messages\n },\n tools\n });\n const self = this;\n recordSpan({\n name: \"ai.streamText\",\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({ operationId: \"ai.streamText\", telemetry }),\n ...baseTelemetryAttributes,\n // specific settings that only make sense on the outer level:\n \"ai.prompt\": {\n input: () => JSON.stringify({ system, prompt, messages })\n },\n \"ai.settings.maxSteps\": maxSteps\n }\n }),\n tracer,\n endWhenDone: false,\n fn: async (rootSpanArg) => {\n rootSpan = rootSpanArg;\n async function streamStep({\n currentStep,\n responseMessages,\n usage,\n stepType: stepType2,\n previousStepText,\n hasLeadingWhitespace,\n messageId\n }) {\n var _a18;\n const promptFormat = responseMessages.length === 0 ? initialPrompt.type : \"messages\";\n const stepInputMessages = [\n ...initialPrompt.messages,\n ...responseMessages\n ];\n const promptMessages = await convertToLanguageModelPrompt({\n prompt: {\n type: promptFormat,\n system: initialPrompt.system,\n messages: stepInputMessages\n },\n modelSupportsImageUrls: model.supportsImageUrls,\n modelSupportsUrl: (_a18 = model.supportsUrl) == null ? 
void 0 : _a18.bind(model)\n // support 'this' context\n });\n const mode = {\n type: \"regular\",\n ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })\n };\n const {\n result: { stream: stream2, warnings, rawResponse, request },\n doStreamSpan,\n startTimestampMs\n } = await retry(\n () => recordSpan({\n name: \"ai.streamText.doStream\",\n attributes: selectTelemetryAttributes({\n telemetry,\n attributes: {\n ...assembleOperationName({\n operationId: \"ai.streamText.doStream\",\n telemetry\n }),\n ...baseTelemetryAttributes,\n \"ai.prompt.format\": {\n input: () => promptFormat\n },\n \"ai.prompt.messages\": {\n input: () => stringifyForTelemetry(promptMessages)\n },\n \"ai.prompt.tools\": {\n // convert the language model level tools:\n input: () => {\n var _a19;\n return (_a19 = mode.tools) == null ? void 0 : _a19.map((tool2) => JSON.stringify(tool2));\n }\n },\n \"ai.prompt.toolChoice\": {\n input: () => mode.toolChoice != null ? JSON.stringify(mode.toolChoice) : void 0\n },\n // standardized gen-ai llm span attributes:\n \"gen_ai.system\": model.provider,\n \"gen_ai.request.model\": model.modelId,\n \"gen_ai.request.frequency_penalty\": settings.frequencyPenalty,\n \"gen_ai.request.max_tokens\": settings.maxTokens,\n \"gen_ai.request.presence_penalty\": settings.presencePenalty,\n \"gen_ai.request.stop_sequences\": settings.stopSequences,\n \"gen_ai.request.temperature\": settings.temperature,\n \"gen_ai.request.top_k\": settings.topK,\n \"gen_ai.request.top_p\": settings.topP\n }\n }),\n tracer,\n endWhenDone: false,\n fn: async (doStreamSpan2) => ({\n startTimestampMs: now2(),\n // get before the call\n doStreamSpan: doStreamSpan2,\n result: await model.doStream({\n mode,\n ...prepareCallSettings(settings),\n inputFormat: promptFormat,\n responseFormat: output == null ? 
void 0 : output.responseFormat({ model }),\n prompt: promptMessages,\n providerMetadata: providerOptions,\n abortSignal,\n headers\n })\n })\n })\n );\n const transformedStream = runToolsTransformation({\n tools,\n generatorStream: stream2,\n toolCallStreaming,\n tracer,\n telemetry,\n system,\n messages: stepInputMessages,\n repairToolCall,\n abortSignal\n });\n const stepRequest = request != null ? request : {};\n const stepToolCalls = [];\n const stepToolResults = [];\n const stepReasoning2 = [];\n const stepFiles2 = [];\n let activeReasoningText2 = void 0;\n let stepFinishReason = \"unknown\";\n let stepUsage = {\n promptTokens: 0,\n completionTokens: 0,\n totalTokens: 0\n };\n let stepProviderMetadata;\n let stepFirstChunk = true;\n let stepText = \"\";\n let fullStepText = stepType2 === \"continue\" ? previousStepText : \"\";\n let stepLogProbs;\n let stepResponse = {\n id: generateId3(),\n timestamp: currentDate(),\n modelId: model.modelId\n };\n let chunkBuffer = \"\";\n let chunkTextPublished = false;\n let inWhitespacePrefix = true;\n let hasWhitespaceSuffix = false;\n async function publishTextChunk({\n controller,\n chunk\n }) {\n controller.enqueue(chunk);\n stepText += chunk.textDelta;\n fullStepText += chunk.textDelta;\n chunkTextPublished = true;\n hasWhitespaceSuffix = chunk.textDelta.trimEnd() !== chunk.textDelta;\n }\n self.addStream(\n transformedStream.pipeThrough(\n new TransformStream({\n async transform(chunk, controller) {\n var _a19, _b, _c;\n if (stepFirstChunk) {\n const msToFirstChunk = now2() - startTimestampMs;\n stepFirstChunk = false;\n doStreamSpan.addEvent(\"ai.stream.firstChunk\", {\n \"ai.response.msToFirstChunk\": msToFirstChunk\n });\n doStreamSpan.setAttributes({\n \"ai.response.msToFirstChunk\": msToFirstChunk\n });\n controller.enqueue({\n type: \"step-start\",\n messageId,\n request: stepRequest,\n warnings: warnings != null ? 
warnings : []\n });\n }\n if (chunk.type === \"text-delta\" && chunk.textDelta.length === 0) {\n return;\n }\n const chunkType = chunk.type;\n switch (chunkType) {\n case \"text-delta\": {\n if (continueSteps) {\n const trimmedChunkText = inWhitespacePrefix && hasLeadingWhitespace ? chunk.textDelta.trimStart() : chunk.textDelta;\n if (trimmedChunkText.length === 0) {\n break;\n }\n inWhitespacePrefix = false;\n chunkBuffer += trimmedChunkText;\n const split = splitOnLastWhitespace(chunkBuffer);\n if (split != null) {\n chunkBuffer = split.suffix;\n await publishTextChunk({\n controller,\n chunk: {\n type: \"text-delta\",\n textDelta: split.prefix + split.whitespace\n }\n });\n }\n } else {\n await publishTextChunk({ controller, chunk });\n }\n break;\n }\n case \"reasoning\": {\n controller.enqueue(chunk);\n if (activeReasoningText2 == null) {\n activeReasoningText2 = {\n type: \"text\",\n text: chunk.textDelta\n };\n stepReasoning2.push(activeReasoningText2);\n } else {\n activeReasoningText2.text += chunk.textDelta;\n }\n break;\n }\n case \"reasoning-signature\": {\n controller.enqueue(chunk);\n if (activeReasoningText2 == null) {\n throw new InvalidStreamPartError({\n chunk,\n message: \"reasoning-signature without reasoning\"\n });\n }\n activeReasoningText2.signature = chunk.signature;\n activeReasoningText2 = void 0;\n break;\n }\n case \"redacted-reasoning\": {\n controller.enqueue(chunk);\n stepReasoning2.push({\n type: \"redacted\",\n data: chunk.data\n });\n break;\n }\n case \"tool-call\": {\n controller.enqueue(chunk);\n stepToolCalls.push(chunk);\n break;\n }\n case \"tool-result\": {\n controller.enqueue(chunk);\n stepToolResults.push(chunk);\n break;\n }\n case \"response-metadata\": {\n stepResponse = {\n id: (_a19 = chunk.id) != null ? _a19 : stepResponse.id,\n timestamp: (_b = chunk.timestamp) != null ? _b : stepResponse.timestamp,\n modelId: (_c = chunk.modelId) != null ? 
_c : stepResponse.modelId\n };\n break;\n }\n case \"finish\": {\n stepUsage = chunk.usage;\n stepFinishReason = chunk.finishReason;\n stepProviderMetadata = chunk.experimental_providerMetadata;\n stepLogProbs = chunk.logprobs;\n const msToFinish = now2() - startTimestampMs;\n doStreamSpan.addEvent(\"ai.stream.finish\");\n doStreamSpan.setAttributes({\n \"ai.response.msToFinish\": msToFinish,\n \"ai.response.avgCompletionTokensPerSecond\": 1e3 * stepUsage.completionTokens / msToFinish\n });\n break;\n }\n case \"file\": {\n stepFiles2.push(chunk);\n controller.enqueue(chunk);\n break;\n }\n case \"source\":\n case \"tool-call-streaming-start\":\n case \"tool-call-delta\": {\n controller.enqueue(chunk);\n break;\n }\n case \"error\": {\n controller.enqueue(chunk);\n stepFinishReason = \"error\";\n break;\n }\n default: {\n const exhaustiveCheck = chunkType;\n throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);\n }\n }\n },\n // invoke onFinish callback and resolve toolResults promise when the stream is about to close:\n async flush(controller) {\n const stepToolCallsJson = stepToolCalls.length > 0 ? 
JSON.stringify(stepToolCalls) : void 0;\n let nextStepType = \"done\";\n if (currentStep + 1 < maxSteps) {\n if (continueSteps && stepFinishReason === \"length\" && // only use continue when there are no tool calls:\n stepToolCalls.length === 0) {\n nextStepType = \"continue\";\n } else if (\n // there are tool calls:\n stepToolCalls.length > 0 && // all current tool calls have results:\n stepToolResults.length === stepToolCalls.length\n ) {\n nextStepType = \"tool-result\";\n }\n }\n if (continueSteps && chunkBuffer.length > 0 && (nextStepType !== \"continue\" || // when the next step is a regular step, publish the buffer\n stepType2 === \"continue\" && !chunkTextPublished)) {\n await publishTextChunk({\n controller,\n chunk: {\n type: \"text-delta\",\n textDelta: chunkBuffer\n }\n });\n chunkBuffer = \"\";\n }\n try {\n doStreamSpan.setAttributes(\n selectTelemetryAttributes({\n telemetry,\n attributes: {\n \"ai.response.finishReason\": stepFinishReason,\n \"ai.response.text\": { output: () => stepText },\n \"ai.response.toolCalls\": {\n output: () => stepToolCallsJson\n },\n \"ai.response.id\": stepResponse.id,\n \"ai.response.model\": stepResponse.modelId,\n \"ai.response.timestamp\": stepResponse.timestamp.toISOString(),\n \"ai.response.providerMetadata\": JSON.stringify(stepProviderMetadata),\n \"ai.usage.promptTokens\": stepUsage.promptTokens,\n \"ai.usage.completionTokens\": stepUsage.completionTokens,\n // standardized gen-ai llm span attributes:\n \"gen_ai.response.finish_reasons\": [stepFinishReason],\n \"gen_ai.response.id\": stepResponse.id,\n \"gen_ai.response.model\": stepResponse.modelId,\n \"gen_ai.usage.input_tokens\": stepUsage.promptTokens,\n \"gen_ai.usage.output_tokens\": stepUsage.completionTokens\n }\n })\n );\n } catch (error) {\n } finally {\n doStreamSpan.end();\n }\n controller.enqueue({\n type: \"step-finish\",\n finishReason: stepFinishReason,\n usage: stepUsage,\n providerMetadata: stepProviderMetadata,\n 
experimental_providerMetadata: stepProviderMetadata,\n logprobs: stepLogProbs,\n request: stepRequest,\n response: {\n ...stepResponse,\n headers: rawResponse == null ? void 0 : rawResponse.headers\n },\n warnings,\n isContinued: nextStepType === \"continue\",\n messageId\n });\n const combinedUsage = addLanguageModelUsage(usage, stepUsage);\n if (nextStepType === \"done\") {\n controller.enqueue({\n type: \"finish\",\n finishReason: stepFinishReason,\n usage: combinedUsage,\n providerMetadata: stepProviderMetadata,\n experimental_providerMetadata: stepProviderMetadata,\n logprobs: stepLogProbs,\n response: {\n ...stepResponse,\n headers: rawResponse == null ? void 0 : rawResponse.headers\n }\n });\n self.closeStream();\n } else {\n if (stepType2 === \"continue\") {\n const lastMessage = responseMessages[responseMessages.length - 1];\n if (typeof lastMessage.content === \"string\") {\n lastMessage.content += stepText;\n } else {\n lastMessage.content.push({\n text: stepText,\n type: \"text\"\n });\n }\n } else {\n responseMessages.push(\n ...toResponseMessages({\n text: stepText,\n files: stepFiles2,\n reasoning: stepReasoning2,\n tools: tools != null ? tools : {},\n toolCalls: stepToolCalls,\n toolResults: stepToolResults,\n messageId,\n generateMessageId\n })\n );\n }\n await streamStep({\n currentStep: currentStep + 1,\n responseMessages,\n usage: combinedUsage,\n stepType: nextStepType,\n previousStepText: fullStepText,\n hasLeadingWhitespace: hasWhitespaceSuffix,\n messageId: (\n // keep the same id when continuing a step:\n nextStepType === \"continue\" ? 
messageId : generateMessageId()\n )\n });\n }\n }\n })\n )\n );\n }\n await streamStep({\n currentStep: 0,\n responseMessages: [],\n usage: {\n promptTokens: 0,\n completionTokens: 0,\n totalTokens: 0\n },\n previousStepText: \"\",\n stepType: \"initial\",\n hasLeadingWhitespace: false,\n messageId: generateMessageId()\n });\n }\n }).catch((error) => {\n self.addStream(\n new ReadableStream({\n start(controller) {\n controller.enqueue({ type: \"error\", error });\n controller.close();\n }\n })\n );\n self.closeStream();\n });\n }\n get warnings() {\n return this.warningsPromise.value;\n }\n get usage() {\n return this.usagePromise.value;\n }\n get finishReason() {\n return this.finishReasonPromise.value;\n }\n get experimental_providerMetadata() {\n return this.providerMetadataPromise.value;\n }\n get providerMetadata() {\n return this.providerMetadataPromise.value;\n }\n get text() {\n return this.textPromise.value;\n }\n get reasoning() {\n return this.reasoningPromise.value;\n }\n get reasoningDetails() {\n return this.reasoningDetailsPromise.value;\n }\n get sources() {\n return this.sourcesPromise.value;\n }\n get files() {\n return this.filesPromise.value;\n }\n get toolCalls() {\n return this.toolCallsPromise.value;\n }\n get toolResults() {\n return this.toolResultsPromise.value;\n }\n get request() {\n return this.requestPromise.value;\n }\n get response() {\n return this.responsePromise.value;\n }\n get steps() {\n return this.stepsPromise.value;\n }\n /**\n Split out a new stream from the original stream.\n The original stream is replaced to allow for further splitting,\n since we do not know how many times the stream will be split.\n \n Note: this leads to buffering the stream content on the server.\n However, the LLM results are expected to be small enough to not cause issues.\n */\n teeStream() {\n const [stream1, stream2] = this.baseStream.tee();\n this.baseStream = stream2;\n return stream1;\n }\n get textStream() {\n return 
createAsyncIterableStream(\n this.teeStream().pipeThrough(\n new TransformStream({\n transform({ part }, controller) {\n if (part.type === \"text-delta\") {\n controller.enqueue(part.textDelta);\n }\n }\n })\n )\n );\n }\n get fullStream() {\n return createAsyncIterableStream(\n this.teeStream().pipeThrough(\n new TransformStream({\n transform({ part }, controller) {\n controller.enqueue(part);\n }\n })\n )\n );\n }\n async consumeStream(options) {\n var _a17;\n try {\n await consumeStream({\n stream: this.fullStream,\n onError: options == null ? void 0 : options.onError\n });\n } catch (error) {\n (_a17 = options == null ? void 0 : options.onError) == null ? void 0 : _a17.call(options, error);\n }\n }\n get experimental_partialOutputStream() {\n if (this.output == null) {\n throw new NoOutputSpecifiedError();\n }\n return createAsyncIterableStream(\n this.teeStream().pipeThrough(\n new TransformStream({\n transform({ partialOutput }, controller) {\n if (partialOutput != null) {\n controller.enqueue(partialOutput);\n }\n }\n })\n )\n );\n }\n toDataStreamInternal({\n getErrorMessage: getErrorMessage5 = () => \"An error occurred.\",\n // mask error messages for safety by default\n sendUsage = true,\n sendReasoning = false,\n sendSources = false,\n experimental_sendFinish = true\n }) {\n return this.fullStream.pipeThrough(\n new TransformStream({\n transform: async (chunk, controller) => {\n const chunkType = chunk.type;\n switch (chunkType) {\n case \"text-delta\": {\n controller.enqueue(formatDataStreamPart2(\"text\", chunk.textDelta));\n break;\n }\n case \"reasoning\": {\n if (sendReasoning) {\n controller.enqueue(\n formatDataStreamPart2(\"reasoning\", chunk.textDelta)\n );\n }\n break;\n }\n case \"redacted-reasoning\": {\n if (sendReasoning) {\n controller.enqueue(\n formatDataStreamPart2(\"redacted_reasoning\", {\n data: chunk.data\n })\n );\n }\n break;\n }\n case \"reasoning-signature\": {\n if (sendReasoning) {\n controller.enqueue(\n 
formatDataStreamPart2(\"reasoning_signature\", {\n signature: chunk.signature\n })\n );\n }\n break;\n }\n case \"file\": {\n controller.enqueue(\n formatDataStreamPart2(\"file\", {\n mimeType: chunk.mimeType,\n data: chunk.base64\n })\n );\n break;\n }\n case \"source\": {\n if (sendSources) {\n controller.enqueue(\n formatDataStreamPart2(\"source\", chunk.source)\n );\n }\n break;\n }\n case \"tool-call-streaming-start\": {\n controller.enqueue(\n formatDataStreamPart2(\"tool_call_streaming_start\", {\n toolCallId: chunk.toolCallId,\n toolName: chunk.toolName\n })\n );\n break;\n }\n case \"tool-call-delta\": {\n controller.enqueue(\n formatDataStreamPart2(\"tool_call_delta\", {\n toolCallId: chunk.toolCallId,\n argsTextDelta: chunk.argsTextDelta\n })\n );\n break;\n }\n case \"tool-call\": {\n controller.enqueue(\n formatDataStreamPart2(\"tool_call\", {\n toolCallId: chunk.toolCallId,\n toolName: chunk.toolName,\n args: chunk.args\n })\n );\n break;\n }\n case \"tool-result\": {\n controller.enqueue(\n formatDataStreamPart2(\"tool_result\", {\n toolCallId: chunk.toolCallId,\n result: chunk.result\n })\n );\n break;\n }\n case \"error\": {\n controller.enqueue(\n formatDataStreamPart2(\"error\", getErrorMessage5(chunk.error))\n );\n break;\n }\n case \"step-start\": {\n controller.enqueue(\n formatDataStreamPart2(\"start_step\", {\n messageId: chunk.messageId\n })\n );\n break;\n }\n case \"step-finish\": {\n controller.enqueue(\n formatDataStreamPart2(\"finish_step\", {\n finishReason: chunk.finishReason,\n usage: sendUsage ? {\n promptTokens: chunk.usage.promptTokens,\n completionTokens: chunk.usage.completionTokens\n } : void 0,\n isContinued: chunk.isContinued\n })\n );\n break;\n }\n case \"finish\": {\n if (experimental_sendFinish) {\n controller.enqueue(\n formatDataStreamPart2(\"finish_message\", {\n finishReason: chunk.finishReason,\n usage: sendUsage ? 
{\n promptTokens: chunk.usage.promptTokens,\n completionTokens: chunk.usage.completionTokens\n } : void 0\n })\n );\n }\n break;\n }\n default: {\n const exhaustiveCheck = chunkType;\n throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);\n }\n }\n }\n })\n );\n }\n pipeDataStreamToResponse(response, {\n status,\n statusText,\n headers,\n data,\n getErrorMessage: getErrorMessage5,\n sendUsage,\n sendReasoning,\n sendSources,\n experimental_sendFinish\n } = {}) {\n writeToServerResponse({\n response,\n status,\n statusText,\n headers: prepareOutgoingHttpHeaders(headers, {\n contentType: \"text/plain; charset=utf-8\",\n dataStreamVersion: \"v1\"\n }),\n stream: this.toDataStream({\n data,\n getErrorMessage: getErrorMessage5,\n sendUsage,\n sendReasoning,\n sendSources,\n experimental_sendFinish\n })\n });\n }\n pipeTextStreamToResponse(response, init) {\n writeToServerResponse({\n response,\n status: init == null ? void 0 : init.status,\n statusText: init == null ? void 0 : init.statusText,\n headers: prepareOutgoingHttpHeaders(init == null ? void 0 : init.headers, {\n contentType: \"text/plain; charset=utf-8\"\n }),\n stream: this.textStream.pipeThrough(new TextEncoderStream())\n });\n }\n // TODO breaking change 5.0: remove pipeThrough(new TextEncoderStream())\n toDataStream(options) {\n const stream = this.toDataStreamInternal({\n getErrorMessage: options == null ? void 0 : options.getErrorMessage,\n sendUsage: options == null ? void 0 : options.sendUsage,\n sendReasoning: options == null ? void 0 : options.sendReasoning,\n sendSources: options == null ? void 0 : options.sendSources,\n experimental_sendFinish: options == null ? void 0 : options.experimental_sendFinish\n }).pipeThrough(new TextEncoderStream());\n return (options == null ? void 0 : options.data) ? mergeStreams(options == null ? 
void 0 : options.data.stream, stream) : stream;\n }\n mergeIntoDataStream(writer, options) {\n writer.merge(\n this.toDataStreamInternal({\n getErrorMessage: writer.onError,\n sendUsage: options == null ? void 0 : options.sendUsage,\n sendReasoning: options == null ? void 0 : options.sendReasoning,\n sendSources: options == null ? void 0 : options.sendSources,\n experimental_sendFinish: options == null ? void 0 : options.experimental_sendFinish\n })\n );\n }\n toDataStreamResponse({\n headers,\n status,\n statusText,\n data,\n getErrorMessage: getErrorMessage5,\n sendUsage,\n sendReasoning,\n sendSources,\n experimental_sendFinish\n } = {}) {\n return new Response(\n this.toDataStream({\n data,\n getErrorMessage: getErrorMessage5,\n sendUsage,\n sendReasoning,\n sendSources,\n experimental_sendFinish\n }),\n {\n status,\n statusText,\n headers: prepareResponseHeaders(headers, {\n contentType: \"text/plain; charset=utf-8\",\n dataStreamVersion: \"v1\"\n })\n }\n );\n }\n toTextStreamResponse(init) {\n var _a17;\n return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {\n status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,\n headers: prepareResponseHeaders(init == null ? 
void 0 : init.headers, {\n contentType: \"text/plain; charset=utf-8\"\n })\n });\n }\n};\n\n// errors/no-speech-generated-error.ts\nimport { AISDKError as AISDKError19 } from \"@ai-sdk/provider\";\nvar NoSpeechGeneratedError = class extends AISDKError19 {\n constructor(options) {\n super({\n name: \"AI_NoSpeechGeneratedError\",\n message: \"No speech audio generated.\"\n });\n this.responses = options.responses;\n }\n};\n\n// core/generate-speech/generated-audio-file.ts\nvar DefaultGeneratedAudioFile = class extends DefaultGeneratedFile {\n constructor({\n data,\n mimeType\n }) {\n super({ data, mimeType });\n let format = \"mp3\";\n if (mimeType) {\n const mimeTypeParts = mimeType.split(\"/\");\n if (mimeTypeParts.length === 2) {\n if (mimeType !== \"audio/mpeg\") {\n format = mimeTypeParts[1];\n }\n }\n }\n if (!format) {\n throw new Error(\n \"Audio format must be provided or determinable from mimeType\"\n );\n }\n this.format = format;\n }\n};\n\n// core/generate-speech/generate-speech.ts\nasync function generateSpeech({\n model,\n text: text2,\n voice,\n outputFormat,\n instructions,\n speed,\n providerOptions = {},\n maxRetries: maxRetriesArg,\n abortSignal,\n headers\n}) {\n var _a17;\n if (typeof model === \"string\" || model.specificationVersion !== \"v1\") {\n throw new UnsupportedModelVersionError();\n }\n const { retry } = prepareRetries({ maxRetries: maxRetriesArg });\n const result = await retry(\n () => model.doGenerate({\n text: text2,\n voice,\n outputFormat,\n instructions,\n speed,\n abortSignal,\n headers,\n providerOptions\n })\n );\n if (!result.audio || result.audio.length === 0) {\n throw new NoSpeechGeneratedError({ responses: [result.response] });\n }\n return new DefaultSpeechResult({\n audio: new DefaultGeneratedAudioFile({\n data: result.audio,\n mimeType: (_a17 = detectMimeType({\n data: result.audio,\n signatures: audioMimeTypeSignatures\n })) != null ? 
_a17 : \"audio/mp3\"\n }),\n warnings: result.warnings,\n responses: [result.response],\n providerMetadata: result.providerMetadata\n });\n}\nvar DefaultSpeechResult = class {\n constructor(options) {\n var _a17;\n this.audio = options.audio;\n this.warnings = options.warnings;\n this.responses = options.responses;\n this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};\n }\n};\n\n// errors/no-transcript-generated-error.ts\nimport { AISDKError as AISDKError20 } from \"@ai-sdk/provider\";\nvar NoTranscriptGeneratedError = class extends AISDKError20 {\n constructor(options) {\n super({\n name: \"AI_NoTranscriptGeneratedError\",\n message: \"No transcript generated.\"\n });\n this.responses = options.responses;\n }\n};\n\n// core/transcribe/transcribe.ts\nasync function transcribe({\n model,\n audio,\n providerOptions = {},\n maxRetries: maxRetriesArg,\n abortSignal,\n headers\n}) {\n if (typeof model === \"string\" || model.specificationVersion !== \"v1\") {\n throw new UnsupportedModelVersionError();\n }\n const { retry } = prepareRetries({ maxRetries: maxRetriesArg });\n const audioData = audio instanceof URL ? (await download({ url: audio })).data : convertDataContentToUint8Array(audio);\n const result = await retry(\n () => {\n var _a17;\n return model.doGenerate({\n audio: audioData,\n abortSignal,\n headers,\n providerOptions,\n mediaType: (_a17 = detectMimeType({\n data: audioData,\n signatures: audioMimeTypeSignatures\n })) != null ? 
_a17 : \"audio/wav\"\n });\n }\n );\n if (!result.text) {\n throw new NoTranscriptGeneratedError({ responses: [result.response] });\n }\n return new DefaultTranscriptionResult({\n text: result.text,\n segments: result.segments,\n language: result.language,\n durationInSeconds: result.durationInSeconds,\n warnings: result.warnings,\n responses: [result.response],\n providerMetadata: result.providerMetadata\n });\n}\nvar DefaultTranscriptionResult = class {\n constructor(options) {\n var _a17;\n this.text = options.text;\n this.segments = options.segments;\n this.language = options.language;\n this.durationInSeconds = options.durationInSeconds;\n this.warnings = options.warnings;\n this.responses = options.responses;\n this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};\n }\n};\n\n// core/util/merge-objects.ts\nfunction mergeObjects(target, source) {\n if (target === void 0 && source === void 0) {\n return void 0;\n }\n if (target === void 0) {\n return source;\n }\n if (source === void 0) {\n return target;\n }\n const result = { ...target };\n for (const key in source) {\n if (Object.prototype.hasOwnProperty.call(source, key)) {\n const sourceValue = source[key];\n if (sourceValue === void 0)\n continue;\n const targetValue = key in target ? 
target[key] : void 0;\n const isSourceObject = sourceValue !== null && typeof sourceValue === \"object\" && !Array.isArray(sourceValue) && !(sourceValue instanceof Date) && !(sourceValue instanceof RegExp);\n const isTargetObject = targetValue !== null && targetValue !== void 0 && typeof targetValue === \"object\" && !Array.isArray(targetValue) && !(targetValue instanceof Date) && !(targetValue instanceof RegExp);\n if (isSourceObject && isTargetObject) {\n result[key] = mergeObjects(\n targetValue,\n sourceValue\n );\n } else {\n result[key] = sourceValue;\n }\n }\n }\n return result;\n}\n\n// core/middleware/default-settings-middleware.ts\nfunction defaultSettingsMiddleware({\n settings\n}) {\n return {\n middlewareVersion: \"v1\",\n transformParams: async ({ params }) => {\n var _a17;\n return {\n ...settings,\n ...params,\n providerMetadata: mergeObjects(\n settings.providerMetadata,\n params.providerMetadata\n ),\n // special case for temperature 0\n // TODO remove when temperature defaults to undefined\n temperature: params.temperature === 0 || params.temperature == null ? (_a17 = settings.temperature) != null ? 
_a17 : 0 : params.temperature\n };\n }\n };\n}\n\n// core/util/get-potential-start-index.ts\nfunction getPotentialStartIndex(text2, searchedText) {\n if (searchedText.length === 0) {\n return null;\n }\n const directIndex = text2.indexOf(searchedText);\n if (directIndex !== -1) {\n return directIndex;\n }\n for (let i = text2.length - 1; i >= 0; i--) {\n const suffix = text2.substring(i);\n if (searchedText.startsWith(suffix)) {\n return i;\n }\n }\n return null;\n}\n\n// core/middleware/extract-reasoning-middleware.ts\nfunction extractReasoningMiddleware({\n tagName,\n separator = \"\\n\",\n startWithReasoning = false\n}) {\n const openingTag = `<${tagName}>`;\n const closingTag = `</${tagName}>`;\n return {\n middlewareVersion: \"v1\",\n wrapGenerate: async ({ doGenerate }) => {\n const { text: rawText, ...rest } = await doGenerate();\n if (rawText == null) {\n return { text: rawText, ...rest };\n }\n const text2 = startWithReasoning ? openingTag + rawText : rawText;\n const regexp = new RegExp(`${openingTag}(.*?)${closingTag}`, \"gs\");\n const matches = Array.from(text2.matchAll(regexp));\n if (!matches.length) {\n return { text: text2, ...rest };\n }\n const reasoning = matches.map((match) => match[1]).join(separator);\n let textWithoutReasoning = text2;\n for (let i = matches.length - 1; i >= 0; i--) {\n const match = matches[i];\n const beforeMatch = textWithoutReasoning.slice(0, match.index);\n const afterMatch = textWithoutReasoning.slice(\n match.index + match[0].length\n );\n textWithoutReasoning = beforeMatch + (beforeMatch.length > 0 && afterMatch.length > 0 ? 
separator : \"\") + afterMatch;\n }\n return { ...rest, text: textWithoutReasoning, reasoning };\n },\n wrapStream: async ({ doStream }) => {\n const { stream, ...rest } = await doStream();\n let isFirstReasoning = true;\n let isFirstText = true;\n let afterSwitch = false;\n let isReasoning = startWithReasoning;\n let buffer = \"\";\n return {\n stream: stream.pipeThrough(\n new TransformStream({\n transform: (chunk, controller) => {\n if (chunk.type !== \"text-delta\") {\n controller.enqueue(chunk);\n return;\n }\n buffer += chunk.textDelta;\n function publish(text2) {\n if (text2.length > 0) {\n const prefix = afterSwitch && (isReasoning ? !isFirstReasoning : !isFirstText) ? separator : \"\";\n controller.enqueue({\n type: isReasoning ? \"reasoning\" : \"text-delta\",\n textDelta: prefix + text2\n });\n afterSwitch = false;\n if (isReasoning) {\n isFirstReasoning = false;\n } else {\n isFirstText = false;\n }\n }\n }\n do {\n const nextTag = isReasoning ? closingTag : openingTag;\n const startIndex = getPotentialStartIndex(buffer, nextTag);\n if (startIndex == null) {\n publish(buffer);\n buffer = \"\";\n break;\n }\n publish(buffer.slice(0, startIndex));\n const foundFullMatch = startIndex + nextTag.length <= buffer.length;\n if (foundFullMatch) {\n buffer = buffer.slice(startIndex + nextTag.length);\n isReasoning = !isReasoning;\n afterSwitch = true;\n } else {\n buffer = buffer.slice(startIndex);\n break;\n }\n } while (true);\n }\n })\n ),\n ...rest\n };\n }\n };\n}\n\n// core/middleware/simulate-streaming-middleware.ts\nfunction simulateStreamingMiddleware() {\n return {\n middlewareVersion: \"v1\",\n wrapStream: async ({ doGenerate }) => {\n const result = await doGenerate();\n const simulatedStream = new ReadableStream({\n start(controller) {\n controller.enqueue({ type: \"response-metadata\", ...result.response });\n if (result.reasoning) {\n if (typeof result.reasoning === \"string\") {\n controller.enqueue({\n type: \"reasoning\",\n textDelta: 
result.reasoning\n });\n } else {\n for (const reasoning of result.reasoning) {\n switch (reasoning.type) {\n case \"text\": {\n controller.enqueue({\n type: \"reasoning\",\n textDelta: reasoning.text\n });\n if (reasoning.signature != null) {\n controller.enqueue({\n type: \"reasoning-signature\",\n signature: reasoning.signature\n });\n }\n break;\n }\n case \"redacted\": {\n controller.enqueue({\n type: \"redacted-reasoning\",\n data: reasoning.data\n });\n break;\n }\n }\n }\n }\n }\n if (result.text) {\n controller.enqueue({\n type: \"text-delta\",\n textDelta: result.text\n });\n }\n if (result.toolCalls) {\n for (const toolCall of result.toolCalls) {\n controller.enqueue({\n type: \"tool-call-delta\",\n toolCallType: \"function\",\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n argsTextDelta: toolCall.args\n });\n controller.enqueue({\n type: \"tool-call\",\n ...toolCall\n });\n }\n }\n controller.enqueue({\n type: \"finish\",\n finishReason: result.finishReason,\n usage: result.usage,\n logprobs: result.logprobs,\n providerMetadata: result.providerMetadata\n });\n controller.close();\n }\n });\n return {\n stream: simulatedStream,\n rawCall: result.rawCall,\n rawResponse: result.rawResponse,\n warnings: result.warnings\n };\n }\n };\n}\n\n// core/middleware/wrap-language-model.ts\nvar wrapLanguageModel = ({\n model,\n middleware: middlewareArg,\n modelId,\n providerId\n}) => {\n return asArray(middlewareArg).reverse().reduce((wrappedModel, middleware) => {\n return doWrap({ model: wrappedModel, middleware, modelId, providerId });\n }, model);\n};\nvar doWrap = ({\n model,\n middleware: { transformParams, wrapGenerate, wrapStream },\n modelId,\n providerId\n}) => {\n var _a17;\n async function doTransform({\n params,\n type\n }) {\n return transformParams ? await transformParams({ params, type }) : params;\n }\n return {\n specificationVersion: \"v1\",\n provider: providerId != null ? 
providerId : model.provider,\n modelId: modelId != null ? modelId : model.modelId,\n defaultObjectGenerationMode: model.defaultObjectGenerationMode,\n supportsImageUrls: model.supportsImageUrls,\n supportsUrl: (_a17 = model.supportsUrl) == null ? void 0 : _a17.bind(model),\n supportsStructuredOutputs: model.supportsStructuredOutputs,\n async doGenerate(params) {\n const transformedParams = await doTransform({ params, type: \"generate\" });\n const doGenerate = async () => model.doGenerate(transformedParams);\n const doStream = async () => model.doStream(transformedParams);\n return wrapGenerate ? wrapGenerate({\n doGenerate,\n doStream,\n params: transformedParams,\n model\n }) : doGenerate();\n },\n async doStream(params) {\n const transformedParams = await doTransform({ params, type: \"stream\" });\n const doGenerate = async () => model.doGenerate(transformedParams);\n const doStream = async () => model.doStream(transformedParams);\n return wrapStream ? wrapStream({ doGenerate, doStream, params: transformedParams, model }) : doStream();\n }\n };\n};\nvar experimental_wrapLanguageModel = wrapLanguageModel;\n\n// core/prompt/append-client-message.ts\nfunction appendClientMessage({\n messages,\n message\n}) {\n return [\n ...messages.length > 0 && messages[messages.length - 1].id === message.id ? 
messages.slice(0, -1) : messages,\n message\n ];\n}\n\n// core/prompt/append-response-messages.ts\nimport {\n extractMaxToolInvocationStep\n} from \"@ai-sdk/ui-utils\";\nimport { AISDKError as AISDKError21 } from \"@ai-sdk/provider\";\nfunction appendResponseMessages({\n messages,\n responseMessages,\n _internal: { currentDate = () => /* @__PURE__ */ new Date() } = {}\n}) {\n var _a17, _b, _c, _d;\n const clonedMessages = structuredClone(messages);\n for (const message of responseMessages) {\n const role = message.role;\n const lastMessage = clonedMessages[clonedMessages.length - 1];\n const isLastMessageAssistant = lastMessage.role === \"assistant\";\n switch (role) {\n case \"assistant\": {\n let getToolInvocations2 = function(step) {\n return (typeof message.content === \"string\" ? [] : message.content.filter((part) => part.type === \"tool-call\")).map((call) => ({\n state: \"call\",\n step,\n args: call.args,\n toolCallId: call.toolCallId,\n toolName: call.toolName\n }));\n };\n var getToolInvocations = getToolInvocations2;\n const parts = [{ type: \"step-start\" }];\n let textContent = \"\";\n let reasoningTextContent = void 0;\n if (typeof message.content === \"string\") {\n textContent = message.content;\n parts.push({\n type: \"text\",\n text: message.content\n });\n } else {\n let reasoningPart = void 0;\n for (const part of message.content) {\n switch (part.type) {\n case \"text\": {\n reasoningPart = void 0;\n textContent += part.text;\n parts.push({\n type: \"text\",\n text: part.text\n });\n break;\n }\n case \"reasoning\": {\n if (reasoningPart == null) {\n reasoningPart = {\n type: \"reasoning\",\n reasoning: \"\",\n details: []\n };\n parts.push(reasoningPart);\n }\n reasoningTextContent = (reasoningTextContent != null ? 
reasoningTextContent : \"\") + part.text;\n reasoningPart.reasoning += part.text;\n reasoningPart.details.push({\n type: \"text\",\n text: part.text,\n signature: part.signature\n });\n break;\n }\n case \"redacted-reasoning\": {\n if (reasoningPart == null) {\n reasoningPart = {\n type: \"reasoning\",\n reasoning: \"\",\n details: []\n };\n parts.push(reasoningPart);\n }\n reasoningPart.details.push({\n type: \"redacted\",\n data: part.data\n });\n break;\n }\n case \"tool-call\":\n break;\n case \"file\":\n if (part.data instanceof URL) {\n throw new AISDKError21({\n name: \"InvalidAssistantFileData\",\n message: \"File data cannot be a URL\"\n });\n }\n parts.push({\n type: \"file\",\n mimeType: part.mimeType,\n data: convertDataContentToBase64String(part.data)\n });\n break;\n }\n }\n }\n if (isLastMessageAssistant) {\n const maxStep = extractMaxToolInvocationStep(\n lastMessage.toolInvocations\n );\n (_a17 = lastMessage.parts) != null ? _a17 : lastMessage.parts = [];\n lastMessage.content = textContent;\n lastMessage.reasoning = reasoningTextContent;\n lastMessage.parts.push(...parts);\n lastMessage.toolInvocations = [\n ...(_b = lastMessage.toolInvocations) != null ? _b : [],\n ...getToolInvocations2(maxStep === void 0 ? 0 : maxStep + 1)\n ];\n getToolInvocations2(maxStep === void 0 ? 0 : maxStep + 1).map((call) => ({\n type: \"tool-invocation\",\n toolInvocation: call\n })).forEach((part) => {\n lastMessage.parts.push(part);\n });\n } else {\n clonedMessages.push({\n role: \"assistant\",\n id: message.id,\n createdAt: currentDate(),\n // generate a createdAt date for the message, will be overridden by the client\n content: textContent,\n reasoning: reasoningTextContent,\n toolInvocations: getToolInvocations2(0),\n parts: [\n ...parts,\n ...getToolInvocations2(0).map((call) => ({\n type: \"tool-invocation\",\n toolInvocation: call\n }))\n ]\n });\n }\n break;\n }\n case \"tool\": {\n (_c = lastMessage.toolInvocations) != null ? 
_c : lastMessage.toolInvocations = [];\n if (lastMessage.role !== \"assistant\") {\n throw new Error(\n `Tool result must follow an assistant message: ${lastMessage.role}`\n );\n }\n (_d = lastMessage.parts) != null ? _d : lastMessage.parts = [];\n for (const contentPart of message.content) {\n const toolCall = lastMessage.toolInvocations.find(\n (call) => call.toolCallId === contentPart.toolCallId\n );\n const toolCallPart = lastMessage.parts.find(\n (part) => part.type === \"tool-invocation\" && part.toolInvocation.toolCallId === contentPart.toolCallId\n );\n if (!toolCall) {\n throw new Error(\"Tool call not found in previous message\");\n }\n toolCall.state = \"result\";\n const toolResult = toolCall;\n toolResult.result = contentPart.result;\n if (toolCallPart) {\n toolCallPart.toolInvocation = toolResult;\n } else {\n lastMessage.parts.push({\n type: \"tool-invocation\",\n toolInvocation: toolResult\n });\n }\n }\n break;\n }\n default: {\n const _exhaustiveCheck = role;\n throw new Error(`Unsupported message role: ${_exhaustiveCheck}`);\n }\n }\n }\n return clonedMessages;\n}\n\n// core/registry/custom-provider.ts\nimport { NoSuchModelError as NoSuchModelError2 } from \"@ai-sdk/provider\";\nfunction customProvider({\n languageModels,\n textEmbeddingModels,\n imageModels,\n fallbackProvider\n}) {\n return {\n languageModel(modelId) {\n if (languageModels != null && modelId in languageModels) {\n return languageModels[modelId];\n }\n if (fallbackProvider) {\n return fallbackProvider.languageModel(modelId);\n }\n throw new NoSuchModelError2({ modelId, modelType: \"languageModel\" });\n },\n textEmbeddingModel(modelId) {\n if (textEmbeddingModels != null && modelId in textEmbeddingModels) {\n return textEmbeddingModels[modelId];\n }\n if (fallbackProvider) {\n return fallbackProvider.textEmbeddingModel(modelId);\n }\n throw new NoSuchModelError2({ modelId, modelType: \"textEmbeddingModel\" });\n },\n imageModel(modelId) {\n if (imageModels != null && modelId in 
imageModels) {\n return imageModels[modelId];\n }\n if (fallbackProvider == null ? void 0 : fallbackProvider.imageModel) {\n return fallbackProvider.imageModel(modelId);\n }\n throw new NoSuchModelError2({ modelId, modelType: \"imageModel\" });\n }\n };\n}\nvar experimental_customProvider = customProvider;\n\n// core/registry/no-such-provider-error.ts\nimport { AISDKError as AISDKError22, NoSuchModelError as NoSuchModelError3 } from \"@ai-sdk/provider\";\nvar name16 = \"AI_NoSuchProviderError\";\nvar marker16 = `vercel.ai.error.${name16}`;\nvar symbol16 = Symbol.for(marker16);\nvar _a16;\nvar NoSuchProviderError = class extends NoSuchModelError3 {\n constructor({\n modelId,\n modelType,\n providerId,\n availableProviders,\n message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`\n }) {\n super({ errorName: name16, modelId, modelType, message });\n this[_a16] = true;\n this.providerId = providerId;\n this.availableProviders = availableProviders;\n }\n static isInstance(error) {\n return AISDKError22.hasMarker(error, marker16);\n }\n};\n_a16 = symbol16;\n\n// core/registry/provider-registry.ts\nimport { NoSuchModelError as NoSuchModelError4 } from \"@ai-sdk/provider\";\nfunction createProviderRegistry(providers, {\n separator = \":\"\n} = {}) {\n const registry = new DefaultProviderRegistry({\n separator\n });\n for (const [id, provider] of Object.entries(providers)) {\n registry.registerProvider({ id, provider });\n }\n return registry;\n}\nvar experimental_createProviderRegistry = createProviderRegistry;\nvar DefaultProviderRegistry = class {\n constructor({ separator }) {\n this.providers = {};\n this.separator = separator;\n }\n registerProvider({\n id,\n provider\n }) {\n this.providers[id] = provider;\n }\n getProvider(id) {\n const provider = this.providers[id];\n if (provider == null) {\n throw new NoSuchProviderError({\n modelId: id,\n modelType: \"languageModel\",\n providerId: id,\n availableProviders: 
Object.keys(this.providers)\n });\n }\n return provider;\n }\n splitId(id, modelType) {\n const index = id.indexOf(this.separator);\n if (index === -1) {\n throw new NoSuchModelError4({\n modelId: id,\n modelType,\n message: `Invalid ${modelType} id for registry: ${id} (must be in the format \"providerId${this.separator}modelId\")`\n });\n }\n return [id.slice(0, index), id.slice(index + this.separator.length)];\n }\n languageModel(id) {\n var _a17, _b;\n const [providerId, modelId] = this.splitId(id, \"languageModel\");\n const model = (_b = (_a17 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a17, modelId);\n if (model == null) {\n throw new NoSuchModelError4({ modelId: id, modelType: \"languageModel\" });\n }\n return model;\n }\n textEmbeddingModel(id) {\n var _a17;\n const [providerId, modelId] = this.splitId(id, \"textEmbeddingModel\");\n const provider = this.getProvider(providerId);\n const model = (_a17 = provider.textEmbeddingModel) == null ? void 0 : _a17.call(provider, modelId);\n if (model == null) {\n throw new NoSuchModelError4({\n modelId: id,\n modelType: \"textEmbeddingModel\"\n });\n }\n return model;\n }\n imageModel(id) {\n var _a17;\n const [providerId, modelId] = this.splitId(id, \"imageModel\");\n const provider = this.getProvider(providerId);\n const model = (_a17 = provider.imageModel) == null ? 
void 0 : _a17.call(provider, modelId);\n if (model == null) {\n throw new NoSuchModelError4({ modelId: id, modelType: \"imageModel\" });\n }\n return model;\n }\n};\n\n// core/tool/mcp/mcp-client.ts\nimport { jsonSchema } from \"@ai-sdk/ui-utils\";\n\n// core/tool/tool.ts\nfunction tool(tool2) {\n return tool2;\n}\n\n// core/tool/mcp/mcp-sse-transport.ts\nimport { createEventSourceParserStream } from \"@ai-sdk/provider-utils\";\n\n// core/tool/mcp/json-rpc-message.ts\nimport { z as z9 } from \"zod\";\n\n// core/tool/mcp/types.ts\nimport { z as z8 } from \"zod\";\nvar LATEST_PROTOCOL_VERSION = \"2024-11-05\";\nvar SUPPORTED_PROTOCOL_VERSIONS = [\n LATEST_PROTOCOL_VERSION,\n \"2024-10-07\"\n];\nvar ClientOrServerImplementationSchema = z8.object({\n name: z8.string(),\n version: z8.string()\n}).passthrough();\nvar BaseParamsSchema = z8.object({\n _meta: z8.optional(z8.object({}).passthrough())\n}).passthrough();\nvar ResultSchema = BaseParamsSchema;\nvar RequestSchema = z8.object({\n method: z8.string(),\n params: z8.optional(BaseParamsSchema)\n});\nvar ServerCapabilitiesSchema = z8.object({\n experimental: z8.optional(z8.object({}).passthrough()),\n logging: z8.optional(z8.object({}).passthrough()),\n prompts: z8.optional(\n z8.object({\n listChanged: z8.optional(z8.boolean())\n }).passthrough()\n ),\n resources: z8.optional(\n z8.object({\n subscribe: z8.optional(z8.boolean()),\n listChanged: z8.optional(z8.boolean())\n }).passthrough()\n ),\n tools: z8.optional(\n z8.object({\n listChanged: z8.optional(z8.boolean())\n }).passthrough()\n )\n}).passthrough();\nvar InitializeResultSchema = ResultSchema.extend({\n protocolVersion: z8.string(),\n capabilities: ServerCapabilitiesSchema,\n serverInfo: ClientOrServerImplementationSchema,\n instructions: z8.optional(z8.string())\n});\nvar PaginatedResultSchema = ResultSchema.extend({\n nextCursor: z8.optional(z8.string())\n});\nvar ToolSchema = z8.object({\n name: z8.string(),\n description: z8.optional(z8.string()),\n 
inputSchema: z8.object({\n type: z8.literal(\"object\"),\n properties: z8.optional(z8.object({}).passthrough())\n }).passthrough()\n}).passthrough();\nvar ListToolsResultSchema = PaginatedResultSchema.extend({\n tools: z8.array(ToolSchema)\n});\nvar TextContentSchema = z8.object({\n type: z8.literal(\"text\"),\n text: z8.string()\n}).passthrough();\nvar ImageContentSchema = z8.object({\n type: z8.literal(\"image\"),\n data: z8.string().base64(),\n mimeType: z8.string()\n}).passthrough();\nvar ResourceContentsSchema = z8.object({\n /**\n * The URI of this resource.\n */\n uri: z8.string(),\n /**\n * The MIME type of this resource, if known.\n */\n mimeType: z8.optional(z8.string())\n}).passthrough();\nvar TextResourceContentsSchema = ResourceContentsSchema.extend({\n text: z8.string()\n});\nvar BlobResourceContentsSchema = ResourceContentsSchema.extend({\n blob: z8.string().base64()\n});\nvar EmbeddedResourceSchema = z8.object({\n type: z8.literal(\"resource\"),\n resource: z8.union([TextResourceContentsSchema, BlobResourceContentsSchema])\n}).passthrough();\nvar CallToolResultSchema = ResultSchema.extend({\n content: z8.array(\n z8.union([TextContentSchema, ImageContentSchema, EmbeddedResourceSchema])\n ),\n isError: z8.boolean().default(false).optional()\n}).or(\n ResultSchema.extend({\n toolResult: z8.unknown()\n })\n);\n\n// core/tool/mcp/json-rpc-message.ts\nvar JSONRPC_VERSION = \"2.0\";\nvar JSONRPCRequestSchema = z9.object({\n jsonrpc: z9.literal(JSONRPC_VERSION),\n id: z9.union([z9.string(), z9.number().int()])\n}).merge(RequestSchema).strict();\nvar JSONRPCResponseSchema = z9.object({\n jsonrpc: z9.literal(JSONRPC_VERSION),\n id: z9.union([z9.string(), z9.number().int()]),\n result: ResultSchema\n}).strict();\nvar JSONRPCErrorSchema = z9.object({\n jsonrpc: z9.literal(JSONRPC_VERSION),\n id: z9.union([z9.string(), z9.number().int()]),\n error: z9.object({\n code: z9.number().int(),\n message: z9.string(),\n data: z9.optional(z9.unknown())\n 
})\n}).strict();\nvar JSONRPCNotificationSchema = z9.object({\n jsonrpc: z9.literal(JSONRPC_VERSION)\n}).merge(\n z9.object({\n method: z9.string(),\n params: z9.optional(BaseParamsSchema)\n })\n).strict();\nvar JSONRPCMessageSchema = z9.union([\n JSONRPCRequestSchema,\n JSONRPCNotificationSchema,\n JSONRPCResponseSchema,\n JSONRPCErrorSchema\n]);\n\n// core/tool/mcp/mcp-sse-transport.ts\nvar SseMCPTransport = class {\n constructor({\n url,\n headers\n }) {\n this.connected = false;\n this.url = new URL(url);\n this.headers = headers;\n }\n async start() {\n return new Promise((resolve, reject) => {\n if (this.connected) {\n return resolve();\n }\n this.abortController = new AbortController();\n const establishConnection = async () => {\n var _a17, _b, _c;\n try {\n const headers = new Headers(this.headers);\n headers.set(\"Accept\", \"text/event-stream\");\n const response = await fetch(this.url.href, {\n headers,\n signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal\n });\n if (!response.ok || !response.body) {\n const error = new MCPClientError({\n message: `MCP SSE Transport Error: ${response.status} ${response.statusText}`\n });\n (_b = this.onerror) == null ? 
void 0 : _b.call(this, error);\n return reject(error);\n }\n const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(createEventSourceParserStream());\n const reader = stream.getReader();\n const processEvents = async () => {\n var _a18, _b2, _c2;\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done) {\n if (this.connected) {\n this.connected = false;\n throw new MCPClientError({\n message: \"MCP SSE Transport Error: Connection closed unexpectedly\"\n });\n }\n return;\n }\n const { event, data } = value;\n if (event === \"endpoint\") {\n this.endpoint = new URL(data, this.url);\n if (this.endpoint.origin !== this.url.origin) {\n throw new MCPClientError({\n message: `MCP SSE Transport Error: Endpoint origin does not match connection origin: ${this.endpoint.origin}`\n });\n }\n this.connected = true;\n resolve();\n } else if (event === \"message\") {\n try {\n const message = JSONRPCMessageSchema.parse(\n JSON.parse(data)\n );\n (_a18 = this.onmessage) == null ? void 0 : _a18.call(this, message);\n } catch (error) {\n const e = new MCPClientError({\n message: \"MCP SSE Transport Error: Failed to parse message\",\n cause: error\n });\n (_b2 = this.onerror) == null ? void 0 : _b2.call(this, e);\n }\n }\n }\n } catch (error) {\n if (error instanceof Error && error.name === \"AbortError\") {\n return;\n }\n (_c2 = this.onerror) == null ? void 0 : _c2.call(this, error);\n reject(error);\n }\n };\n this.sseConnection = {\n close: () => reader.cancel()\n };\n processEvents();\n } catch (error) {\n if (error instanceof Error && error.name === \"AbortError\") {\n return;\n }\n (_c = this.onerror) == null ? void 0 : _c.call(this, error);\n reject(error);\n }\n };\n establishConnection();\n });\n }\n async close() {\n var _a17, _b, _c;\n this.connected = false;\n (_a17 = this.sseConnection) == null ? void 0 : _a17.close();\n (_b = this.abortController) == null ? void 0 : _b.abort();\n (_c = this.onclose) == null ? 
void 0 : _c.call(this);\n }\n async send(message) {\n var _a17, _b, _c;\n if (!this.endpoint || !this.connected) {\n throw new MCPClientError({\n message: \"MCP SSE Transport Error: Not connected\"\n });\n }\n try {\n const headers = new Headers(this.headers);\n headers.set(\"Content-Type\", \"application/json\");\n const init = {\n method: \"POST\",\n headers,\n body: JSON.stringify(message),\n signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal\n };\n const response = await fetch(this.endpoint, init);\n if (!response.ok) {\n const text2 = await response.text().catch(() => null);\n const error = new MCPClientError({\n message: `MCP SSE Transport Error: POSTing to endpoint (HTTP ${response.status}): ${text2}`\n });\n (_b = this.onerror) == null ? void 0 : _b.call(this, error);\n return;\n }\n } catch (error) {\n (_c = this.onerror) == null ? void 0 : _c.call(this, error);\n return;\n }\n }\n};\n\n// core/tool/mcp/mcp-transport.ts\nfunction createMcpTransport(config) {\n if (config.type !== \"sse\") {\n throw new MCPClientError({\n message: \"Unsupported or invalid transport configuration. 
If you are using a custom transport, make sure it implements the MCPTransport interface.\"\n });\n }\n return new SseMCPTransport(config);\n}\nfunction isCustomMcpTransport(transport) {\n return \"start\" in transport && typeof transport.start === \"function\" && \"send\" in transport && typeof transport.send === \"function\" && \"close\" in transport && typeof transport.close === \"function\";\n}\n\n// core/tool/mcp/mcp-client.ts\nvar CLIENT_VERSION = \"1.0.0\";\nasync function createMCPClient(config) {\n const client = new MCPClient(config);\n await client.init();\n return client;\n}\nvar MCPClient = class {\n constructor({\n transport: transportConfig,\n name: name17 = \"ai-sdk-mcp-client\",\n onUncaughtError\n }) {\n this.requestMessageId = 0;\n this.responseHandlers = /* @__PURE__ */ new Map();\n this.serverCapabilities = {};\n this.isClosed = true;\n this.onUncaughtError = onUncaughtError;\n if (isCustomMcpTransport(transportConfig)) {\n this.transport = transportConfig;\n } else {\n this.transport = createMcpTransport(transportConfig);\n }\n this.transport.onclose = () => this.onClose();\n this.transport.onerror = (error) => this.onError(error);\n this.transport.onmessage = (message) => {\n if (\"method\" in message) {\n this.onError(\n new MCPClientError({\n message: \"Unsupported message type\"\n })\n );\n return;\n }\n this.onResponse(message);\n };\n this.clientInfo = {\n name: name17,\n version: CLIENT_VERSION\n };\n }\n async init() {\n try {\n await this.transport.start();\n this.isClosed = false;\n const result = await this.request({\n request: {\n method: \"initialize\",\n params: {\n protocolVersion: LATEST_PROTOCOL_VERSION,\n capabilities: {},\n clientInfo: this.clientInfo\n }\n },\n resultSchema: InitializeResultSchema\n });\n if (result === void 0) {\n throw new MCPClientError({\n message: \"Server sent invalid initialize result\"\n });\n }\n if (!SUPPORTED_PROTOCOL_VERSIONS.includes(result.protocolVersion)) {\n throw new MCPClientError({\n 
message: `Server's protocol version is not supported: ${result.protocolVersion}`\n });\n }\n this.serverCapabilities = result.capabilities;\n await this.notification({\n method: \"notifications/initialized\"\n });\n return this;\n } catch (error) {\n await this.close();\n throw error;\n }\n }\n async close() {\n var _a17;\n if (this.isClosed)\n return;\n await ((_a17 = this.transport) == null ? void 0 : _a17.close());\n this.onClose();\n }\n assertCapability(method) {\n switch (method) {\n case \"initialize\":\n break;\n case \"tools/list\":\n case \"tools/call\":\n if (!this.serverCapabilities.tools) {\n throw new MCPClientError({\n message: `Server does not support tools`\n });\n }\n break;\n default:\n throw new MCPClientError({\n message: `Unsupported method: ${method}`\n });\n }\n }\n async request({\n request,\n resultSchema,\n options\n }) {\n return new Promise((resolve, reject) => {\n if (this.isClosed) {\n return reject(\n new MCPClientError({\n message: \"Attempted to send a request from a closed client\"\n })\n );\n }\n this.assertCapability(request.method);\n const signal = options == null ? void 0 : options.signal;\n signal == null ? void 0 : signal.throwIfAborted();\n const messageId = this.requestMessageId++;\n const jsonrpcRequest = {\n ...request,\n jsonrpc: \"2.0\",\n id: messageId\n };\n const cleanup = () => {\n this.responseHandlers.delete(messageId);\n };\n this.responseHandlers.set(messageId, (response) => {\n if (signal == null ? 
void 0 : signal.aborted) {\n return reject(\n new MCPClientError({\n message: \"Request was aborted\",\n cause: signal.reason\n })\n );\n }\n if (response instanceof Error) {\n return reject(response);\n }\n try {\n const result = resultSchema.parse(response.result);\n resolve(result);\n } catch (error) {\n const parseError = new MCPClientError({\n message: \"Failed to parse server response\",\n cause: error\n });\n reject(parseError);\n }\n });\n this.transport.send(jsonrpcRequest).catch((error) => {\n cleanup();\n reject(error);\n });\n });\n }\n async listTools({\n params,\n options\n } = {}) {\n try {\n return this.request({\n request: { method: \"tools/list\", params },\n resultSchema: ListToolsResultSchema,\n options\n });\n } catch (error) {\n throw error;\n }\n }\n async callTool({\n name: name17,\n args,\n options\n }) {\n try {\n return this.request({\n request: { method: \"tools/call\", params: { name: name17, arguments: args } },\n resultSchema: CallToolResultSchema,\n options: {\n signal: options == null ? void 0 : options.abortSignal\n }\n });\n } catch (error) {\n throw error;\n }\n }\n async notification(notification) {\n const jsonrpcNotification = {\n ...notification,\n jsonrpc: \"2.0\"\n };\n await this.transport.send(jsonrpcNotification);\n }\n /**\n * Returns a set of AI SDK tools from the MCP server\n * @returns A record of tool names to their implementations\n */\n async tools({\n schemas = \"automatic\"\n } = {}) {\n var _a17;\n const tools = {};\n try {\n const listToolsResult = await this.listTools();\n for (const { name: name17, description, inputSchema } of listToolsResult.tools) {\n if (schemas !== \"automatic\" && !(name17 in schemas)) {\n continue;\n }\n const parameters = schemas === \"automatic\" ? jsonSchema({\n ...inputSchema,\n properties: (_a17 = inputSchema.properties) != null ? 
_a17 : {},\n additionalProperties: false\n }) : schemas[name17].parameters;\n const self = this;\n const toolWithExecute = tool({\n description,\n parameters,\n execute: async (args, options) => {\n var _a18;\n (_a18 = options == null ? void 0 : options.abortSignal) == null ? void 0 : _a18.throwIfAborted();\n return self.callTool({\n name: name17,\n args,\n options\n });\n }\n });\n tools[name17] = toolWithExecute;\n }\n return tools;\n } catch (error) {\n throw error;\n }\n }\n onClose() {\n if (this.isClosed)\n return;\n this.isClosed = true;\n const error = new MCPClientError({\n message: \"Connection closed\"\n });\n for (const handler of this.responseHandlers.values()) {\n handler(error);\n }\n this.responseHandlers.clear();\n }\n onError(error) {\n if (this.onUncaughtError) {\n this.onUncaughtError(error);\n }\n }\n onResponse(response) {\n const messageId = Number(response.id);\n const handler = this.responseHandlers.get(messageId);\n if (handler === void 0) {\n throw new MCPClientError({\n message: `Protocol error: Received a response for an unknown message ID: ${JSON.stringify(\n response\n )}`\n });\n }\n this.responseHandlers.delete(messageId);\n handler(\n \"result\" in response ? response : new MCPClientError({\n message: response.error.message,\n cause: response.error\n })\n );\n }\n};\n\n// core/util/cosine-similarity.ts\nfunction cosineSimilarity(vector1, vector2, options) {\n if (vector1.length !== vector2.length) {\n throw new InvalidArgumentError({\n parameter: \"vector1,vector2\",\n value: { vector1Length: vector1.length, vector2Length: vector2.length },\n message: `Vectors must have the same length`\n });\n }\n const n = vector1.length;\n if (n === 0) {\n if (options == null ? 
void 0 : options.throwErrorForEmptyVectors) {\n throw new InvalidArgumentError({\n parameter: \"vector1\",\n value: vector1,\n message: \"Vectors cannot be empty\"\n });\n }\n return 0;\n }\n let magnitudeSquared1 = 0;\n let magnitudeSquared2 = 0;\n let dotProduct = 0;\n for (let i = 0; i < n; i++) {\n const value1 = vector1[i];\n const value2 = vector2[i];\n magnitudeSquared1 += value1 * value1;\n magnitudeSquared2 += value2 * value2;\n dotProduct += value1 * value2;\n }\n return magnitudeSquared1 === 0 || magnitudeSquared2 === 0 ? 0 : dotProduct / (Math.sqrt(magnitudeSquared1) * Math.sqrt(magnitudeSquared2));\n}\n\n// core/util/simulate-readable-stream.ts\nimport { delay as delayFunction } from \"@ai-sdk/provider-utils\";\nfunction simulateReadableStream({\n chunks,\n initialDelayInMs = 0,\n chunkDelayInMs = 0,\n _internal\n}) {\n var _a17;\n const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : delayFunction;\n let index = 0;\n return new ReadableStream({\n async pull(controller) {\n if (index < chunks.length) {\n await delay2(index === 0 ? 
initialDelayInMs : chunkDelayInMs);\n controller.enqueue(chunks[index++]);\n } else {\n controller.close();\n }\n }\n });\n}\n\n// streams/assistant-response.ts\nimport {\n formatAssistantStreamPart as formatAssistantStreamPart2\n} from \"@ai-sdk/ui-utils\";\nfunction AssistantResponse({ threadId, messageId }, process2) {\n const stream = new ReadableStream({\n async start(controller) {\n var _a17;\n const textEncoder = new TextEncoder();\n const sendMessage = (message) => {\n controller.enqueue(\n textEncoder.encode(\n formatAssistantStreamPart2(\"assistant_message\", message)\n )\n );\n };\n const sendDataMessage = (message) => {\n controller.enqueue(\n textEncoder.encode(\n formatAssistantStreamPart2(\"data_message\", message)\n )\n );\n };\n const sendError = (errorMessage) => {\n controller.enqueue(\n textEncoder.encode(formatAssistantStreamPart2(\"error\", errorMessage))\n );\n };\n const forwardStream = async (stream2) => {\n var _a18, _b;\n let result = void 0;\n for await (const value of stream2) {\n switch (value.event) {\n case \"thread.message.created\": {\n controller.enqueue(\n textEncoder.encode(\n formatAssistantStreamPart2(\"assistant_message\", {\n id: value.data.id,\n role: \"assistant\",\n content: [{ type: \"text\", text: { value: \"\" } }]\n })\n )\n );\n break;\n }\n case \"thread.message.delta\": {\n const content = (_a18 = value.data.delta.content) == null ? void 0 : _a18[0];\n if ((content == null ? void 0 : content.type) === \"text\" && ((_b = content.text) == null ? 
void 0 : _b.value) != null) {\n controller.enqueue(\n textEncoder.encode(\n formatAssistantStreamPart2(\"text\", content.text.value)\n )\n );\n }\n break;\n }\n case \"thread.run.completed\":\n case \"thread.run.requires_action\": {\n result = value.data;\n break;\n }\n }\n }\n return result;\n };\n controller.enqueue(\n textEncoder.encode(\n formatAssistantStreamPart2(\"assistant_control_data\", {\n threadId,\n messageId\n })\n )\n );\n try {\n await process2({\n sendMessage,\n sendDataMessage,\n forwardStream\n });\n } catch (error) {\n sendError((_a17 = error.message) != null ? _a17 : `${error}`);\n } finally {\n controller.close();\n }\n },\n pull(controller) {\n },\n cancel() {\n }\n });\n return new Response(stream, {\n status: 200,\n headers: {\n \"Content-Type\": \"text/plain; charset=utf-8\"\n }\n });\n}\n\n// streams/langchain-adapter.ts\nvar langchain_adapter_exports = {};\n__export(langchain_adapter_exports, {\n mergeIntoDataStream: () => mergeIntoDataStream,\n toDataStream: () => toDataStream,\n toDataStreamResponse: () => toDataStreamResponse\n});\nimport { formatDataStreamPart as formatDataStreamPart4 } from \"@ai-sdk/ui-utils\";\n\n// streams/stream-callbacks.ts\nfunction createCallbacksTransformer(callbacks = {}) {\n const textEncoder = new TextEncoder();\n let aggregatedResponse = \"\";\n return new TransformStream({\n async start() {\n if (callbacks.onStart)\n await callbacks.onStart();\n },\n async transform(message, controller) {\n controller.enqueue(textEncoder.encode(message));\n aggregatedResponse += message;\n if (callbacks.onToken)\n await callbacks.onToken(message);\n if (callbacks.onText && typeof message === \"string\") {\n await callbacks.onText(message);\n }\n },\n async flush() {\n if (callbacks.onCompletion) {\n await callbacks.onCompletion(aggregatedResponse);\n }\n if (callbacks.onFinal) {\n await callbacks.onFinal(aggregatedResponse);\n }\n }\n });\n}\n\n// streams/langchain-adapter.ts\nfunction toDataStreamInternal(stream, 
callbacks) {\n return stream.pipeThrough(\n new TransformStream({\n transform: async (value, controller) => {\n var _a17;\n if (typeof value === \"string\") {\n controller.enqueue(value);\n return;\n }\n if (\"event\" in value) {\n if (value.event === \"on_chat_model_stream\") {\n forwardAIMessageChunk(\n (_a17 = value.data) == null ? void 0 : _a17.chunk,\n controller\n );\n }\n return;\n }\n forwardAIMessageChunk(value, controller);\n }\n })\n ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(\n new TransformStream({\n transform: async (chunk, controller) => {\n controller.enqueue(formatDataStreamPart4(\"text\", chunk));\n }\n })\n );\n}\nfunction toDataStream(stream, callbacks) {\n return toDataStreamInternal(stream, callbacks).pipeThrough(\n new TextEncoderStream()\n );\n}\nfunction toDataStreamResponse(stream, options) {\n var _a17;\n const dataStream = toDataStreamInternal(\n stream,\n options == null ? void 0 : options.callbacks\n ).pipeThrough(new TextEncoderStream());\n const data = options == null ? void 0 : options.data;\n const init = options == null ? void 0 : options.init;\n const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;\n return new Response(responseStream, {\n status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,\n statusText: init == null ? void 0 : init.statusText,\n headers: prepareResponseHeaders(init == null ? 
void 0 : init.headers, {\n contentType: \"text/plain; charset=utf-8\",\n dataStreamVersion: \"v1\"\n })\n });\n}\nfunction mergeIntoDataStream(stream, options) {\n options.dataStream.merge(toDataStreamInternal(stream, options.callbacks));\n}\nfunction forwardAIMessageChunk(chunk, controller) {\n if (typeof chunk.content === \"string\") {\n controller.enqueue(chunk.content);\n } else {\n const content = chunk.content;\n for (const item of content) {\n if (item.type === \"text\") {\n controller.enqueue(item.text);\n }\n }\n }\n}\n\n// streams/llamaindex-adapter.ts\nvar llamaindex_adapter_exports = {};\n__export(llamaindex_adapter_exports, {\n mergeIntoDataStream: () => mergeIntoDataStream2,\n toDataStream: () => toDataStream2,\n toDataStreamResponse: () => toDataStreamResponse2\n});\nimport { convertAsyncIteratorToReadableStream } from \"@ai-sdk/provider-utils\";\nimport { formatDataStreamPart as formatDataStreamPart5 } from \"@ai-sdk/ui-utils\";\nfunction toDataStreamInternal2(stream, callbacks) {\n const trimStart = trimStartOfStream();\n return convertAsyncIteratorToReadableStream(stream[Symbol.asyncIterator]()).pipeThrough(\n new TransformStream({\n async transform(message, controller) {\n controller.enqueue(trimStart(message.delta));\n }\n })\n ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(\n new TransformStream({\n transform: async (chunk, controller) => {\n controller.enqueue(formatDataStreamPart5(\"text\", chunk));\n }\n })\n );\n}\nfunction toDataStream2(stream, callbacks) {\n return toDataStreamInternal2(stream, callbacks).pipeThrough(\n new TextEncoderStream()\n );\n}\nfunction toDataStreamResponse2(stream, options = {}) {\n var _a17;\n const { init, data, callbacks } = options;\n const dataStream = toDataStreamInternal2(stream, callbacks).pipeThrough(\n new TextEncoderStream()\n );\n const responseStream = data ? 
mergeStreams(data.stream, dataStream) : dataStream;\n return new Response(responseStream, {\n status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,\n statusText: init == null ? void 0 : init.statusText,\n headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {\n contentType: \"text/plain; charset=utf-8\",\n dataStreamVersion: \"v1\"\n })\n });\n}\nfunction mergeIntoDataStream2(stream, options) {\n options.dataStream.merge(toDataStreamInternal2(stream, options.callbacks));\n}\nfunction trimStartOfStream() {\n let isStreamStart = true;\n return (text2) => {\n if (isStreamStart) {\n text2 = text2.trimStart();\n if (text2)\n isStreamStart = false;\n }\n return text2;\n };\n}\n\n// streams/stream-data.ts\nimport { formatDataStreamPart as formatDataStreamPart6 } from \"@ai-sdk/ui-utils\";\n\n// util/constants.ts\nvar HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;\n\n// streams/stream-data.ts\nvar StreamData = class {\n constructor() {\n this.encoder = new TextEncoder();\n this.controller = null;\n this.isClosed = false;\n this.warningTimeout = null;\n const self = this;\n this.stream = new ReadableStream({\n start: async (controller) => {\n self.controller = controller;\n if (process.env.NODE_ENV === \"development\") {\n self.warningTimeout = setTimeout(() => {\n console.warn(\n \"The data stream is hanging. 
Did you forget to close it with `data.close()`?\"\n );\n }, HANGING_STREAM_WARNING_TIME_MS);\n }\n },\n pull: (controller) => {\n },\n cancel: (reason) => {\n this.isClosed = true;\n }\n });\n }\n async close() {\n if (this.isClosed) {\n throw new Error(\"Data Stream has already been closed.\");\n }\n if (!this.controller) {\n throw new Error(\"Stream controller is not initialized.\");\n }\n this.controller.close();\n this.isClosed = true;\n if (this.warningTimeout) {\n clearTimeout(this.warningTimeout);\n }\n }\n append(value) {\n if (this.isClosed) {\n throw new Error(\"Data Stream has already been closed.\");\n }\n if (!this.controller) {\n throw new Error(\"Stream controller is not initialized.\");\n }\n this.controller.enqueue(\n this.encoder.encode(formatDataStreamPart6(\"data\", [value]))\n );\n }\n appendMessageAnnotation(value) {\n if (this.isClosed) {\n throw new Error(\"Data Stream has already been closed.\");\n }\n if (!this.controller) {\n throw new Error(\"Stream controller is not initialized.\");\n }\n this.controller.enqueue(\n this.encoder.encode(formatDataStreamPart6(\"message_annotations\", [value]))\n );\n }\n};\nexport {\n AISDKError17 as AISDKError,\n APICallError2 as APICallError,\n AssistantResponse,\n DownloadError,\n EmptyResponseBodyError,\n InvalidArgumentError,\n InvalidDataContentError,\n InvalidMessageRoleError,\n InvalidPromptError2 as InvalidPromptError,\n InvalidResponseDataError,\n InvalidStreamPartError,\n InvalidToolArgumentsError,\n JSONParseError2 as JSONParseError,\n langchain_adapter_exports as LangChainAdapter,\n llamaindex_adapter_exports as LlamaIndexAdapter,\n LoadAPIKeyError,\n MCPClientError,\n MessageConversionError,\n NoContentGeneratedError,\n NoImageGeneratedError,\n NoObjectGeneratedError,\n NoOutputSpecifiedError,\n NoSuchModelError,\n NoSuchProviderError,\n NoSuchToolError,\n output_exports as Output,\n RetryError,\n StreamData,\n ToolCallRepairError,\n ToolExecutionError,\n TypeValidationError3 as 
TypeValidationError,\n UnsupportedFunctionalityError2 as UnsupportedFunctionalityError,\n UnsupportedModelVersionError,\n appendClientMessage,\n appendResponseMessages,\n convertToCoreMessages,\n coreAssistantMessageSchema,\n coreMessageSchema,\n coreSystemMessageSchema,\n coreToolMessageSchema,\n coreUserMessageSchema,\n cosineSimilarity,\n createDataStream,\n createDataStreamResponse,\n createIdGenerator5 as createIdGenerator,\n createProviderRegistry,\n customProvider,\n defaultSettingsMiddleware,\n embed,\n embedMany,\n createMCPClient as experimental_createMCPClient,\n experimental_createProviderRegistry,\n experimental_customProvider,\n generateImage as experimental_generateImage,\n generateSpeech as experimental_generateSpeech,\n transcribe as experimental_transcribe,\n experimental_wrapLanguageModel,\n extractReasoningMiddleware,\n formatAssistantStreamPart,\n formatDataStreamPart3 as formatDataStreamPart,\n generateId2 as generateId,\n generateObject,\n generateText,\n jsonSchema2 as jsonSchema,\n parseAssistantStreamPart,\n parseDataStreamPart,\n pipeDataStreamToResponse,\n processDataStream,\n processTextStream,\n simulateReadableStream,\n simulateStreamingMiddleware,\n smoothStream,\n streamObject,\n streamText,\n tool,\n wrapLanguageModel,\n zodSchema\n};\n//# 
sourceMappingURL=index.mjs.map"],"names":["AISDKError2","AISDKError3","convertBase64ToUint8Array2","AISDKError5","AISDKError6","AISDKError7","convertUint8ArrayToBase642","convertBase64ToUint8Array3","AISDKError8","AISDKError9","z2","z3","z4","z5","z6","z7","safeValidateTypes2","TypeValidationError2","createIdGenerator2","createIdGenerator3","asSchema4","parsePartialJson2","safeParseJSON3","safeValidateTypes4","createIdGenerator4","z8","z9","formatDataStreamPart4","formatDataStreamPart5"],"mappings":";;;;;;;;;AAAA,IAAI,SAAS,GAAG,MAAM,CAAC,cAAc;AACrC,IAAI,QAAQ,GAAG,CAAC,MAAM,EAAE,GAAG,KAAK;AAChC,EAAE,KAAK,IAAI,MAAM,IAAI,GAAG;AACxB,IAAI,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,EAAE,GAAG,EAAE,GAAG,CAAC,MAAM,CAAC,EAAE,UAAU,EAAE,IAAI,EAAE,CAAC;AACrE,CAAC;;AA2FD;AACA,SAAS,sBAAsB,CAAC,OAAO,EAAE;AACzC,EAAE,WAAW;AACb,EAAE;AACF,CAAC,EAAE;AACH,EAAE,MAAM,eAAe,GAAG,IAAI,OAAO,CAAC,OAAO,IAAI,IAAI,GAAG,OAAO,GAAG,EAAE,CAAC;AACrE,EAAE,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE;AAC5C,IAAI,eAAe,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC;AACpD,EAAE;AACF,EAAE,IAAI,iBAAiB,KAAK,MAAM,EAAE;AACpC,IAAI,eAAe,CAAC,GAAG,CAAC,yBAAyB,EAAE,iBAAiB,CAAC;AACrE,EAAE;AACF,EAAE,OAAO,eAAe;AACxB;AA8FG,IAAC,4BAA4B,GAAG,cAAc,UAAU,CAAC;AAC5D,EAAE,WAAW,GAAG;AAChB,IAAI,KAAK,CAAC;AACV,MAAM,IAAI,EAAE,iCAAiC;AAC7C,MAAM,OAAO,EAAE,CAAC,iJAAiJ;AACjK,KAAK,CAAC;AACN,EAAE;AACF;AAIA,IAAI,IAAI,GAAG,yBAAyB;AACpC,IAAI,MAAM,GAAG,CAAC,gBAAgB,EAAE,IAAI,CAAC,CAAC;AACtC,IAAI,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC;AAC/B,IAAI,EAAE;AACH,IAAC,oBAAoB,GAAG,cAAcA,UAAW,CAAC;AACrD,EAAE,WAAW,CAAC;AACd,IAAI,SAAS;AACb,IAAI,KAAK;AACT,IAAI;AACJ,GAAG,EAAE;AACL,IAAI,KAAK,CAAC;AACV,MAAM,IAAI;AACV,MAAM,OAAO,EAAE,CAAC,+BAA+B,EAAE,SAAS,CAAC,EAAE,EAAE,OAAO,CAAC;AACvE,KAAK,CAAC;AACN,IAAI,IAAI,CAAC,EAAE,CAAC,GAAG,IAAI;AACnB,IAAI,IAAI,CAAC,SAAS,GAAG,SAAS;AAC9B,IAAI,IAAI,CAAC,KAAK,GAAG,KAAK;AACtB,EAAE;AACF,EAAE,OAAO,UAAU,CAAC,KAAK,EAAE;AAC3B,IAAI,OAAOA,UAAW,CAAC,SAAS,CAAC,KAAK,EAAE,MAAM,CAAC;AAC/C,EAAE;AACF;AACA,EAAE,GAAG,MAAM;AAQX,IAAI,KAAK,GAAG,eAAe;AAC3B,IAAI,OAAO,GAA
G,CAAC,gBAAgB,EAAE,KAAK,CAAC,CAAC;AACxC,IAAI,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC;AACjC,IAAI,GAAG;AACJ,IAAC,UAAU,GAAG,cAAcC,UAAW,CAAC;AAC3C,EAAE,WAAW,CAAC;AACd,IAAI,OAAO;AACX,IAAI,MAAM;AACV,IAAI;AACJ,GAAG,EAAE;AACL,IAAI,KAAK,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;AACnC,IAAI,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI;AACpB,IAAI,IAAI,CAAC,MAAM,GAAG,MAAM;AACxB,IAAI,IAAI,CAAC,MAAM,GAAG,MAAM;AACxB,IAAI,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC;AAC9C,EAAE;AACF,EAAE,OAAO,UAAU,CAAC,KAAK,EAAE;AAC3B,IAAI,OAAOA,UAAW,CAAC,SAAS,CAAC,KAAK,EAAE,OAAO,CAAC;AAChD,EAAE;AACF;AACA,GAAG,GAAG,OAAO;;AAEb;AACA,IAAI,2BAA2B,GAAG,CAAC;AACnC,EAAE,UAAU,GAAG,CAAC;AAChB,EAAE,gBAAgB,GAAG,GAAG;AACxB,EAAE,aAAa,GAAG;AAClB,CAAC,GAAG,EAAE,KAAK,OAAO,CAAC,KAAK,4BAA4B,CAAC,CAAC,EAAE;AACxD,EAAE,UAAU;AACZ,EAAE,SAAS,EAAE,gBAAgB;AAC7B,EAAE;AACF,CAAC,CAAC;AACF,eAAe,4BAA4B,CAAC,CAAC,EAAE;AAC/C,EAAE,UAAU;AACZ,EAAE,SAAS;AACX,EAAE;AACF,CAAC,EAAE,MAAM,GAAG,EAAE,EAAE;AAChB,EAAE,IAAI;AACN,IAAI,OAAO,MAAM,CAAC,EAAE;AACpB,EAAE,CAAC,CAAC,OAAO,KAAK,EAAE;AAClB,IAAI,IAAI,YAAY,CAAC,KAAK,CAAC,EAAE;AAC7B,MAAM,MAAM,KAAK;AACjB,IAAI;AACJ,IAAI,IAAI,UAAU,KAAK,CAAC,EAAE;AAC1B,MAAM,MAAM,KAAK;AACjB,IAAI;AACJ,IAAI,MAAM,YAAY,GAAG,eAAe,CAAC,KAAK,CAAC;AAC/C,IAAI,MAAM,SAAS,GAAG,CAAC,GAAG,MAAM,EAAE,KAAK,CAAC;AACxC,IAAI,MAAM,SAAS,GAAG,SAAS,CAAC,MAAM;AACtC,IAAI,IAAI,SAAS,GAAG,UAAU,EAAE;AAChC,MAAM,MAAM,IAAI,UAAU,CAAC;AAC3B,QAAQ,OAAO,EAAE,CAAC,aAAa,EAAE,SAAS,CAAC,uBAAuB,EAAE,YAAY,CAAC,CAAC;AAClF,QAAQ,MAAM,EAAE,oBAAoB;AACpC,QAAQ,MAAM,EAAE;AAChB,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,IAAI,KAAK,YAAY,KAAK,IAAI,YAAY,CAAC,UAAU,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,WAAW,KAAK,IAAI,IAAI,SAAS,IAAI,UAAU,EAAE;AAC3H,MAAM,MAAM,KAAK,CAAC,SAAS,CAAC;AAC5B,MAAM,OAAO,4BAA4B;AACzC,QAAQ,CAAC;AACT,QAAQ,EAAE,UAAU,EAAE,SAAS,EAAE,aAAa,GAAG,SAAS,EAAE,aAAa,EAAE;AAC3E,QAAQ;AACR,OAAO;AACP,IAAI;AACJ,IAAI,IAAI,SAAS,KAAK,CAAC,EAAE;AACzB,MAAM,MAAM,KAAK;AACjB,IAAI;AACJ,IAAI,MAAM,IAAI,UAAU,CAAC;AACzB,MAAM,OAAO,EAAE,CAAC,aAAa,EAAE,SAAS,CAAC,qCAAqC,EAAE,YAAY,CAAC,CAAC,CAAC;AAC/F,MAAM,
MAAM,EAAE,mBAAmB;AACjC,MAAM,MAAM,EAAE;AACd,KAAK,CAAC;AACN,EAAE;AACF;;AAEA;AACA,SAAS,cAAc,CAAC;AACxB,EAAE;AACF,CAAC,EAAE;AACH,EAAE,IAAI,UAAU,IAAI,IAAI,EAAE;AAC1B,IAAI,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,UAAU,CAAC,EAAE;AACvC,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,YAAY;AAC/B,QAAQ,KAAK,EAAE,UAAU;AACzB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,IAAI,UAAU,GAAG,CAAC,EAAE;AACxB,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,YAAY;AAC/B,QAAQ,KAAK,EAAE,UAAU;AACzB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,EAAE;AACF,EAAE,MAAM,gBAAgB,GAAG,UAAU,IAAI,IAAI,GAAG,UAAU,GAAG,CAAC;AAC9D,EAAE,OAAO;AACT,IAAI,UAAU,EAAE,gBAAgB;AAChC,IAAI,KAAK,EAAE,2BAA2B,CAAC,EAAE,UAAU,EAAE,gBAAgB,EAAE;AACvE,GAAG;AACH;;AAEA;AACA,SAAS,qBAAqB,CAAC;AAC/B,EAAE,WAAW;AACb,EAAE;AACF,CAAC,EAAE;AACH,EAAE,OAAO;AACT;AACA,IAAI,gBAAgB,EAAE,CAAC,EAAE,WAAW,CAAC,EAAE,CAAC,SAAS,IAAI,IAAI,GAAG,MAAM,GAAG,SAAS,CAAC,UAAU,KAAK,IAAI,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;AACtI,IAAI,eAAe,EAAE,SAAS,IAAI,IAAI,GAAG,MAAM,GAAG,SAAS,CAAC,UAAU;AACtE;AACA,IAAI,gBAAgB,EAAE,WAAW;AACjC,IAAI,yBAAyB,EAAE,SAAS,IAAI,IAAI,GAAG,MAAM,GAAG,SAAS,CAAC;AACtE,GAAG;AACH;;AAEA;AACA,SAAS,0BAA0B,CAAC;AACpC,EAAE,KAAK;AACP,EAAE,QAAQ;AACV,EAAE,SAAS;AACX,EAAE;AACF,CAAC,EAAE;AACH,EAAE,IAAI,IAAI;AACV,EAAE,OAAO;AACT,IAAI,mBAAmB,EAAE,KAAK,CAAC,QAAQ;AACvC,IAAI,aAAa,EAAE,KAAK,CAAC,OAAO;AAChC;AACA,IAAI,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,UAAU,EAAE,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK;AACrE,MAAM,UAAU,CAAC,CAAC,YAAY,EAAE,GAAG,CAAC,CAAC,CAAC,GAAG,KAAK;AAC9C,MAAM,OAAO,UAAU;AACvB,IAAI,CAAC,EAAE,EAAE,CAAC;AACV;AACA,IAAI,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,GAAG,SAAS,IAAI,IAAI,GAAG,MAAM,GAAG,SAAS,CAAC,QAAQ,KAAK,IAAI,GAAG,IAAI,GAAG,EAAE,CAAC,CAAC,MAAM;AAC1G,MAAM,CAAC,UAAU,EAAE,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK;AACpC,QAAQ,UAAU,CAAC,CAAC,sBAAsB,EAAE,GAAG,CAAC,CAAC,CAAC,GAAG,KAAK;AAC1D,QAAQ,OAAO,UAAU;AACzB,MAAM,CAAC;AACP,MAAM;AACN,KAAK;AACL;AACA,IAAI,GAAG,MAAM,CAAC,OAAO,CAAC,OAAO,IAAI,IAAI,GAAG,OAAO,GAAG,EAAE,CAAC,CAAC,MAAM,CAAC,CAA
C,UAAU,EAAE,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK;AAC3F,MAAM,IAAI,KAAK,KAAK,MAAM,EAAE;AAC5B,QAAQ,UAAU,CAAC,CAAC,mBAAmB,EAAE,GAAG,CAAC,CAAC,CAAC,GAAG,KAAK;AACvD,MAAM;AACN,MAAM,OAAO,UAAU;AACvB,IAAI,CAAC,EAAE,EAAE;AACT,GAAG;AACH;;AAKA;AACA,IAAI,UAAU,GAAG;AACjB,EAAE,SAAS,GAAG;AACd,IAAI,OAAO,QAAQ;AACnB,EAAE,CAAC;AACH,EAAE,eAAe,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE;AAC5C,IAAI,IAAI,OAAO,IAAI,KAAK,UAAU,EAAE;AACpC,MAAM,OAAO,IAAI,CAAC,QAAQ,CAAC;AAC3B,IAAI;AACJ,IAAI,IAAI,OAAO,IAAI,KAAK,UAAU,EAAE;AACpC,MAAM,OAAO,IAAI,CAAC,QAAQ,CAAC;AAC3B,IAAI;AACJ,IAAI,IAAI,OAAO,IAAI,KAAK,UAAU,EAAE;AACpC,MAAM,OAAO,IAAI,CAAC,QAAQ,CAAC;AAC3B,IAAI;AACJ,EAAE;AACF,CAAC;AACD,IAAI,QAAQ,GAAG;AACf,EAAE,WAAW,GAAG;AAChB,IAAI,OAAO,eAAe;AAC1B,EAAE,CAAC;AACH,EAAE,YAAY,GAAG;AACjB,IAAI,OAAO,IAAI;AACf,EAAE,CAAC;AACH,EAAE,aAAa,GAAG;AAClB,IAAI,OAAO,IAAI;AACf,EAAE,CAAC;AACH,EAAE,QAAQ,GAAG;AACb,IAAI,OAAO,IAAI;AACf,EAAE,CAAC;AACH,EAAE,OAAO,GAAG;AACZ,IAAI,OAAO,IAAI;AACf,EAAE,CAAC;AACH,EAAE,QAAQ,GAAG;AACb,IAAI,OAAO,IAAI;AACf,EAAE,CAAC;AACH,EAAE,SAAS,GAAG;AACd,IAAI,OAAO,IAAI;AACf,EAAE,CAAC;AACH,EAAE,UAAU,GAAG;AACf,IAAI,OAAO,IAAI;AACf,EAAE,CAAC;AACH,EAAE,GAAG,GAAG;AACR,IAAI,OAAO,IAAI;AACf,EAAE,CAAC;AACH,EAAE,WAAW,GAAG;AAChB,IAAI,OAAO,KAAK;AAChB,EAAE,CAAC;AACH,EAAE,eAAe,GAAG;AACpB,IAAI,OAAO,IAAI;AACf,EAAE;AACF,CAAC;AACD,IAAI,eAAe,GAAG;AACtB,EAAE,OAAO,EAAE,EAAE;AACb,EAAE,MAAM,EAAE,EAAE;AACZ,EAAE,UAAU,EAAE;AACd,CAAC;;AAED;AACA,SAAS,SAAS,CAAC;AACnB,EAAE,SAAS,GAAG,KAAK;AACnB,EAAE;AACF,CAAC,GAAG,EAAE,EAAE;AACR,EAAE,IAAI,CAAC,SAAS,EAAE;AAClB,IAAI,OAAO,UAAU;AACrB,EAAE;AACF,EAAE,IAAI,MAAM,EAAE;AACd,IAAI,OAAO,MAAM;AACjB,EAAE;AACF,EAAE,OAAO,KAAK,CAAC,SAAS,CAAC,IAAI,CAAC;AAC9B;AAIA,SAAS,UAAU,CAAC;AACpB,EAAE,IAAI,EAAE,MAAM;AACd,EAAE,MAAM;AACR,EAAE,UAAU;AACZ,EAAE,EAAE;AACJ,EAAE,WAAW,GAAG;AAChB,CAAC,EAAE;AACH,EAAE,OAAO,MAAM,CAAC,eAAe,CAAC,MAAM,EAAE,EAAE,UAAU,EAAE,EAAE,OAAO,IAAI,KAAK;AACxE,IAAI,IAAI;AACR,MAAM,MAAM,MAAM,GAAG,MAAM,EAAE,CAAC,IAAI,CAAC;AACnC,MAAM,IAAI,WAAW,EAAE;AACvB,QAAQ,IAAI,CAAC,GAAG,EAAE;AAClB,MAAM;AACN,MAAM,OAAO,MAAM;A
ACnB,IAAI,CAAC,CAAC,OAAO,KAAK,EAAE;AACpB,MAAM,IAAI;AACV,QAAQ,iBAAiB,CAAC,IAAI,EAAE,KAAK,CAAC;AACtC,MAAM,CAAC,SAAS;AAChB,QAAQ,IAAI,CAAC,GAAG,EAAE;AAClB,MAAM;AACN,MAAM,MAAM,KAAK;AACjB,IAAI;AACJ,EAAE,CAAC,CAAC;AACJ;AACA,SAAS,iBAAiB,CAAC,IAAI,EAAE,KAAK,EAAE;AACxC,EAAE,IAAI,KAAK,YAAY,KAAK,EAAE;AAC9B,IAAI,IAAI,CAAC,eAAe,CAAC;AACzB,MAAM,IAAI,EAAE,KAAK,CAAC,IAAI;AACtB,MAAM,OAAO,EAAE,KAAK,CAAC,OAAO;AAC5B,MAAM,KAAK,EAAE,KAAK,CAAC;AACnB,KAAK,CAAC;AACN,IAAI,IAAI,CAAC,SAAS,CAAC;AACnB,MAAM,IAAI,EAAE,cAAc,CAAC,KAAK;AAChC,MAAM,OAAO,EAAE,KAAK,CAAC;AACrB,KAAK,CAAC;AACN,EAAE,CAAC,MAAM;AACT,IAAI,IAAI,CAAC,SAAS,CAAC,EAAE,IAAI,EAAE,cAAc,CAAC,KAAK,EAAE,CAAC;AAClD,EAAE;AACF;;AAEA;AACA,SAAS,yBAAyB,CAAC;AACnC,EAAE,SAAS;AACX,EAAE;AACF,CAAC,EAAE;AACH,EAAE,IAAI,CAAC,SAAS,IAAI,IAAI,GAAG,MAAM,GAAG,SAAS,CAAC,SAAS,MAAM,IAAI,EAAE;AACnE,IAAI,OAAO,EAAE;AACb,EAAE;AACF,EAAE,OAAO,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,CAAC,WAAW,EAAE,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK;AAC1E,IAAI,IAAI,KAAK,KAAK,MAAM,EAAE;AAC1B,MAAM,OAAO,WAAW;AACxB,IAAI;AACJ,IAAI,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,IAAI,KAAK,IAAI,OAAO,KAAK,CAAC,KAAK,KAAK,UAAU,EAAE;AAC5F,MAAM,IAAI,CAAC,SAAS,IAAI,IAAI,GAAG,MAAM,GAAG,SAAS,CAAC,YAAY,MAAM,KAAK,EAAE;AAC3E,QAAQ,OAAO,WAAW;AAC1B,MAAM;AACN,MAAM,MAAM,MAAM,GAAG,KAAK,CAAC,KAAK,EAAE;AAClC,MAAM,OAAO,MAAM,KAAK,MAAM,GAAG,WAAW,GAAG,EAAE,GAAG,WAAW,EAAE,CAAC,GAAG,GAAG,MAAM,EAAE;AAChF,IAAI;AACJ,IAAI,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,QAAQ,IAAI,KAAK,IAAI,OAAO,KAAK,CAAC,MAAM,KAAK,UAAU,EAAE;AAC9F,MAAM,IAAI,CAAC,SAAS,IAAI,IAAI,GAAG,MAAM,GAAG,SAAS,CAAC,aAAa,MAAM,KAAK,EAAE;AAC5E,QAAQ,OAAO,WAAW;AAC1B,MAAM;AACN,MAAM,MAAM,MAAM,GAAG,KAAK,CAAC,MAAM,EAAE;AACnC,MAAM,OAAO,MAAM,KAAK,MAAM,GAAG,WAAW,GAAG,EAAE,GAAG,WAAW,EAAE,CAAC,GAAG,GAAG,MAAM,EAAE;AAChF,IAAI;AACJ,IAAI,OAAO,EAAE,GAAG,WAAW,EAAE,CAAC,GAAG,GAAG,KAAK,EAAE;AAC3C,EAAE,CAAC,EAAE,EAAE,CAAC;AACR;AA0VA,IAAI,uBAAuB,GAAG;AAC9B,EAAE;AACF,IAAI,QAAQ,EAAE,WAAW;AACzB,IAAI,WAAW,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC;AAC7B,IAAI,YAAY,EAAE;AAClB,GAAG;AACH,EAAE;AACF,IAAI,QAAQ,EAAE,WAAW;
AACzB,IAAI,WAAW,EAAE,CAAC,GAAG,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC;AAClC,IAAI,YAAY,EAAE;AAClB,GAAG;AACH,EAAE;AACF,IAAI,QAAQ,EAAE,YAAY;AAC1B,IAAI,WAAW,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC;AAC3B,IAAI,YAAY,EAAE;AAClB,GAAG;AACH,EAAE;AACF,IAAI,QAAQ,EAAE,YAAY;AAC1B,IAAI,WAAW,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC;AACjC,IAAI,YAAY,EAAE;AAClB,GAAG;AACH,EAAE;AACF,IAAI,QAAQ,EAAE,WAAW;AACzB,IAAI,WAAW,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC;AACzB,IAAI,YAAY,EAAE;AAClB,GAAG;AACH,EAAE;AACF,IAAI,QAAQ,EAAE,YAAY;AAC1B,IAAI,WAAW,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC;AAChC,IAAI,YAAY,EAAE;AAClB,GAAG;AACH,EAAE;AACF,IAAI,QAAQ,EAAE,YAAY;AAC1B,IAAI,WAAW,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC;AAChC,IAAI,YAAY,EAAE;AAClB,GAAG;AACH,EAAE;AACF,IAAI,QAAQ,EAAE,YAAY;AAC1B,IAAI,WAAW,EAAE;AACjB,MAAM,CAAC;AACP,MAAM,CAAC;AACP,MAAM,CAAC;AACP,MAAM,EAAE;AACR,MAAM,GAAG;AACT,MAAM,GAAG;AACT,MAAM,GAAG;AACT,MAAM,GAAG;AACT,MAAM,EAAE;AACR,MAAM,GAAG;AACT,MAAM,GAAG;AACT,MAAM;AACN,KAAK;AACL,IAAI,YAAY,EAAE;AAClB,GAAG;AACH,EAAE;AACF,IAAI,QAAQ,EAAE,YAAY;AAC1B,IAAI,WAAW,EAAE;AACjB,MAAM,CAAC;AACP,MAAM,CAAC;AACP,MAAM,CAAC;AACP,MAAM,EAAE;AACR,MAAM,GAAG;AACT,MAAM,GAAG;AACT,MAAM,GAAG;AACT,MAAM,GAAG;AACT,MAAM,GAAG;AACT,MAAM,GAAG;AACT,MAAM,GAAG;AACT,MAAM;AACN,KAAK;AACL,IAAI,YAAY,EAAE;AAClB;AACA,CAAC;AAiCD,IAAI,QAAQ,GAAG,CAAC,IAAI,KAAK;AACzB,EAAE,MAAM,KAAK,GAAG,OAAO,IAAI,KAAK,QAAQ,GAAGC,yBAA0B,CAAC,IAAI,CAAC,GAAG,IAAI;AAClF,EAAE,MAAM,OAAO,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,GAAG,KAAK,EAAE,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,GAAG,KAAK,EAAE,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,GAAG,KAAK,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,GAAG,GAAG;AAC1G,EAAE,OAAO,KAAK,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC;AAClC,CAAC;AACD,SAAS,qBAAqB,CAAC,IAAI,EAAE;AACrC,EAAE,MAAM,MAAM,GAAG,OAAO,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,IAAI,OAAO,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,MAAM,GAAG,EAAE,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,EAAE;AACtI,EAAE,IAAI,CAAC,CAAC,CAAC,KAAK,EAAE;AAChB,EAAE,IAAI,CAAC,CAAC,CAAC,KAAK,EAAE;AAChB,EAAE,OAAO,MAAM,GAAG,QAAQ,CAAC,IAAI,CAAC,GA
AG,IAAI;AACvC;AACA,SAAS,cAAc,CAAC;AACxB,EAAE,IAAI;AACN,EAAE;AACF,CAAC,EAAE;AACH,EAAE,MAAM,aAAa,GAAG,qBAAqB,CAAC,IAAI,CAAC;AACnD,EAAE,KAAK,MAAM,SAAS,IAAI,UAAU,EAAE;AACtC,IAAI,IAAI,OAAO,aAAa,KAAK,QAAQ,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC,MAAM,IAAI,SAAS,CAAC,WAAW,CAAC,MAAM,IAAI,SAAS,CAAC,WAAW,CAAC,KAAK;AAClL,MAAM,CAAC,IAAI,EAAE,KAAK,KAAK,aAAa,CAAC,KAAK,CAAC,KAAK;AAChD,KAAK,EAAE;AACP,MAAM,OAAO,SAAS,CAAC,QAAQ;AAC/B,IAAI;AACJ,EAAE;AACF,EAAE,OAAO,MAAM;AACf;AA2FA,IAAI,KAAK,GAAG,2BAA2B;AACvC,IAAI,OAAO,GAAG,CAAC,gBAAgB,EAAE,KAAK,CAAC,CAAC;AACxC,IAAI,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC;AACjC,IAAI,GAAG;AACJ,IAAC,sBAAsB,GAAG,cAAcC,UAAW,CAAC;AACvD,EAAE,WAAW,CAAC;AACd,IAAI,OAAO,GAAG,sBAAsB;AACpC,IAAI,KAAK;AACT,IAAI,IAAI,EAAE,KAAK;AACf,IAAI,QAAQ;AACZ,IAAI,KAAK;AACT,IAAI;AACJ,GAAG,EAAE;AACL,IAAI,KAAK,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC;AAC1C,IAAI,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI;AACpB,IAAI,IAAI,CAAC,IAAI,GAAG,KAAK;AACrB,IAAI,IAAI,CAAC,QAAQ,GAAG,QAAQ;AAC5B,IAAI,IAAI,CAAC,KAAK,GAAG,KAAK;AACtB,IAAI,IAAI,CAAC,YAAY,GAAG,YAAY;AACpC,EAAE;AACF,EAAE,OAAO,UAAU,CAAC,KAAK,EAAE;AAC3B,IAAI,OAAOA,UAAW,CAAC,SAAS,CAAC,KAAK,EAAE,OAAO,CAAC;AAChD,EAAE;AACF;AACA,GAAG,GAAG,OAAO;AAIb,IAAI,KAAK,GAAG,kBAAkB;AAC9B,IAAI,OAAO,GAAG,CAAC,gBAAgB,EAAE,KAAK,CAAC,CAAC;AACxC,IAAI,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC;AACjC,IAAI,GAAG;AACJ,IAAC,aAAa,GAAG,cAAcC,UAAW,CAAC;AAC9C,EAAE,WAAW,CAAC;AACd,IAAI,GAAG;AACP,IAAI,UAAU;AACd,IAAI,UAAU;AACd,IAAI,KAAK;AACT,IAAI,OAAO,GAAG,KAAK,IAAI,IAAI,GAAG,CAAC,mBAAmB,EAAE,GAAG,CAAC,EAAE,EAAE,UAAU,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,GAAG,CAAC,mBAAmB,EAAE,GAAG,CAAC,EAAE,EAAE,KAAK,CAAC;AAC7H,GAAG,EAAE;AACL,IAAI,KAAK,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC;AAC1C,IAAI,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI;AACpB,IAAI,IAAI,CAAC,GAAG,GAAG,GAAG;AAClB,IAAI,IAAI,CAAC,UAAU,GAAG,UAAU;AAChC,IAAI,IAAI,CAAC,UAAU,GAAG,UAAU;AAChC,EAAE;AACF,EAAE,OAAO,UAAU,CAAC,KAAK,EAAE;AAC3B,IAAI,OAAOA,UAAW,CAAC,SAAS,CAAC,KAAK,EAAE,OAAO,CAAC;AAChD,EAAE;AACF;AACA,GAAG,GAAG,OA
AO;;AAEb;AACA,eAAe,QAAQ,CAAC,EAAE,GAAG,EAAE,EAAE;AACjC,EAAE,IAAI,IAAI;AACV,EAAE,MAAM,OAAO,GAAG,GAAG,CAAC,QAAQ,EAAE;AAChC,EAAE,IAAI;AACN,IAAI,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,OAAO,CAAC;AACzC,IAAI,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE;AACtB,MAAM,MAAM,IAAI,aAAa,CAAC;AAC9B,QAAQ,GAAG,EAAE,OAAO;AACpB,QAAQ,UAAU,EAAE,QAAQ,CAAC,MAAM;AACnC,QAAQ,UAAU,EAAE,QAAQ,CAAC;AAC7B,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,OAAO;AACX,MAAM,IAAI,EAAE,IAAI,UAAU,CAAC,MAAM,QAAQ,CAAC,WAAW,EAAE,CAAC;AACxD,MAAM,QAAQ,EAAE,CAAC,IAAI,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,IAAI,GAAG,IAAI,GAAG,KAAK;AACpF,KAAK;AACL,EAAE,CAAC,CAAC,OAAO,KAAK,EAAE;AAClB,IAAI,IAAI,aAAa,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;AACzC,MAAM,MAAM,KAAK;AACjB,IAAI;AACJ,IAAI,MAAM,IAAI,aAAa,CAAC,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,CAAC;AAC3D,EAAE;AACF;AAUA,IAAI,KAAK,GAAG,4BAA4B;AACxC,IAAI,OAAO,GAAG,CAAC,gBAAgB,EAAE,KAAK,CAAC,CAAC;AACxC,IAAI,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC;AACjC,IAAI,GAAG;AACJ,IAAC,uBAAuB,GAAG,cAAcC,UAAW,CAAC;AACxD,EAAE,WAAW,CAAC;AACd,IAAI,OAAO;AACX,IAAI,KAAK;AACT,IAAI,OAAO,GAAG,CAAC,4FAA4F,EAAE,OAAO,OAAO,CAAC,CAAC;AAC7H,GAAG,EAAE;AACL,IAAI,KAAK,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC;AAC1C,IAAI,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI;AACpB,IAAI,IAAI,CAAC,OAAO,GAAG,OAAO;AAC1B,EAAE;AACF,EAAE,OAAO,UAAU,CAAC,KAAK,EAAE;AAC3B,IAAI,OAAOA,UAAW,CAAC,SAAS,CAAC,KAAK,EAAE,OAAO,CAAC;AAChD,EAAE;AACF;AACA,GAAG,GAAG,OAAO;AAIb,IAAI,iBAAiB,GAAG,CAAC,CAAC,KAAK,CAAC;AAChC,EAAE,CAAC,CAAC,MAAM,EAAE;AACZ,EAAE,CAAC,CAAC,UAAU,CAAC,UAAU,CAAC;AAC1B,EAAE,CAAC,CAAC,UAAU,CAAC,WAAW,CAAC;AAC3B,EAAE,CAAC,CAAC,MAAM;AACV;AACA,IAAI,CAAC,KAAK,KAAK;AACf,MAAM,IAAI,IAAI,EAAE,EAAE;AAClB,MAAM,OAAO,CAAC,EAAE,GAAG,CAAC,IAAI,GAAG,UAAU,CAAC,MAAM,KAAK,IAAI,GAAG,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,KAAK,IAAI,GAAG,EAAE,GAAG,KAAK;AAC3G,IAAI,CAAC;AACL,IAAI,EAAE,OAAO,EAAE,kBAAkB;AACjC;AACA,CAAC,CAAC;AACF,SAAS,gCAAgC,CAAC,OAAO,EAAE;AACnD,EAAE,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;AACnC,IAAI,OAAO,OAAO;AAClB,EAAE;AACF,EAAE,IAAI,OAAO,YAAY,WAAW,EAAE;AACtC,IAAI,OAAOC,yBAA0B,CAA
C,IAAI,UAAU,CAAC,OAAO,CAAC,CAAC;AAC9D,EAAE;AACF,EAAE,OAAOA,yBAA0B,CAAC,OAAO,CAAC;AAC5C;AACA,SAAS,8BAA8B,CAAC,OAAO,EAAE;AACjD,EAAE,IAAI,OAAO,YAAY,UAAU,EAAE;AACrC,IAAI,OAAO,OAAO;AAClB,EAAE;AACF,EAAE,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;AACnC,IAAI,IAAI;AACR,MAAM,OAAOC,yBAA0B,CAAC,OAAO,CAAC;AAChD,IAAI,CAAC,CAAC,OAAO,KAAK,EAAE;AACpB,MAAM,MAAM,IAAI,uBAAuB,CAAC;AACxC,QAAQ,OAAO,EAAE,qEAAqE;AACtF,QAAQ,OAAO;AACf,QAAQ,KAAK,EAAE;AACf,OAAO,CAAC;AACR,IAAI;AACJ,EAAE;AACF,EAAE,IAAI,OAAO,YAAY,WAAW,EAAE;AACtC,IAAI,OAAO,IAAI,UAAU,CAAC,OAAO,CAAC;AAClC,EAAE;AACF,EAAE,MAAM,IAAI,uBAAuB,CAAC,EAAE,OAAO,EAAE,CAAC;AAChD;AACA,SAAS,uBAAuB,CAAC,UAAU,EAAE;AAC7C,EAAE,IAAI;AACN,IAAI,OAAO,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,UAAU,CAAC;AAC/C,EAAE,CAAC,CAAC,OAAO,KAAK,EAAE;AAClB,IAAI,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;AACxD,EAAE;AACF;AAIA,IAAI,KAAK,GAAG,4BAA4B;AACxC,IAAI,OAAO,GAAG,CAAC,gBAAgB,EAAE,KAAK,CAAC,CAAC;AACxC,IAAI,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC;AACjC,IAAI,GAAG;AACJ,IAAC,uBAAuB,GAAG,cAAcC,UAAW,CAAC;AACxD,EAAE,WAAW,CAAC;AACd,IAAI,IAAI;AACR,IAAI,OAAO,GAAG,CAAC,uBAAuB,EAAE,IAAI,CAAC,yDAAyD;AACtG,GAAG,EAAE;AACL,IAAI,KAAK,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;AACnC,IAAI,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI;AACpB,IAAI,IAAI,CAAC,IAAI,GAAG,IAAI;AACpB,EAAE;AACF,EAAE,OAAO,UAAU,CAAC,KAAK,EAAE;AAC3B,IAAI,OAAOA,UAAW,CAAC,SAAS,CAAC,KAAK,EAAE,OAAO,CAAC;AAChD,EAAE;AACF;AACA,GAAG,GAAG,OAAO;;AAEb;AACA,SAAS,YAAY,CAAC,OAAO,EAAE;AAC/B,EAAE,IAAI;AACN,IAAI,MAAM,CAAC,MAAM,EAAE,aAAa,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC;AACtD,IAAI,OAAO;AACX,MAAM,QAAQ,EAAE,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AAClD,MAAM;AACN,KAAK;AACL,EAAE,CAAC,CAAC,OAAO,KAAK,EAAE;AAClB,IAAI,OAAO;AACX,MAAM,QAAQ,EAAE,MAAM;AACtB,MAAM,aAAa,EAAE;AACrB,KAAK;AACL,EAAE;AACF;;AAEA;AACA,eAAe,4BAA4B,CAAC;AAC5C,EAAE,MAAM;AACR,EAAE,sBAAsB,GAAG,IAAI;AAC/B,EAAE,gBAAgB,GAAG,MAAM,KAAK;AAChC,EAAE,sBAAsB,GAAG;AAC3B,CAAC,EAAE;AACH,EAAE,MAAM,gBAAgB,GAAG,MAAM,cAAc;AAC/C,IAAI,MAAM,CAAC,QAAQ;AACnB,IAAI,sBAAsB;AAC1B,IAAI,sBAAsB;AAC1B,I
AAI;AACJ,GAAG;AACH,EAAE,OAAO;AACT,IAAI,GAAG,MAAM,CAAC,MAAM,IAAI,IAAI,GAAG,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE;AAChF,IAAI,GAAG,MAAM,CAAC,QAAQ,CAAC,GAAG;AAC1B,MAAM,CAAC,OAAO,KAAK,6BAA6B,CAAC,OAAO,EAAE,gBAAgB;AAC1E;AACA,GAAG;AACH;AACA,SAAS,6BAA6B,CAAC,OAAO,EAAE,gBAAgB,EAAE;AAClE,EAAE,IAAI,IAAI,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE;AAC9B,EAAE,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI;AAC3B,EAAE,QAAQ,IAAI;AACd,IAAI,KAAK,QAAQ,EAAE;AACnB,MAAM,OAAO;AACb,QAAQ,IAAI,EAAE,QAAQ;AACtB,QAAQ,OAAO,EAAE,OAAO,CAAC,OAAO;AAChC,QAAQ,gBAAgB,EAAE,CAAC,IAAI,GAAG,OAAO,CAAC,eAAe,KAAK,IAAI,GAAG,IAAI,GAAG,OAAO,CAAC;AACpF,OAAO;AACP,IAAI;AACJ,IAAI,KAAK,MAAM,EAAE;AACjB,MAAM,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,EAAE;AAC/C,QAAQ,OAAO;AACf,UAAU,IAAI,EAAE,MAAM;AACtB,UAAU,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,CAAC,OAAO,EAAE,CAAC;AAC5D,UAAU,gBAAgB,EAAE,CAAC,EAAE,GAAG,OAAO,CAAC,eAAe,KAAK,IAAI,GAAG,EAAE,GAAG,OAAO,CAAC;AAClF,SAAS;AACT,MAAM;AACN,MAAM,OAAO;AACb,QAAQ,IAAI,EAAE,MAAM;AACpB,QAAQ,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK,8BAA8B,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,KAAK,IAAI,CAAC,IAAI,KAAK,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,EAAE,CAAC;AACjK,QAAQ,gBAAgB,EAAE,CAAC,EAAE,GAAG,OAAO,CAAC,eAAe,KAAK,IAAI,GAAG,EAAE,GAAG,OAAO,CAAC;AAChF,OAAO;AACP,IAAI;AACJ,IAAI,KAAK,WAAW,EAAE;AACtB,MAAM,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,EAAE;AAC/C,QAAQ,OAAO;AACf,UAAU,IAAI,EAAE,WAAW;AAC3B,UAAU,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,CAAC,OAAO,EAAE,CAAC;AAC5D,UAAU,gBAAgB,EAAE,CAAC,EAAE,GAAG,OAAO,CAAC,eAAe,KAAK,IAAI,GAAG,EAAE,GAAG,OAAO,CAAC;AAClF,SAAS;AACT,MAAM;AACN,MAAM,OAAO;AACb,QAAQ,IAAI,EAAE,WAAW;AACzB,QAAQ,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,MAAM;AACvC;AACA,UAAU,CAAC,IAAI,KAAK,IAAI,CAAC,IAAI,KAAK,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK;AAC1D,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK;AACxB,UAAU,IAAI,IAAI;AAClB,UAAU,MAAM,eAAe,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,eAAe,KAAK,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC,6BAA6B;AACnH,UAAU,QAAQ,IAAI,CAAC,IAAI;AAC3B,YAAY,KAAK,MAAM
,EAAE;AACzB,cAAc,OAAO;AACrB,gBAAgB,IAAI,EAAE,MAAM;AAC5B,gBAAgB,IAAI,EAAE,IAAI,CAAC,IAAI,YAAY,GAAG,GAAG,IAAI,CAAC,IAAI,GAAG,gCAAgC,CAAC,IAAI,CAAC,IAAI,CAAC;AACxG,gBAAgB,QAAQ,EAAE,IAAI,CAAC,QAAQ;AACvC,gBAAgB,QAAQ,EAAE,IAAI,CAAC,QAAQ;AACvC,gBAAgB,gBAAgB,EAAE;AAClC,eAAe;AACf,YAAY;AACZ,YAAY,KAAK,WAAW,EAAE;AAC9B,cAAc,OAAO;AACrB,gBAAgB,IAAI,EAAE,WAAW;AACjC,gBAAgB,IAAI,EAAE,IAAI,CAAC,IAAI;AAC/B,gBAAgB,SAAS,EAAE,IAAI,CAAC,SAAS;AACzC,gBAAgB,gBAAgB,EAAE;AAClC,eAAe;AACf,YAAY;AACZ,YAAY,KAAK,oBAAoB,EAAE;AACvC,cAAc,OAAO;AACrB,gBAAgB,IAAI,EAAE,oBAAoB;AAC1C,gBAAgB,IAAI,EAAE,IAAI,CAAC,IAAI;AAC/B,gBAAgB,gBAAgB,EAAE;AAClC,eAAe;AACf,YAAY;AACZ,YAAY,KAAK,MAAM,EAAE;AACzB,cAAc,OAAO;AACrB,gBAAgB,IAAI,EAAE,MAAM;AAC5B,gBAAgB,IAAI,EAAE,IAAI,CAAC,IAAI;AAC/B,gBAAgB,gBAAgB,EAAE;AAClC,eAAe;AACf,YAAY;AACZ,YAAY,KAAK,WAAW,EAAE;AAC9B,cAAc,OAAO;AACrB,gBAAgB,IAAI,EAAE,WAAW;AACjC,gBAAgB,UAAU,EAAE,IAAI,CAAC,UAAU;AAC3C,gBAAgB,QAAQ,EAAE,IAAI,CAAC,QAAQ;AACvC,gBAAgB,IAAI,EAAE,IAAI,CAAC,IAAI;AAC/B,gBAAgB,gBAAgB,EAAE;AAClC,eAAe;AACf,YAAY;AACZ;AACA,QAAQ,CAAC,CAAC;AACV,QAAQ,gBAAgB,EAAE,CAAC,EAAE,GAAG,OAAO,CAAC,eAAe,KAAK,IAAI,GAAG,EAAE,GAAG,OAAO,CAAC;AAChF,OAAO;AACP,IAAI;AACJ,IAAI,KAAK,MAAM,EAAE;AACjB,MAAM,OAAO;AACb,QAAQ,IAAI,EAAE,MAAM;AACpB,QAAQ,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK;AAC/C,UAAU,IAAI,IAAI;AAClB,UAAU,OAAO;AACjB,YAAY,IAAI,EAAE,aAAa;AAC/B,YAAY,UAAU,EAAE,IAAI,CAAC,UAAU;AACvC,YAAY,QAAQ,EAAE,IAAI,CAAC,QAAQ;AACnC,YAAY,MAAM,EAAE,IAAI,CAAC,MAAM;AAC/B,YAAY,OAAO,EAAE,IAAI,CAAC,oBAAoB;AAC9C,YAAY,OAAO,EAAE,IAAI,CAAC,OAAO;AACjC,YAAY,gBAAgB,EAAE,CAAC,IAAI,GAAG,IAAI,CAAC,eAAe,KAAK,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC;AAClF,WAAW;AACX,QAAQ,CAAC,CAAC;AACV,QAAQ,gBAAgB,EAAE,CAAC,EAAE,GAAG,OAAO,CAAC,eAAe,KAAK,IAAI,GAAG,EAAE,GAAG,OAAO,CAAC;AAChF,OAAO;AACP,IAAI;AACJ,IAAI,SAAS;AACb,MAAM,MAAM,gBAAgB,GAAG,IAAI;AACnC,MAAM,MAAM,IAAI,uBAAuB,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,CAAC;AACnE,IAAI;AACJ;AACA;AACA,eAAe,cAAc,CAAC,QAAQ,EAAE,sBAAsB,EAAE,sBAAsB,EAAE,gBAAgB,EAAE;AAC1G,EAAE,MAAM,IAAI,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,OAAO,K
AAK,OAAO,CAAC,IAAI,KAAK,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,OAAO,KAAK,OAAO,CAAC,OAAO,CAAC,CAAC,MAAM;AAC7G,IAAI,CAAC,OAAO,KAAK,KAAK,CAAC,OAAO,CAAC,OAAO;AACtC,GAAG,CAAC,IAAI,EAAE,CAAC,MAAM;AACjB,IAAI,CAAC,IAAI,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,IAAI,IAAI,CAAC,IAAI,KAAK;AACrD,GAAG,CAAC,MAAM;AACV,IAAI,CAAC,IAAI,KAAK,EAAE,IAAI,CAAC,IAAI,KAAK,OAAO,IAAI,sBAAsB,KAAK,IAAI;AACxE,GAAG,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,GAAG,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG;AACrE,IAAI,CAAC,IAAI;AACT;AACA,MAAM,OAAO,IAAI,KAAK,QAAQ,KAAK,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,IAAI,CAAC,GAAG;AAC5G;AACA,GAAG,CAAC,MAAM,CAAC,CAAC,KAAK,KAAK,KAAK,YAAY,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,KAAK,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC;AACnF,EAAE,MAAM,gBAAgB,GAAG,MAAM,OAAO,CAAC,GAAG;AAC5C,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,GAAG,MAAM;AAC7B,MAAM,GAAG;AACT,MAAM,IAAI,EAAE,MAAM,sBAAsB,CAAC,EAAE,GAAG,EAAE;AAChD,KAAK,CAAC;AACN,GAAG;AACH,EAAE,OAAO,MAAM,CAAC,WAAW;AAC3B,IAAI,gBAAgB,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,EAAE,IAAI,EAAE,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,EAAE,IAAI,CAAC;AAClE,GAAG;AACH;AACA,SAAS,8BAA8B,CAAC,IAAI,EAAE,gBAAgB,EAAE;AAChE,EAAE,IAAI,IAAI,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE;AACtB,EAAE,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,EAAE;AAC5B,IAAI,OAAO;AACX,MAAM,IAAI,EAAE,MAAM;AAClB,MAAM,IAAI,EAAE,IAAI,CAAC,IAAI;AACrB,MAAM,gBAAgB,EAAE,CAAC,IAAI,GAAG,IAAI,CAAC,eAAe,KAAK,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC;AAC5E,KAAK;AACL,EAAE;AACF,EAAE,IAAI,QAAQ,GAAG,IAAI,CAAC,QAAQ;AAC9B,EAAE,IAAI,IAAI;AACV,EAAE,IAAI,OAAO;AACb,EAAE,IAAI,cAAc;AACpB,EAAE,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI;AACxB,EAAE,QAAQ,IAAI;AACd,IAAI,KAAK,OAAO;AAChB,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK;AACvB,MAAM;AACN,IAAI,KAAK,MAAM;AACf,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI;AACtB,MAAM;AACN,IAAI;AACJ,MAAM,MAAM,IAAI,KAAK,CAAC,CAAC,uBAAuB,EAAE,IAAI,CAAC,CAAC,CAAC;AACvD;AACA,EAAE,IAAI;AACN,IAAI,OAAO,GAAG,OAAO,IAAI,KAAK,QAAQ,GAAG,IAAI,GAAG,CAAC,IAAI,CAAC,GAAG,IAAI;AAC7D,EAAE,CAAC,CAAC,OAAO,KAAK,EAAE;AAClB,IAAI,OAAO,GAAG,IAAI;AAClB,EAAE;AACF,EAAE,IAAI
,OAAO,YAAY,GAAG,EAAE;AAC9B,IAAI,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;AACtC,MAAM,MAAM,EAAE,QAAQ,EAAE,eAAe,EAAE,aAAa,EAAE,GAAG,YAAY;AACvE,QAAQ,OAAO,CAAC,QAAQ;AACxB,OAAO;AACP,MAAM,IAAI,eAAe,IAAI,IAAI,IAAI,aAAa,IAAI,IAAI,EAAE;AAC5D,QAAQ,MAAM,IAAI,KAAK,CAAC,CAAC,gCAAgC,EAAE,IAAI,CAAC,CAAC,CAAC;AAClE,MAAM;AACN,MAAM,QAAQ,GAAG,eAAe;AAChC,MAAM,cAAc,GAAG,8BAA8B,CAAC,aAAa,CAAC;AACpE,IAAI,CAAC,MAAM;AACX,MAAM,MAAM,cAAc,GAAG,gBAAgB,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC;AACjE,MAAM,IAAI,cAAc,EAAE;AAC1B,QAAQ,cAAc,GAAG,cAAc,CAAC,IAAI;AAC5C,QAAQ,QAAQ,IAAI,IAAI,GAAG,QAAQ,GAAG,QAAQ,GAAG,cAAc,CAAC,QAAQ;AACxE,MAAM,CAAC,MAAM;AACb,QAAQ,cAAc,GAAG,OAAO;AAChC,MAAM;AACN,IAAI;AACJ,EAAE,CAAC,MAAM;AACT,IAAI,cAAc,GAAG,8BAA8B,CAAC,OAAO,CAAC;AAC5D,EAAE;AACF,EAAE,QAAQ,IAAI;AACd,IAAI,KAAK,OAAO,EAAE;AAClB,MAAM,IAAI,cAAc,YAAY,UAAU,EAAE;AAChD,QAAQ,QAAQ,GAAG,CAAC,EAAE,GAAG,cAAc,CAAC;AACxC,UAAU,IAAI,EAAE,cAAc;AAC9B,UAAU,UAAU,EAAE;AACtB,SAAS,CAAC,KAAK,IAAI,GAAG,EAAE,GAAG,QAAQ;AACnC,MAAM;AACN,MAAM,OAAO;AACb,QAAQ,IAAI,EAAE,OAAO;AACrB,QAAQ,KAAK,EAAE,cAAc;AAC7B,QAAQ,QAAQ;AAChB,QAAQ,gBAAgB,EAAE,CAAC,EAAE,GAAG,IAAI,CAAC,eAAe,KAAK,IAAI,GAAG,EAAE,GAAG,IAAI,CAAC;AAC1E,OAAO;AACP,IAAI;AACJ,IAAI,KAAK,MAAM,EAAE;AACjB,MAAM,IAAI,QAAQ,IAAI,IAAI,EAAE;AAC5B,QAAQ,MAAM,IAAI,KAAK,CAAC,CAAC,kCAAkC,CAAC,CAAC;AAC7D,MAAM;AACN,MAAM,OAAO;AACb,QAAQ,IAAI,EAAE,MAAM;AACpB,QAAQ,IAAI,EAAE,cAAc,YAAY,UAAU,GAAG,gCAAgC,CAAC,cAAc,CAAC,GAAG,cAAc;AACtH,QAAQ,QAAQ,EAAE,IAAI,CAAC,QAAQ;AAC/B,QAAQ,QAAQ;AAChB,QAAQ,gBAAgB,EAAE,CAAC,EAAE,GAAG,IAAI,CAAC,eAAe,KAAK,IAAI,GAAG,EAAE,GAAG,IAAI,CAAC;AAC1E,OAAO;AACP,IAAI;AACJ;AACA;;AAEA;AACA,SAAS,mBAAmB,CAAC;AAC7B,EAAE,SAAS;AACX,EAAE,WAAW;AACb,EAAE,IAAI;AACN,EAAE,IAAI;AACN,EAAE,eAAe;AACjB,EAAE,gBAAgB;AAClB,EAAE,aAAa;AACf,EAAE;AACF,CAAC,EAAE;AACH,EAAE,IAAI,SAAS,IAAI,IAAI,EAAE;AACzB,IAAI,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE;AACtC,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,WAAW;AAC9B,QAAQ,KAAK,EAAE,SAAS;AACxB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,IAAI,SAAS,GAAG,CAAC,EAAE;AACvB,MAAM,MAAM,IAAI,oBAAoB,CA
AC;AACrC,QAAQ,SAAS,EAAE,WAAW;AAC9B,QAAQ,KAAK,EAAE,SAAS;AACxB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,EAAE;AACF,EAAE,IAAI,WAAW,IAAI,IAAI,EAAE;AAC3B,IAAI,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;AACzC,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,aAAa;AAChC,QAAQ,KAAK,EAAE,WAAW;AAC1B,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,EAAE;AACF,EAAE,IAAI,IAAI,IAAI,IAAI,EAAE;AACpB,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;AAClC,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,MAAM;AACzB,QAAQ,KAAK,EAAE,IAAI;AACnB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,EAAE;AACF,EAAE,IAAI,IAAI,IAAI,IAAI,EAAE;AACpB,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;AAClC,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,MAAM;AACzB,QAAQ,KAAK,EAAE,IAAI;AACnB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,EAAE;AACF,EAAE,IAAI,eAAe,IAAI,IAAI,EAAE;AAC/B,IAAI,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;AAC7C,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,iBAAiB;AACpC,QAAQ,KAAK,EAAE,eAAe;AAC9B,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,EAAE;AACF,EAAE,IAAI,gBAAgB,IAAI,IAAI,EAAE;AAChC,IAAI,IAAI,OAAO,gBAAgB,KAAK,QAAQ,EAAE;AAC9C,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,kBAAkB;AACrC,QAAQ,KAAK,EAAE,gBAAgB;AAC/B,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,EAAE;AACF,EAAE,IAAI,IAAI,IAAI,IAAI,EAAE;AACpB,IAAI,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;AACjC,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,MAAM;AACzB,QAAQ,KAAK,EAAE,IAAI;AACnB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,EAAE;AACF,EAAE,OAAO;AACT,IAAI,SAAS;AACb;AACA,IAAI,WAAW,EAAE,WAAW,IAAI,IAAI,GAAG,WAAW,GAAG,CAAC;AACtD,IAAI,IAAI;AACR,IAAI,IAAI;AACR,IAAI,eAAe;AACnB,IAAI,gBAAgB;AACpB,IAAI,aAAa,EAAE,aAAa,IAAI,IAAI,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,GAAG,aAAa,GAAG,MAAM;AAC7F,IAAI;AACJ,GAAG;AACH;;AAOA;AACA,SAAS,kBAAkB,CAAC,WAAW,EAAE;AACzC,EAAE,IAAI,IAAI,EAAE,EAAE,EAAE,EAAE;AAClB,EAAE,MAAM,KAAK,GAAG,EAAE;AAClB,EAAE,KAAK,MAAM,UAAU,IAAI,WAAW,EAAE;AACxC,IAAI,IAAI,GAAG;AACX,IAAI,IAAI;AACR,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC;AACnC,IAAI,CAAC,CAAC,OAAO,KAAK,EAAE;AACpB,MAAM,
MAAM,IAAI,KAAK,CAAC,CAAC,aAAa,EAAE,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC;AACvD,IAAI;AACJ,IAAI,QAAQ,GAAG,CAAC,QAAQ;AACxB,MAAM,KAAK,OAAO;AAClB,MAAM,KAAK,QAAQ,EAAE;AACrB,QAAQ,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC,WAAW,KAAK,IAAI,GAAG,MAAM,GAAG,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;AAC1F,UAAU,KAAK,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,GAAG,EAAE,CAAC;AACnD,QAAQ,CAAC,MAAM;AACf,UAAU,IAAI,CAAC,UAAU,CAAC,WAAW,EAAE;AACvC,YAAY,MAAM,IAAI,KAAK;AAC3B,cAAc;AACd,aAAa;AACb,UAAU;AACV,UAAU,KAAK,CAAC,IAAI,CAAC;AACrB,YAAY,IAAI,EAAE,MAAM;AACxB,YAAY,IAAI,EAAE,GAAG;AACrB,YAAY,QAAQ,EAAE,UAAU,CAAC;AACjC,WAAW,CAAC;AACZ,QAAQ;AACR,QAAQ;AACR,MAAM;AACN,MAAM,KAAK,OAAO,EAAE;AACpB,QAAQ,IAAI,MAAM;AAClB,QAAQ,IAAI,aAAa;AACzB,QAAQ,IAAI,QAAQ;AACpB,QAAQ,IAAI;AACZ,UAAU,CAAC,MAAM,EAAE,aAAa,CAAC,GAAG,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC;AAC7D,UAAU,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,OAAO,KAAK,EAAE;AACxB,UAAU,MAAM,IAAI,KAAK,CAAC,CAAC,2BAA2B,EAAE,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC;AACzE,QAAQ;AACR,QAAQ,IAAI,QAAQ,IAAI,IAAI,IAAI,aAAa,IAAI,IAAI,EAAE;AACvD,UAAU,MAAM,IAAI,KAAK,CAAC,CAAC,yBAAyB,EAAE,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC;AACvE,QAAQ;AACR,QAAQ,IAAI,CAAC,EAAE,GAAG,UAAU,CAAC,WAAW,KAAK,IAAI,GAAG,MAAM,GAAG,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;AACtF,UAAU,KAAK,CAAC,IAAI,CAAC;AACrB,YAAY,IAAI,EAAE,OAAO;AACzB,YAAY,KAAK,EAAE,8BAA8B,CAAC,aAAa;AAC/D,WAAW,CAAC;AACZ,QAAQ,CAAC,MAAM,IAAI,CAAC,EAAE,GAAG,UAAU,CAAC,WAAW,KAAK,IAAI,GAAG,MAAM,GAAG,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;AAC5F,UAAU,KAAK,CAAC,IAAI,CAAC;AACrB,YAAY,IAAI,EAAE,MAAM;AACxB,YAAY,IAAI,EAAE,uBAAuB;AACzC,cAAc,8BAA8B,CAAC,aAAa;AAC1D;AACA,WAAW,CAAC;AACZ,QAAQ,CAAC,MAAM;AACf,UAAU,IAAI,CAAC,UAAU,CAAC,WAAW,EAAE;AACvC,YAAY,MAAM,IAAI,KAAK;AAC3B,cAAc;AACd,aAAa;AACb,UAAU;AACV,UAAU,KAAK,CAAC,IAAI,CAAC;AACrB,YAAY,IAAI,EAAE,MAAM;AACxB,YAAY,IAAI,EAAE,aAAa;AAC/B,YAAY,QAAQ,EAAE,UAAU,CAAC;AACjC,WAAW,CAAC;AACZ,QAAQ;AACR,QAAQ;AACR,MAAM;AACN,MAAM,SAAS;AACf,QAAQ,MAAM,IAAI,KAAK,CAAC,CAAC,0BAA0B,EAAE,GAAG,CAAC,QAAQ,CAAC,C
AAC,CAAC;AACpE,MAAM;AACN;AACA,EAAE;AACF,EAAE,OAAO,KAAK;AACd;AAIA,IAAI,KAAK,GAAG,2BAA2B;AACvC,IAAI,OAAO,GAAG,CAAC,gBAAgB,EAAE,KAAK,CAAC,CAAC;AACxC,IAAI,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC;AACjC,IAAI,GAAG;AACJ,IAAC,sBAAsB,GAAG,cAAcC,UAAW,CAAC;AACvD,EAAE,WAAW,CAAC;AACd,IAAI,eAAe;AACnB,IAAI;AACJ,GAAG,EAAE;AACL,IAAI,KAAK,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;AACnC,IAAI,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI;AACpB,IAAI,IAAI,CAAC,eAAe,GAAG,eAAe;AAC1C,EAAE;AACF,EAAE,OAAO,UAAU,CAAC,KAAK,EAAE;AAC3B,IAAI,OAAOA,UAAW,CAAC,SAAS,CAAC,KAAK,EAAE,OAAO,CAAC;AAChD,EAAE;AACF;AACA,GAAG,GAAG,OAAO;;AAEb;AACA,SAAS,qBAAqB,CAAC,QAAQ,EAAE,OAAO,EAAE;AAClD,EAAE,IAAI,IAAI,EAAE,EAAE;AACd,EAAE,MAAM,KAAK,GAAG,CAAC,IAAI,GAAG,OAAO,IAAI,IAAI,GAAG,MAAM,GAAG,OAAO,CAAC,KAAK,KAAK,IAAI,GAAG,IAAI,GAAG,EAAE;AACrF,EAAE,MAAM,YAAY,GAAG,EAAE;AACzB,EAAE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAC5C,IAAI,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC;AAC/B,IAAI,MAAM,aAAa,GAAG,CAAC,KAAK,QAAQ,CAAC,MAAM,GAAG,CAAC;AACnD,IAAI,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,wBAAwB,EAAE,GAAG,OAAO;AAC/D,IAAI,QAAQ,IAAI;AAChB,MAAM,KAAK,QAAQ,EAAE;AACrB,QAAQ,YAAY,CAAC,IAAI,CAAC;AAC1B,UAAU,IAAI,EAAE,QAAQ;AACxB,UAAU;AACV,SAAS,CAAC;AACV,QAAQ;AACR,MAAM;AACN,MAAM,KAAK,MAAM,EAAE;AACnB,QAAQ,IAAI,OAAO,CAAC,KAAK,IAAI,IAAI,EAAE;AACnC,UAAU,YAAY,CAAC,IAAI,CAAC;AAC5B,YAAY,IAAI,EAAE,MAAM;AACxB,YAAY,OAAO,EAAE,wBAAwB,GAAG;AAChD,cAAc,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE;AAC7C,cAAc,GAAG,kBAAkB,CAAC,wBAAwB;AAC5D,aAAa,GAAG;AAChB,WAAW,CAAC;AACZ,QAAQ,CAAC,MAAM;AACf,UAAU,MAAM,SAAS,GAAG,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,IAAI,KAAK,IAAI,CAAC,IAAI,KAAK,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,MAAM;AAChG,YAAY,IAAI,EAAE,MAAM;AACxB,YAAY,IAAI,EAAE,IAAI,CAAC;AACvB,WAAW,CAAC,CAAC;AACb,UAAU,YAAY,CAAC,IAAI,CAAC;AAC5B,YAAY,IAAI,EAAE,MAAM;AACxB,YAAY,OAAO,EAAE,wBAAwB,GAAG,CAAC,GAAG,SAAS,EAAE,GAAG,kBAAkB,CAAC,wBAAwB,CAAC,CAAC,GAAG;AAClH,WAAW,CAAC;AACZ,QAAQ;AACR,QAAQ;AACR,MAAM;AACN,MAAM,KAAK,WAAW,EAAE;AACxB,QAAQ,IAAI,OAAO,CAAC,KAAK,IAAI,IAAI,EAAE;AACnC,UAA
U,IAAI,aAAa,GAAG,WAAW;AACzC,YAAY,MAAM,QAAQ,GAAG,EAAE;AAC/B,YAAY,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;AACtC,cAAc,QAAQ,IAAI,CAAC,IAAI;AAC/B,gBAAgB,KAAK,MAAM;AAC3B,gBAAgB,KAAK,MAAM,EAAE;AAC7B,kBAAkB,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC;AACrC,kBAAkB;AAClB,gBAAgB;AAChB,gBAAgB,KAAK,WAAW,EAAE;AAClC,kBAAkB,KAAK,MAAM,MAAM,IAAI,IAAI,CAAC,OAAO,EAAE;AACrD,oBAAoB,QAAQ,MAAM,CAAC,IAAI;AACvC,sBAAsB,KAAK,MAAM;AACjC,wBAAwB,QAAQ,CAAC,IAAI,CAAC;AACtC,0BAA0B,IAAI,EAAE,WAAW;AAC3C,0BAA0B,IAAI,EAAE,MAAM,CAAC,IAAI;AAC3C,0BAA0B,SAAS,EAAE,MAAM,CAAC;AAC5C,yBAAyB,CAAC;AAC1B,wBAAwB;AACxB,sBAAsB,KAAK,UAAU;AACrC,wBAAwB,QAAQ,CAAC,IAAI,CAAC;AACtC,0BAA0B,IAAI,EAAE,oBAAoB;AACpD,0BAA0B,IAAI,EAAE,MAAM,CAAC;AACvC,yBAAyB,CAAC;AAC1B,wBAAwB;AACxB;AACA,kBAAkB;AAClB,kBAAkB;AAClB,gBAAgB;AAChB,gBAAgB,KAAK,iBAAiB;AACtC,kBAAkB,QAAQ,CAAC,IAAI,CAAC;AAChC,oBAAoB,IAAI,EAAE,WAAW;AACrC,oBAAoB,UAAU,EAAE,IAAI,CAAC,cAAc,CAAC,UAAU;AAC9D,oBAAoB,QAAQ,EAAE,IAAI,CAAC,cAAc,CAAC,QAAQ;AAC1D,oBAAoB,IAAI,EAAE,IAAI,CAAC,cAAc,CAAC;AAC9C,mBAAmB,CAAC;AACpB,kBAAkB;AAClB,gBAAgB,SAAS;AACzB,kBAAkB,MAAM,gBAAgB,GAAG,IAAI;AAC/C,kBAAkB,MAAM,IAAI,KAAK,CAAC,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC,CAAC;AAC1E,gBAAgB;AAChB;AACA,YAAY;AACZ,YAAY,YAAY,CAAC,IAAI,CAAC;AAC9B,cAAc,IAAI,EAAE,WAAW;AAC/B,cAAc,OAAO,EAAE;AACvB,aAAa,CAAC;AACd,YAAY,MAAM,eAAe,GAAG,KAAK,CAAC,MAAM;AAChD,cAAc,CAAC,IAAI,KAAK,IAAI,CAAC,IAAI,KAAK;AACtC,aAAa,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK,IAAI,CAAC,cAAc,CAAC;AAChD,YAAY,IAAI,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;AAC5C,cAAc,YAAY,CAAC,IAAI,CAAC;AAChC,gBAAgB,IAAI,EAAE,MAAM;AAC5B,gBAAgB,OAAO,EAAE,eAAe,CAAC,GAAG;AAC5C,kBAAkB,CAAC,cAAc,KAAK;AACtC,oBAAoB,IAAI,EAAE,QAAQ,IAAI,cAAc,CAAC,EAAE;AACvD,sBAAsB,MAAM,IAAI,sBAAsB,CAAC;AACvD,wBAAwB,eAAe,EAAE,OAAO;AAChD,wBAAwB,OAAO,EAAE,qCAAqC,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc;AACtG,uBAAuB,CAAC;AACxB,oBAAoB;AACpB,oBAAoB,MAAM,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,cAAc;AAC3E,oBAAoB,MAAM,KAAK,GAAG,KAAK,CAAC,QAAQ,CAAC;AACjD,oBAAoB,OAAO,CAAC,KAAK,IAAI,IAAI,GAAG,MAAM,GAAG,KAAK,CAAC,gCAAgC,KAAK,IAAI,GAAG;AACvG,sBAAsB,IAAI,EAAE,aAAa;AACzC,sBAAsB,U
AAU;AAChC,sBAAsB,QAAQ;AAC9B,sBAAsB,MAAM,EAAE,KAAK,CAAC,gCAAgC,CAAC,MAAM,CAAC;AAC5E,sBAAsB,oBAAoB,EAAE,KAAK,CAAC,gCAAgC,CAAC,MAAM;AACzF,qBAAqB,GAAG;AACxB,sBAAsB,IAAI,EAAE,aAAa;AACzC,sBAAsB,UAAU;AAChC,sBAAsB,QAAQ;AAC9B,sBAAsB;AACtB,qBAAqB;AACrB,kBAAkB;AAClB;AACA,eAAe,CAAC;AAChB,YAAY;AACZ,YAAY,KAAK,GAAG,EAAE;AACtB,YAAY,uBAAuB,GAAG,KAAK;AAC3C,YAAY,WAAW,EAAE;AACzB,UAAU,CAAC;AAEX,UAAU,IAAI,WAAW,GAAG,CAAC;AAC7B,UAAU,IAAI,uBAAuB,GAAG,KAAK;AAC7C,UAAU,IAAI,KAAK,GAAG,EAAE;AACxB,UAAU,KAAK,MAAM,IAAI,IAAI,OAAO,CAAC,KAAK,EAAE;AAC5C,YAAY,QAAQ,IAAI,CAAC,IAAI;AAC7B,cAAc,KAAK,MAAM,EAAE;AAC3B,gBAAgB,IAAI,uBAAuB,EAAE;AAC7C,kBAAkB,aAAa,EAAE;AACjC,gBAAgB;AAChB,gBAAgB,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC;AAChC,gBAAgB;AAChB,cAAc;AACd,cAAc,KAAK,MAAM;AACzB,cAAc,KAAK,WAAW,EAAE;AAChC,gBAAgB,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC;AAChC,gBAAgB;AAChB,cAAc;AACd,cAAc,KAAK,iBAAiB,EAAE;AACtC,gBAAgB,IAAI,CAAC,CAAC,EAAE,GAAG,IAAI,CAAC,cAAc,CAAC,IAAI,KAAK,IAAI,GAAG,EAAE,GAAG,CAAC,MAAM,WAAW,EAAE;AACxF,kBAAkB,aAAa,EAAE;AACjC,gBAAgB;AAChB,gBAAgB,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC;AAChC,gBAAgB,uBAAuB,GAAG,IAAI;AAC9C,gBAAgB;AAChB,cAAc;AACd;AACA,UAAU;AACV,UAAU,aAAa,EAAE;AACzB,UAAU;AACV,QAAQ;AACR,QAAQ,MAAM,eAAe,GAAG,OAAO,CAAC,eAAe;AACvD,QAAQ,IAAI,eAAe,IAAI,IAAI,IAAI,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE;AACrE,UAAU,YAAY,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,WAAW,EAAE,OAAO,EAAE,CAAC;AAC3D,UAAU;AACV,QAAQ;AACR,QAAQ,MAAM,OAAO,GAAG,eAAe,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,cAAc,KAAK;AACxE,UAAU,IAAI,IAAI;AAClB,UAAU,OAAO,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC,IAAI,GAAG,cAAc,CAAC,IAAI,KAAK,IAAI,GAAG,IAAI,GAAG,CAAC,CAAC;AAC/E,QAAQ,CAAC,EAAE,CAAC,CAAC;AACb,QAAQ,KAAK,IAAI,EAAE,GAAG,CAAC,EAAE,EAAE,IAAI,OAAO,EAAE,EAAE,EAAE,EAAE;AAC9C,UAAU,MAAM,eAAe,GAAG,eAAe,CAAC,MAAM;AACxD,YAAY,CAAC,cAAc,KAAK;AAChC,cAAc,IAAI,IAAI;AACtB,cAAc,OAAO,CAAC,CAAC,IAAI,GAAG,cAAc,CAAC,IAAI,KAAK,IAAI,GAAG,IAAI,GAAG,CAAC,MAAM,EAAE;AAC7E,YAAY;AACZ,WAAW;AACX,UAAU,IAAI,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE;AAC5C,YAAY;AACZ,UAAU;AACV,UAAU,YAAY,CAAC,IAAI,CAAC;AAC5B,YAAY,IAAI,EAAE,WAAW;AAC7B,YAAY,OAAO,EAAE;AACrB,c
AAc,GAAG,aAAa,IAAI,OAAO,IAAI,EAAE,KAAK,CAAC,GAAG,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,GAAG,EAAE;AAC9F,cAAc,GAAG,eAAe,CAAC,GAAG;AACpC,gBAAgB,CAAC,EAAE,UAAU,EAAE,QAAQ,EAAE,IAAI,EAAE,MAAM;AACrD,kBAAkB,IAAI,EAAE,WAAW;AACnC,kBAAkB,UAAU;AAC5B,kBAAkB,QAAQ;AAC1B,kBAAkB;AAClB,iBAAiB;AACjB;AACA;AACA,WAAW,CAAC;AACZ,UAAU,YAAY,CAAC,IAAI,CAAC;AAC5B,YAAY,IAAI,EAAE,MAAM;AACxB,YAAY,OAAO,EAAE,eAAe,CAAC,GAAG,CAAC,CAAC,cAAc,KAAK;AAC7D,cAAc,IAAI,EAAE,QAAQ,IAAI,cAAc,CAAC,EAAE;AACjD,gBAAgB,MAAM,IAAI,sBAAsB,CAAC;AACjD,kBAAkB,eAAe,EAAE,OAAO;AAC1C,kBAAkB,OAAO,EAAE,qCAAqC,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc;AAChG,iBAAiB,CAAC;AAClB,cAAc;AACd,cAAc,MAAM,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,cAAc;AACrE,cAAc,MAAM,KAAK,GAAG,KAAK,CAAC,QAAQ,CAAC;AAC3C,cAAc,OAAO,CAAC,KAAK,IAAI,IAAI,GAAG,MAAM,GAAG,KAAK,CAAC,gCAAgC,KAAK,IAAI,GAAG;AACjG,gBAAgB,IAAI,EAAE,aAAa;AACnC,gBAAgB,UAAU;AAC1B,gBAAgB,QAAQ;AACxB,gBAAgB,MAAM,EAAE,KAAK,CAAC,gCAAgC,CAAC,MAAM,CAAC;AACtE,gBAAgB,oBAAoB,EAAE,KAAK,CAAC,gCAAgC,CAAC,MAAM;AACnF,eAAe,GAAG;AAClB,gBAAgB,IAAI,EAAE,aAAa;AACnC,gBAAgB,UAAU;AAC1B,gBAAgB,QAAQ;AACxB,gBAAgB;AAChB,eAAe;AACf,YAAY,CAAC;AACb,WAAW,CAAC;AACZ,QAAQ;AACR,QAAQ,IAAI,OAAO,IAAI,CAAC,aAAa,EAAE;AACvC,UAAU,YAAY,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,WAAW,EAAE,OAAO,EAAE,CAAC;AAC3D,QAAQ;AACR,QAAQ;AACR,MAAM;AACN,MAAM,KAAK,MAAM,EAAE;AACnB,QAAQ;AACR,MAAM;AACN,MAAM,SAAS;AACf,QAAQ,MAAM,gBAAgB,GAAG,IAAI;AACrC,QAAQ,MAAM,IAAI,sBAAsB,CAAC;AACzC,UAAU,eAAe,EAAE,OAAO;AAClC,UAAU,OAAO,EAAE,CAAC,kBAAkB,EAAE,gBAAgB,CAAC;AACzD,SAAS,CAAC;AACV,MAAM;AACN;AACA,EAAE;AACF,EAAE,OAAO,YAAY;AACrB;AAUA,IAAI,eAAe,GAAGC,CAAE,CAAC,IAAI;AAC7B,EAAE,MAAMA,CAAE,CAAC,KAAK,CAAC;AACjB,IAAIA,CAAE,CAAC,IAAI,EAAE;AACb,IAAIA,CAAE,CAAC,MAAM,EAAE;AACf,IAAIA,CAAE,CAAC,MAAM,EAAE;AACf,IAAIA,CAAE,CAAC,OAAO,EAAE;AAChB,IAAIA,CAAE,CAAC,MAAM,CAACA,CAAE,CAAC,MAAM,EAAE,EAAE,eAAe,CAAC;AAC3C,IAAIA,CAAE,CAAC,KAAK,CAAC,eAAe;AAC5B,GAAG;AACH,CAAC;;AAED;AACA,IAAI,sBAAsB,GAAGC,CAAE,CAAC,MAAM;AACtC,EAAEA,CAAE,CAAC,MAAM,EAAE;AACb,EAAEA,CAAE,CAAC,MAAM,CAACA,CAAE,CAAC,MAAM,EAAE,EAAE
,eAAe;AACxC,CAAC;AAOD,IAAI,uBAAuB,GAAGC,CAAE,CAAC,KAAK;AACtC,EAAEA,CAAE,CAAC,KAAK,CAAC;AACX,IAAIA,CAAE,CAAC,MAAM,CAAC,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE,IAAI,EAAEA,CAAE,CAAC,MAAM,EAAE,EAAE,CAAC;AAC9D,IAAIA,CAAE,CAAC,MAAM,CAAC;AACd,MAAM,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,OAAO,CAAC;AAC/B,MAAM,IAAI,EAAEA,CAAE,CAAC,MAAM,EAAE;AACvB,MAAM,QAAQ,EAAEA,CAAE,CAAC,MAAM,EAAE,CAAC,QAAQ;AACpC,KAAK;AACL,GAAG;AACH,CAAC;;AAED;AACA,IAAI,cAAc,GAAGC,CAAE,CAAC,MAAM,CAAC;AAC/B,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,MAAM,CAAC;AAC1B,EAAE,IAAI,EAAEA,CAAE,CAAC,MAAM,EAAE;AACnB,EAAE,eAAe,EAAE,sBAAsB,CAAC,QAAQ,EAAE;AACpD,EAAE,6BAA6B,EAAE,sBAAsB,CAAC,QAAQ;AAChE,CAAC,CAAC;AACF,IAAI,eAAe,GAAGA,CAAE,CAAC,MAAM,CAAC;AAChC,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,OAAO,CAAC;AAC3B,EAAE,KAAK,EAAEA,CAAE,CAAC,KAAK,CAAC,CAAC,iBAAiB,EAAEA,CAAE,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC;AAC1D,EAAE,QAAQ,EAAEA,CAAE,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE;AAClC,EAAE,eAAe,EAAE,sBAAsB,CAAC,QAAQ,EAAE;AACpD,EAAE,6BAA6B,EAAE,sBAAsB,CAAC,QAAQ;AAChE,CAAC,CAAC;AACF,IAAI,cAAc,GAAGA,CAAE,CAAC,MAAM,CAAC;AAC/B,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,MAAM,CAAC;AAC1B,EAAE,IAAI,EAAEA,CAAE,CAAC,KAAK,CAAC,CAAC,iBAAiB,EAAEA,CAAE,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC;AACzD,EAAE,QAAQ,EAAEA,CAAE,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE;AAClC,EAAE,QAAQ,EAAEA,CAAE,CAAC,MAAM,EAAE;AACvB,EAAE,eAAe,EAAE,sBAAsB,CAAC,QAAQ,EAAE;AACpD,EAAE,6BAA6B,EAAE,sBAAsB,CAAC,QAAQ;AAChE,CAAC,CAAC;AACF,IAAI,mBAAmB,GAAGA,CAAE,CAAC,MAAM,CAAC;AACpC,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,WAAW,CAAC;AAC/B,EAAE,IAAI,EAAEA,CAAE,CAAC,MAAM,EAAE;AACnB,EAAE,eAAe,EAAE,sBAAsB,CAAC,QAAQ,EAAE;AACpD,EAAE,6BAA6B,EAAE,sBAAsB,CAAC,QAAQ;AAChE,CAAC,CAAC;AACF,IAAI,2BAA2B,GAAGA,CAAE,CAAC,MAAM,CAAC;AAC5C,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,oBAAoB,CAAC;AACxC,EAAE,IAAI,EAAEA,CAAE,CAAC,MAAM,EAAE;AACnB,EAAE,eAAe,EAAE,sBAAsB,CAAC,QAAQ,EAAE;AACpD,EAAE,6BAA6B,EAAE,sBAAsB,CAAC,QAAQ;AAChE,CAAC,CAAC;AACF,IAAI,kBAAkB,GAAGA,CAAE,CAAC,MAAM,CAAC;AACnC,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,WAAW,CAAC;AAC/B,EAAE,UAAU,EAAEA,CAAE,CAAC,M
AAM,EAAE;AACzB,EAAE,QAAQ,EAAEA,CAAE,CAAC,MAAM,EAAE;AACvB,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,EAAE;AACpB,EAAE,eAAe,EAAE,sBAAsB,CAAC,QAAQ,EAAE;AACpD,EAAE,6BAA6B,EAAE,sBAAsB,CAAC,QAAQ;AAChE,CAAC,CAAC;AACF,IAAI,oBAAoB,GAAGA,CAAE,CAAC,MAAM,CAAC;AACrC,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,aAAa,CAAC;AACjC,EAAE,UAAU,EAAEA,CAAE,CAAC,MAAM,EAAE;AACzB,EAAE,QAAQ,EAAEA,CAAE,CAAC,MAAM,EAAE;AACvB,EAAE,MAAM,EAAEA,CAAE,CAAC,OAAO,EAAE;AACtB,EAAE,OAAO,EAAE,uBAAuB,CAAC,QAAQ,EAAE;AAC7C,EAAE,OAAO,EAAEA,CAAE,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE;AAClC,EAAE,eAAe,EAAE,sBAAsB,CAAC,QAAQ,EAAE;AACpD,EAAE,6BAA6B,EAAE,sBAAsB,CAAC,QAAQ;AAChE,CAAC,CAAC;;AAEF;AACG,IAAC,uBAAuB,GAAGC,CAAE,CAAC,MAAM,CAAC;AACxC,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,QAAQ,CAAC;AAC5B,EAAE,OAAO,EAAEA,CAAE,CAAC,MAAM,EAAE;AACtB,EAAE,eAAe,EAAE,sBAAsB,CAAC,QAAQ,EAAE;AACpD,EAAE,6BAA6B,EAAE,sBAAsB,CAAC,QAAQ;AAChE,CAAC;AACE,IAAC,qBAAqB,GAAGA,CAAE,CAAC,MAAM,CAAC;AACtC,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,MAAM,CAAC;AAC1B,EAAE,OAAO,EAAEA,CAAE,CAAC,KAAK,CAAC;AACpB,IAAIA,CAAE,CAAC,MAAM,EAAE;AACf,IAAIA,CAAE,CAAC,KAAK,CAACA,CAAE,CAAC,KAAK,CAAC,CAAC,cAAc,EAAE,eAAe,EAAE,cAAc,CAAC,CAAC;AACxE,GAAG,CAAC;AACJ,EAAE,eAAe,EAAE,sBAAsB,CAAC,QAAQ,EAAE;AACpD,EAAE,6BAA6B,EAAE,sBAAsB,CAAC,QAAQ;AAChE,CAAC;AACE,IAAC,0BAA0B,GAAGA,CAAE,CAAC,MAAM,CAAC;AAC3C,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,WAAW,CAAC;AAC/B,EAAE,OAAO,EAAEA,CAAE,CAAC,KAAK,CAAC;AACpB,IAAIA,CAAE,CAAC,MAAM,EAAE;AACf,IAAIA,CAAE,CAAC,KAAK;AACZ,MAAMA,CAAE,CAAC,KAAK,CAAC;AACf,QAAQ,cAAc;AACtB,QAAQ,cAAc;AACtB,QAAQ,mBAAmB;AAC3B,QAAQ,2BAA2B;AACnC,QAAQ;AACR,OAAO;AACP;AACA,GAAG,CAAC;AACJ,EAAE,eAAe,EAAE,sBAAsB,CAAC,QAAQ,EAAE;AACpD,EAAE,6BAA6B,EAAE,sBAAsB,CAAC,QAAQ;AAChE,CAAC;AACE,IAAC,qBAAqB,GAAGA,CAAE,CAAC,MAAM,CAAC;AACtC,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,MAAM,CAAC;AAC1B,EAAE,OAAO,EAAEA,CAAE,CAAC,KAAK,CAAC,oBAAoB,CAAC;AACzC,EAAE,eAAe,EAAE,sBAAsB,CAAC,QAAQ,EAAE;AACpD,EAAE,6BAA6B,EAAE,sBAAsB,CAAC,QAAQ;AAChE,CAAC;AACE,IAAC,iBAAiB,GAAGA,CAAE,CAAC,KAAK,CAAC;AACjC,EAAE,uBAAuB;AACzB,EAAE,qBAAqB;AACvB,EAAE,0BAA0B;AAC5B,EAAE;AAC
F,CAAC;;AAED;AACA,SAAS,iBAAiB,CAAC;AAC3B,EAAE,MAAM;AACR,EAAE;AACF,CAAC,EAAE;AACH,EAAE,IAAI,MAAM,CAAC,MAAM,IAAI,IAAI,IAAI,MAAM,CAAC,QAAQ,IAAI,IAAI,EAAE;AACxD,IAAI,MAAM,IAAI,kBAAkB,CAAC;AACjC,MAAM,MAAM;AACZ,MAAM,OAAO,EAAE;AACf,KAAK,CAAC;AACN,EAAE;AACF,EAAE,IAAI,MAAM,CAAC,MAAM,IAAI,IAAI,IAAI,MAAM,CAAC,QAAQ,IAAI,IAAI,EAAE;AACxD,IAAI,MAAM,IAAI,kBAAkB,CAAC;AACjC,MAAM,MAAM;AACZ,MAAM,OAAO,EAAE;AACf,KAAK,CAAC;AACN,EAAE;AACF,EAAE,IAAI,MAAM,CAAC,MAAM,IAAI,IAAI,IAAI,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ,EAAE;AAClE,IAAI,MAAM,IAAI,kBAAkB,CAAC;AACjC,MAAM,MAAM;AACZ,MAAM,OAAO,EAAE;AACf,KAAK,CAAC;AACN,EAAE;AACF,EAAE,IAAI,MAAM,CAAC,MAAM,IAAI,IAAI,EAAE;AAC7B,IAAI,IAAI,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ,EAAE;AAC3C,MAAM,MAAM,IAAI,kBAAkB,CAAC;AACnC,QAAQ,MAAM;AACd,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,OAAO;AACX,MAAM,IAAI,EAAE,QAAQ;AACpB,MAAM,MAAM,EAAE,MAAM,CAAC,MAAM;AAC3B,MAAM,QAAQ,EAAE;AAChB,QAAQ;AACR,UAAU,IAAI,EAAE,MAAM;AACtB,UAAU,OAAO,EAAE,MAAM,CAAC;AAC1B;AACA;AACA,KAAK;AACL,EAAE;AACF,EAAE,IAAI,MAAM,CAAC,QAAQ,IAAI,IAAI,EAAE;AAC/B,IAAI,MAAM,UAAU,GAAG,gBAAgB,CAAC,MAAM,CAAC,QAAQ,CAAC;AACxD,IAAI,MAAM,QAAQ,GAAG,UAAU,KAAK,aAAa,GAAG,qBAAqB,CAAC,MAAM,CAAC,QAAQ,EAAE;AAC3F,MAAM;AACN,KAAK,CAAC,GAAG,MAAM,CAAC,QAAQ;AACxB,IAAI,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE;AAC/B,MAAM,MAAM,IAAI,kBAAkB,CAAC;AACnC,QAAQ,MAAM;AACd,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,MAAM,gBAAgB,GAAG,iBAAiB,CAAC;AAC/C,MAAM,KAAK,EAAE,QAAQ;AACrB,MAAM,MAAM,EAAEC,CAAE,CAAC,KAAK,CAAC,iBAAiB;AACxC,KAAK,CAAC;AACN,IAAI,IAAI,CAAC,gBAAgB,CAAC,OAAO,EAAE;AACnC,MAAM,MAAM,IAAI,kBAAkB,CAAC;AACnC,QAAQ,MAAM;AACd,QAAQ,OAAO,EAAE;AACjB,UAAU,+CAA+C;AACzD,UAAU,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,KAAK,CAAC,OAAO,CAAC;AAC9D,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC;AACpB,QAAQ,KAAK,EAAE,gBAAgB,CAAC;AAChC,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,OAAO;AACX,MAAM,IAAI,EAAE,UAAU;AACtB,MAAM,QAAQ;AACd,MAAM,MAAM,EAAE,MAAM,CAAC;AACrB,KAAK;AACL,EAAE;AACF,EAAE,MAAM,IAAI,KAAK,CAAC,aAAa,CAAC;AAChC;AACA,SAAS,gBAAgB,CAAC,MAAM,EAAE;AAClC,EAAE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;AAC
9B,IAAI,MAAM,IAAI,kBAAkB,CAAC;AACjC,MAAM,MAAM;AACZ,MAAM,OAAO,EAAE;AACf,QAAQ,uDAAuD;AAC/D,QAAQ,CAAC,0BAA0B,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;AAC5D,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC;AAClB,MAAM,KAAK,EAAE;AACb,KAAK,CAAC;AACN,EAAE;AACF,EAAE,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE;AAC3B,IAAI,OAAO,UAAU;AACrB,EAAE;AACF,EAAE,MAAM,eAAe,GAAG,MAAM,CAAC,GAAG,CAAC,kCAAkC,CAAC;AACxE,EAAE,IAAI,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,uBAAuB,CAAC,EAAE;AAClE,IAAI,OAAO,aAAa;AACxB,EAAE;AACF,EAAE,MAAM,eAAe,GAAG,eAAe,CAAC,SAAS;AACnD,IAAI,CAAC,CAAC,KAAK,CAAC,KAAK,yBAAyB,IAAI,CAAC,KAAK;AACpD,GAAG;AACH,EAAE,IAAI,eAAe,KAAK,EAAE,EAAE;AAC9B,IAAI,OAAO,UAAU;AACrB,EAAE;AACF,EAAE,MAAM,IAAI,kBAAkB,CAAC;AAC/B,IAAI,MAAM;AACV,IAAI,OAAO,EAAE;AACb,MAAM,uDAAuD;AAC7D,MAAM,CAAC,2BAA2B,EAAE,eAAe,CAAC,eAAe,CAAC,CAAC,WAAW,EAAE,eAAe,CAAC,CAAC;AACnG,MAAM,CAAC,SAAS,EAAE,eAAe,CAAC,GAAG,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC,CAAC;AAC/E,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC;AAChB,IAAI,KAAK,EAAE;AACX,GAAG,CAAC;AACJ;AACA,SAAS,kCAAkC,CAAC,OAAO,EAAE;AACrD,EAAE,IAAI,OAAO,OAAO,KAAK,QAAQ,IAAI,OAAO,KAAK,IAAI,KAAK,OAAO,CAAC,IAAI,KAAK,UAAU;AACrF,EAAE,OAAO,CAAC,IAAI,KAAK,MAAM;AACzB,EAAE,iBAAiB,IAAI,OAAO;AAC9B,EAAE,OAAO,IAAI,OAAO;AACpB,EAAE,0BAA0B,IAAI,OAAO,CAAC,EAAE;AAC1C,IAAI,OAAO,uBAAuB;AAClC,EAAE,CAAC,MAAM,IAAI,OAAO,OAAO,KAAK,QAAQ,IAAI,OAAO,KAAK,IAAI,IAAI,SAAS,IAAI,OAAO,KAAK,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;AACvH,EAAE,+BAA+B,IAAI,OAAO,IAAI,iBAAiB,IAAI,OAAO,CAAC,EAAE;AAC/E,IAAI,OAAO,yBAAyB;AACpC,EAAE,CAAC,MAAM,IAAI,OAAO,OAAO,KAAK,QAAQ,IAAI,OAAO,KAAK,IAAI,IAAI,MAAM,IAAI,OAAO,IAAI,SAAS,IAAI,OAAO,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,IAAI,CAAC,QAAQ,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;AACpN,IAAI,OAAO,SAAS;AACpB,EAAE,CAAC,MAAM;AACT,IAAI,OAAO,OAAO;AAClB,EAAE;AACF;;AAEA;AACA,SAAS,2BAA2B,CAAC;AACrC,EAAE,YAAY;AACd,EAAE;AACF,CAAC,EAAE;AACH,EAAE,OAAO;AACT,IAAI,YAAY;AAChB,IAAI,gBAAgB;AACpB,IAAI,WAAW,EAAE,YAAY,GAAG;AAChC,GAAG;AACH;;AASA;AACA,IAAI,qBAAqB,GAAG,cAAc;AAC1C,IAAI,qBAAqB,GAAG,wEAA
wE;AACpG,IAAI,sBAAsB,GAAG,4BAA4B;AACzD,SAAS,qBAAqB,CAAC;AAC/B,EAAE,MAAM;AACR,EAAE,MAAM;AACR,EAAE,YAAY,GAAG,MAAM,IAAI,IAAI,GAAG,qBAAqB,GAAG,MAAM;AAChE,EAAE,YAAY,GAAG,MAAM,IAAI,IAAI,GAAG,qBAAqB,GAAG;AAC1D,CAAC,EAAE;AACH,EAAE,OAAO;AACT,IAAI,MAAM,IAAI,IAAI,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,MAAM,GAAG,MAAM;AACzD,IAAI,MAAM,IAAI,IAAI,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,EAAE,GAAG,MAAM;AACrD;AACA,IAAI,YAAY;AAChB,IAAI,MAAM,IAAI,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,GAAG,MAAM;AACpD,IAAI;AACJ,GAAG,CAAC,MAAM,CAAC,CAAC,IAAI,KAAK,IAAI,IAAI,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC;AAC7C;;AAYA;AACA,SAAS,yBAAyB,CAAC,MAAM,EAAE;AAC3C,EAAE,MAAM,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,eAAe,EAAE,CAAC;AAC1D,EAAE,MAAM,CAAC,MAAM,CAAC,aAAa,CAAC,GAAG,MAAM;AACvC,IAAI,MAAM,MAAM,GAAG,MAAM,CAAC,SAAS,EAAE;AACrC,IAAI,OAAO;AACX,MAAM,MAAM,IAAI,GAAG;AACnB,QAAQ,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,MAAM,CAAC,IAAI,EAAE;AACnD,QAAQ,OAAO,IAAI,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE;AAC5E,MAAM;AACN,KAAK;AACL,EAAE,CAAC;AACH,EAAE,OAAO,MAAM;AACf;;AAEA;AACA,IAAI,sBAAsB,GAAG;AAC7B,EAAE,IAAI,EAAE,WAAW;AACnB,EAAE,UAAU,EAAE,MAAM;AACpB,EAAE,qBAAqB,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,EAAE;AAC9C,IAAI,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,EAAE;AAClE,EAAE,CAAC;AACH,EAAE,mBAAmB,CAAC,KAAK,EAAE,OAAO,EAAE;AACtC,IAAI,OAAO,KAAK,KAAK,MAAM,GAAG;AAC9B,MAAM,OAAO,EAAE,KAAK;AACpB,MAAM,KAAK,EAAE,IAAI,sBAAsB,CAAC;AACxC,QAAQ,OAAO,EAAE,qDAAqD;AACtE,QAAQ,IAAI,EAAE,OAAO,CAAC,IAAI;AAC1B,QAAQ,QAAQ,EAAE,OAAO,CAAC,QAAQ;AAClC,QAAQ,KAAK,EAAE,OAAO,CAAC,KAAK;AAC5B,QAAQ,YAAY,EAAE,OAAO,CAAC;AAC9B,OAAO;AACP,KAAK,GAAG,EAAE,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE;AAChC,EAAE,CAAC;AACH,EAAE,mBAAmB,GAAG;AACxB,IAAI,MAAM,IAAI,6BAA6B,CAAC;AAC5C,MAAM,aAAa,EAAE;AACrB,KAAK,CAAC;AACN,EAAE;AACF,CAAC;AACD,IAAI,oBAAoB,GAAG,CAAC,MAAM,MAAM;AACxC,EAAE,IAAI,EAAE,QAAQ;AAChB,EAAE,UAAU,EAAE,MAAM,CAAC,UAAU;AAC/B,EAAE,qBAAqB,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,EAAE;AAC9C,IAAI,OAAO;AACX,MAAM,OAAO,EAAE,IAAI;AACnB,MAAM,
KAAK,EAAE;AACb;AACA,QAAQ,OAAO,EAAE,KAAK;AACtB,QAAQ;AACR;AACA,KAAK;AACL,EAAE,CAAC;AACH,EAAE,mBAAmB,CAAC,KAAK,EAAE;AAC7B,IAAI,OAAOC,iBAAkB,CAAC,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC;AAChD,EAAE,CAAC;AACH,EAAE,mBAAmB,GAAG;AACxB,IAAI,MAAM,IAAI,6BAA6B,CAAC;AAC5C,MAAM,aAAa,EAAE;AACrB,KAAK,CAAC;AACN,EAAE;AACF,CAAC,CAAC;AACF,IAAI,mBAAmB,GAAG,CAAC,MAAM,KAAK;AACtC,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,UAAU,EAAE,GAAG,MAAM,CAAC,UAAU;AACtD,EAAE,OAAO;AACT,IAAI,IAAI,EAAE,MAAM;AAChB;AACA;AACA;AACA,IAAI,UAAU,EAAE;AAChB,MAAM,OAAO,EAAE,yCAAyC;AACxD,MAAM,IAAI,EAAE,QAAQ;AACpB,MAAM,UAAU,EAAE;AAClB,QAAQ,QAAQ,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,UAAU;AACpD,OAAO;AACP,MAAM,QAAQ,EAAE,CAAC,UAAU,CAAC;AAC5B,MAAM,oBAAoB,EAAE;AAC5B,KAAK;AACL,IAAI,qBAAqB,CAAC,EAAE,KAAK,EAAE,YAAY,EAAE,YAAY,EAAE,YAAY,EAAE,EAAE;AAC/E,MAAM,IAAI,IAAI;AACd,MAAM,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE;AAChE,QAAQ,OAAO;AACf,UAAU,OAAO,EAAE,KAAK;AACxB,UAAU,KAAK,EAAE,IAAI,mBAAmB,CAAC;AACzC,YAAY,KAAK;AACjB,YAAY,KAAK,EAAE;AACnB,WAAW;AACX,SAAS;AACT,MAAM;AACN,MAAM,MAAM,UAAU,GAAG,KAAK,CAAC,QAAQ;AACvC,MAAM,MAAM,WAAW,GAAG,EAAE;AAC5B,MAAM,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAClD,QAAQ,MAAM,OAAO,GAAG,UAAU,CAAC,CAAC,CAAC;AACrC,QAAQ,MAAM,MAAM,GAAGA,iBAAkB,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;AACrE,QAAQ,IAAI,CAAC,KAAK,UAAU,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,YAAY,EAAE;AAC1D,UAAU;AACV,QAAQ;AACR,QAAQ,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;AAC7B,UAAU,OAAO,MAAM;AACvB,QAAQ;AACR,QAAQ,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;AACtC,MAAM;AACN,MAAM,MAAM,qBAAqB,GAAG,CAAC,IAAI,GAAG,YAAY,IAAI,IAAI,GAAG,MAAM,GAAG,YAAY,CAAC,MAAM,KAAK,IAAI,GAAG,IAAI,GAAG,CAAC;AACnH,MAAM,IAAI,SAAS,GAAG,EAAE;AACxB,MAAM,IAAI,YAAY,EAAE;AACxB,QAAQ,SAAS,IAAI,GAAG;AACxB,MAAM;AACN,MAAM,IAAI,qBAAqB,GAAG,CAAC,EAAE;AACrC,QAAQ,SAAS,IAAI,GAAG;AACxB,MAAM;AACN,MAAM,SAAS,IAAI,WAAW,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC,GAAG,CAAC,CAAC,OAAO,KAAK,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AAC/G,MAAM,IAAI,YAAY,EAAE;AACxB,QAAQ,SAA
S,IAAI,GAAG;AACxB,MAAM;AACN,MAAM,OAAO;AACb,QAAQ,OAAO,EAAE,IAAI;AACrB,QAAQ,KAAK,EAAE;AACf,UAAU,OAAO,EAAE,WAAW;AAC9B,UAAU;AACV;AACA,OAAO;AACP,IAAI,CAAC;AACL,IAAI,mBAAmB,CAAC,KAAK,EAAE;AAC/B,MAAM,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE;AAChE,QAAQ,OAAO;AACf,UAAU,OAAO,EAAE,KAAK;AACxB,UAAU,KAAK,EAAE,IAAI,mBAAmB,CAAC;AACzC,YAAY,KAAK;AACjB,YAAY,KAAK,EAAE;AACnB,WAAW;AACX,SAAS;AACT,MAAM;AACN,MAAM,MAAM,UAAU,GAAG,KAAK,CAAC,QAAQ;AACvC,MAAM,KAAK,MAAM,OAAO,IAAI,UAAU,EAAE;AACxC,QAAQ,MAAM,MAAM,GAAGA,iBAAkB,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;AACrE,QAAQ,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;AAC7B,UAAU,OAAO,MAAM;AACvB,QAAQ;AACR,MAAM;AACN,MAAM,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,UAAU,EAAE;AACjD,IAAI,CAAC;AACL,IAAI,mBAAmB,CAAC,cAAc,EAAE;AACxC,MAAM,IAAI,iBAAiB,GAAG,CAAC;AAC/B,MAAM,OAAO,yBAAyB;AACtC,QAAQ,cAAc,CAAC,WAAW;AAClC,UAAU,IAAI,eAAe,CAAC;AAC9B,YAAY,SAAS,CAAC,KAAK,EAAE,UAAU,EAAE;AACzC,cAAc,QAAQ,KAAK,CAAC,IAAI;AAChC,gBAAgB,KAAK,QAAQ,EAAE;AAC/B,kBAAkB,MAAM,KAAK,GAAG,KAAK,CAAC,MAAM;AAC5C,kBAAkB,OAAO,iBAAiB,GAAG,KAAK,CAAC,MAAM,EAAE,iBAAiB,EAAE,EAAE;AAChF,oBAAoB,UAAU,CAAC,OAAO,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAC;AAChE,kBAAkB;AAClB,kBAAkB;AAClB,gBAAgB;AAChB,gBAAgB,KAAK,YAAY;AACjC,gBAAgB,KAAK,QAAQ;AAC7B,gBAAgB,KAAK,OAAO;AAC5B,kBAAkB;AAClB,gBAAgB,SAAS;AACzB,kBAAkB,MAAM,gBAAgB,GAAG,KAAK;AAChD,kBAAkB,MAAM,IAAI,KAAK;AACjC,oBAAoB,CAAC,wBAAwB,EAAE,gBAAgB,CAAC;AAChE,mBAAmB;AACnB,gBAAgB;AAChB;AACA,YAAY;AACZ,WAAW;AACX;AACA,OAAO;AACP,IAAI;AACJ,GAAG;AACH,CAAC;AACD,IAAI,kBAAkB,GAAG,CAAC,UAAU,KAAK;AACzC,EAAE,OAAO;AACT,IAAI,IAAI,EAAE,MAAM;AAChB;AACA;AACA;AACA,IAAI,UAAU,EAAE;AAChB,MAAM,OAAO,EAAE,yCAAyC;AACxD,MAAM,IAAI,EAAE,QAAQ;AACpB,MAAM,UAAU,EAAE;AAClB,QAAQ,MAAM,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAE,UAAU;AAClD,OAAO;AACP,MAAM,QAAQ,EAAE,CAAC,QAAQ,CAAC;AAC1B,MAAM,oBAAoB,EAAE;AAC5B,KAAK;AACL,IAAI,mBAAmB,CAAC,KAAK,EAAE;AAC/B,MAAM,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,IAAI,OAAO,KAAK,CAAC,MAAM,KAAK,QAAQ,EAAE;AACpE,QAAQ,OAAO;AACf,UAAU,OAAO,EAAE,KAAK;AACxB,UAAU,KAAK,EAAE,IAAI,mBAAm
B,CAAC;AACzC,YAAY,KAAK;AACjB,YAAY,KAAK,EAAE;AACnB,WAAW;AACX,SAAS;AACT,MAAM;AACN,MAAM,MAAM,MAAM,GAAG,KAAK,CAAC,MAAM;AACjC,MAAM,OAAO,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,EAAE,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG;AAC9E,QAAQ,OAAO,EAAE,KAAK;AACtB,QAAQ,KAAK,EAAE,IAAI,mBAAmB,CAAC;AACvC,UAAU,KAAK;AACf,UAAU,KAAK,EAAE;AACjB,SAAS;AACT,OAAO;AACP,IAAI,CAAC;AACL,IAAI,qBAAqB,GAAG;AAC5B,MAAM,MAAM,IAAI,6BAA6B,CAAC;AAC9C,QAAQ,aAAa,EAAE;AACvB,OAAO,CAAC;AACR,IAAI,CAAC;AACL,IAAI,mBAAmB,GAAG;AAC1B,MAAM,MAAM,IAAI,6BAA6B,CAAC;AAC9C,QAAQ,aAAa,EAAE;AACvB,OAAO,CAAC;AACR,IAAI;AACJ,GAAG;AACH,CAAC;AACD,SAAS,iBAAiB,CAAC;AAC3B,EAAE,MAAM;AACR,EAAE,MAAM;AACR,EAAE;AACF,CAAC,EAAE;AACH,EAAE,QAAQ,MAAM;AAChB,IAAI,KAAK,QAAQ;AACjB,MAAM,OAAO,oBAAoB,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;AACnD,IAAI,KAAK,OAAO;AAChB,MAAM,OAAO,mBAAmB,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;AAClD,IAAI,KAAK,MAAM;AACf,MAAM,OAAO,kBAAkB,CAAC,UAAU,CAAC;AAC3C,IAAI,KAAK,WAAW;AACpB,MAAM,OAAO,sBAAsB;AACnC,IAAI,SAAS;AACb,MAAM,MAAM,gBAAgB,GAAG,MAAM;AACrC,MAAM,MAAM,IAAI,KAAK,CAAC,CAAC,oBAAoB,EAAE,gBAAgB,CAAC,CAAC,CAAC;AAChE,IAAI;AACJ;AACA;;AAEA;AACA,SAAS,6BAA6B,CAAC;AACvC,EAAE,MAAM;AACR,EAAE,IAAI;AACN,EAAE,MAAM;AACR,EAAE,UAAU;AACZ,EAAE,iBAAiB;AACnB,EAAE;AACF,CAAC,EAAE;AACH,EAAE,IAAI,MAAM,IAAI,IAAI,IAAI,MAAM,KAAK,QAAQ,IAAI,MAAM,KAAK,OAAO,IAAI,MAAM,KAAK,MAAM,IAAI,MAAM,KAAK,WAAW,EAAE;AAClH,IAAI,MAAM,IAAI,oBAAoB,CAAC;AACnC,MAAM,SAAS,EAAE,QAAQ;AACzB,MAAM,KAAK,EAAE,MAAM;AACnB,MAAM,OAAO,EAAE;AACf,KAAK,CAAC;AACN,EAAE;AACF,EAAE,IAAI,MAAM,KAAK,WAAW,EAAE;AAC9B,IAAI,IAAI,IAAI,KAAK,MAAM,IAAI,IAAI,KAAK,MAAM,EAAE;AAC5C,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,MAAM;AACzB,QAAQ,KAAK,EAAE,IAAI;AACnB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,IAAI,MAAM,IAAI,IAAI,EAAE;AACxB,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,QAAQ;AAC3B,QAAQ,KAAK,EAAE,MAAM;AACrB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,IAAI,iBAAiB,IAAI,IAAI,EAAE;AACnC,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,mBAAmB;AACtC,QAAQ,KAAK,EAAE,iBAAiB;AAChC,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;A
ACR,IAAI;AACJ,IAAI,IAAI,UAAU,IAAI,IAAI,EAAE;AAC5B,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,YAAY;AAC/B,QAAQ,KAAK,EAAE,UAAU;AACzB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,IAAI,UAAU,IAAI,IAAI,EAAE;AAC5B,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,YAAY;AAC/B,QAAQ,KAAK,EAAE,UAAU;AACzB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,EAAE;AACF,EAAE,IAAI,MAAM,KAAK,QAAQ,EAAE;AAC3B,IAAI,IAAI,MAAM,IAAI,IAAI,EAAE;AACxB,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,QAAQ;AAC3B,QAAQ,KAAK,EAAE,MAAM;AACrB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,IAAI,UAAU,IAAI,IAAI,EAAE;AAC5B,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,YAAY;AAC/B,QAAQ,KAAK,EAAE,UAAU;AACzB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,EAAE;AACF,EAAE,IAAI,MAAM,KAAK,OAAO,EAAE;AAC1B,IAAI,IAAI,MAAM,IAAI,IAAI,EAAE;AACxB,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,QAAQ;AAC3B,QAAQ,KAAK,EAAE,MAAM;AACrB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,IAAI,UAAU,IAAI,IAAI,EAAE;AAC5B,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,YAAY;AAC/B,QAAQ,KAAK,EAAE,UAAU;AACzB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,EAAE;AACF,EAAE,IAAI,MAAM,KAAK,MAAM,EAAE;AACzB,IAAI,IAAI,MAAM,IAAI,IAAI,EAAE;AACxB,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,QAAQ;AAC3B,QAAQ,KAAK,EAAE,MAAM;AACrB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,IAAI,iBAAiB,IAAI,IAAI,EAAE;AACnC,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,mBAAmB;AACtC,QAAQ,KAAK,EAAE,iBAAiB;AAChC,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,IAAI,UAAU,IAAI,IAAI,EAAE;AAC5B,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,YAAY;AAC/B,QAAQ,KAAK,EAAE,UAAU;AACzB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,IAAI,UAAU,IAAI,IAAI,EAAE;AAC5B,MAAM,MAAM,IAAI,oBAAoB,CAAC;AACrC,QAAQ,SAAS,EAAE,YAAY;AAC/B,QAAQ,KAAK,EAAE,UAAU;AACzB,QAAQ,OAAO,EAAE;AACjB,OAAO,CAAC;AACR,IAAI;AACJ,IAAI,KAAK,MAAM,KAAK,IAAI,UAAU,EAAE;AACpC,MAAM,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;AACrC,QAAQ,MAAM,IAAI,oBAAoB,CAAC;AACvC,UAAU,SAAS,EAAE,YAAY;AACjC,UAAU,KAAK;AACf,UAAU,OAAO,EAAE;A
ACnB,SAAS,CAAC;AACV,MAAM;AACN,IAAI;AACJ,EAAE;AACF;;AAEA;AACA,SAAS,qBAAqB,CAAC,MAAM,EAAE;AACvC,EAAE,MAAM,eAAe,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,OAAO,KAAK;AAClD,IAAI,OAAO;AACX,MAAM,GAAG,OAAO;AAChB,MAAM,OAAO,EAAE,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,GAAG,OAAO,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,WAAW;AACtG,KAAK;AACL,EAAE,CAAC,CAAC;AACJ,EAAE,OAAO,IAAI,CAAC,SAAS,CAAC,eAAe,CAAC;AACxC;AACA,SAAS,WAAW,CAAC,IAAI,EAAE;AAC3B,EAAE,IAAI,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE;AAC7B,IAAI,OAAO;AACX,MAAM,GAAG,IAAI;AACb,MAAM,KAAK,EAAE,IAAI,CAAC,KAAK,YAAY,UAAU,GAAG,gCAAgC,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC;AACpG,KAAK;AACL,EAAE;AACF,EAAE,OAAO,IAAI;AACb;;AAEA;AACA,IAAI,kBAAkB,GAAG,iBAAiB,CAAC,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;AACzE,eAAe,cAAc,CAAC;AAC9B,EAAE,KAAK;AACP,EAAE,IAAI,EAAE,UAAU;AAClB;AACA,EAAE,MAAM,EAAE,WAAW;AACrB,EAAE,UAAU;AACZ,EAAE,iBAAiB;AACnB,EAAE,IAAI;AACN,EAAE,MAAM,GAAG,QAAQ;AACnB,EAAE,MAAM;AACR,EAAE,MAAM;AACR,EAAE,QAAQ;AACV,EAAE,UAAU,EAAE,aAAa;AAC3B,EAAE,WAAW;AACb,EAAE,OAAO;AACT,EAAE,uBAAuB,EAAE,UAAU;AACrC,EAAE,sBAAsB,EAAE,SAAS;AACnC,EAAE,6BAA6B;AAC/B,EAAE,eAAe,GAAG,6BAA6B;AACjD,EAAE,SAAS,EAAE;AACb,IAAI,UAAU,EAAE,WAAW,GAAG,kBAAkB;AAChD,IAAI,WAAW,GAAG,sBAAsB,IAAI,IAAI;AAChD,GAAG,GAAG,EAAE;AACR,EAAE,GAAG;AACL,CAAC,EAAE;AACH,EAAE,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,oBAAoB,KAAK,IAAI,EAAE;AACxE,IAAI,MAAM,IAAI,4BAA4B,EAAE;AAC5C,EAAE;AACF,EAAE,6BAA6B,CAAC;AAChC,IAAI,MAAM;AACV,IAAI,IAAI;AACR,IAAI,MAAM,EAAE,WAAW;AACvB,IAAI,UAAU;AACd,IAAI,iBAAiB;AACrB,IAAI;AACJ,GAAG,CAAC;AACJ,EAAE,MAAM,EAAE,UAAU,EAAE,KAAK,EAAE,GAAG,cAAc,CAAC,EAAE,UAAU,EAAE,aAAa,EAAE,CAAC;AAC7E,EAAE,MAAM,cAAc,GAAG,iBAAiB,CAAC;AAC3C,IAAI,MAAM;AACV,IAAI,MAAM,EAAE,WAAW;AACvB,IAAI;AACJ,GAAG,CAAC;AACJ,EAAE,IAAI,cAAc,CAAC,IAAI,KAAK,WAAW,IAAI,IAAI,KAAK,MAAM,EAAE;AAC9D,IAAI,IAAI,GAAG,MAAM;AACjB,EAAE;AACF,EAAE,MAAM,uBAAuB,GAAG,0BAA0B,CAAC;AAC7D,IAAI,KAAK;AACT,IAAI,SAAS;AACb,IAAI,OAAO;AACX,IAAI,QAAQ,EAAE,EAAE,GAAG,QAAQ,EAAE,UAAU;AACvC,GAAG,CAAC;AACJ,EAAE,MAAM,MAAM,GAAG,SAAS,CAAC,SAAS,CAAC;AACrC,EAAE,OAAO,UAAU,CAA
C;AACpB,IAAI,IAAI,EAAE,mBAAmB;AAC7B,IAAI,UAAU,EAAE,yBAAyB,CAAC;AAC1C,MAAM,SAAS;AACf,MAAM,UAAU,EAAE;AAClB,QAAQ,GAAG,qBAAqB,CAAC;AACjC,UAAU,WAAW,EAAE,mBAAmB;AAC1C,UAAU;AACV,SAAS,CAAC;AACV,QAAQ,GAAG,uBAAuB;AAClC;AACA,QAAQ,WAAW,EAAE;AACrB,UAAU,KAAK,EAAE,MAAM,IAAI,CAAC,SAAS,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE;AAClE,SAAS;AACT,QAAQ,WAAW,EAAE,cAAc,CAAC,UAAU,IAAI,IAAI,GAAG,EAAE,KAAK,EAAE,MAAM,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,UAAU,CAAC,EAAE,GAAG,MAAM;AAC5H,QAAQ,gBAAgB,EAAE,UAAU;AACpC,QAAQ,uBAAuB,EAAE,iBAAiB;AAClD,QAAQ,oBAAoB,EAAE,cAAc,CAAC,IAAI;AACjD,QAAQ,kBAAkB,EAAE;AAC5B;AACA,KAAK,CAAC;AACN,IAAI,MAAM;AACV,IAAI,EAAE,EAAE,OAAO,IAAI,KAAK;AACxB,MAAM,IAAI,IAAI,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE;AAC1B,MAAM,IAAI,IAAI,KAAK,MAAM,IAAI,IAAI,IAAI,IAAI,EAAE;AAC3C,QAAQ,IAAI,GAAG,KAAK,CAAC,2BAA2B;AAChD,MAAM;AACN,MAAM,IAAI,MAAM;AAChB,MAAM,IAAI,YAAY;AACtB,MAAM,IAAI,KAAK;AACf,MAAM,IAAI,QAAQ;AAClB,MAAM,IAAI,WAAW;AACrB,MAAM,IAAI,QAAQ;AAClB,MAAM,IAAI,OAAO;AACjB,MAAM,IAAI,QAAQ;AAClB,MAAM,IAAI,sBAAsB;AAChC,MAAM,QAAQ,IAAI;AAClB,QAAQ,KAAK,MAAM,EAAE;AACrB,UAAU,MAAM,kBAAkB,GAAG,iBAAiB,CAAC;AACvD,YAAY,MAAM,EAAE;AACpB,cAAc,MAAM,EAAE,cAAc,CAAC,UAAU,IAAI,IAAI,GAAG,qBAAqB,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC,GAAG,KAAK,CAAC,yBAAyB,GAAG,MAAM,GAAG,qBAAqB,CAAC;AACvK,gBAAgB,MAAM,EAAE,MAAM;AAC9B,gBAAgB,MAAM,EAAE,cAAc,CAAC;AACvC,eAAe,CAAC;AAChB,cAAc,MAAM;AACpB,cAAc;AACd,aAAa;AACb,YAAY,KAAK,EAAE,KAAK;AACxB,WAAW,CAAC;AACZ,UAAU,MAAM,cAAc,GAAG,MAAM,4BAA4B,CAAC;AACpE,YAAY,MAAM,EAAE,kBAAkB;AACtC,YAAY,sBAAsB,EAAE,KAAK,CAAC,iBAAiB;AAC3D,YAAY,gBAAgB,EAAE,CAAC,IAAI,GAAG,KAAK,CAAC,WAAW,KAAK,IAAI,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK;AAC3F;AACA,WAAW,CAAC;AACZ,UAAU,MAAM,cAAc,GAAG,MAAM,KAAK;AAC5C,YAAY,MAAM,UAAU,CAAC;AAC7B,cAAc,IAAI,EAAE,8BAA8B;AAClD,cAAc,UAAU,EAAE,yBAAyB,CAAC;AACpD,gBAAgB,SAAS;AACzB,gBAAgB,UAAU,EAAE;AAC5B,kBAAkB,GAAG,qBAAqB,CAAC;AAC3C,oBAAoB,WAAW,EAAE,8BAA8B;AAC/D,oBAAoB;AACpB,mBAAmB,CAAC;AACpB,kBAAkB,GAAG,uBAAuB;AAC5C,kBAAkB,kBAAkB,EAAE;AACtC,oBAAoB,KAAK,EAAE,MAAM,kBAAkB,CAAC;AACpD,mBAAmB;AACnB,kBAAkB,oBAAo
B,EAAE;AACxC,oBAAoB,KAAK,EAAE,MAAM,IAAI,CAAC,SAAS,CAAC,cAAc;AAC9D,mBAAmB;AACnB,kBAAkB,kBAAkB,EAAE,IAAI;AAC1C;AACA,kBAAkB,eAAe,EAAE,KAAK,CAAC,QAAQ;AACjD,kBAAkB,sBAAsB,EAAE,KAAK,CAAC,OAAO;AACvD,kBAAkB,kCAAkC,EAAE,QAAQ,CAAC,gBAAgB;AAC/E,kBAAkB,2BAA2B,EAAE,QAAQ,CAAC,SAAS;AACjE,kBAAkB,iCAAiC,EAAE,QAAQ,CAAC,eAAe;AAC7E,kBAAkB,4BAA4B,EAAE,QAAQ,CAAC,WAAW;AACpE,kBAAkB,sBAAsB,EAAE,QAAQ,CAAC,IAAI;AACvD,kBAAkB,sBAAsB,EAAE,QAAQ,CAAC;AACnD;AACA,eAAe,CAAC;AAChB,cAAc,MAAM;AACpB,cAAc,EAAE,EAAE,OAAO,KAAK,KAAK;AACnC,gBAAgB,IAAI,IAAI,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,EAAE,EAAE,EAAE;AAC/C,gBAAgB,MAAM,OAAO,GAAG,MAAM,KAAK,CAAC,UAAU,CAAC;AACvD,kBAAkB,IAAI,EAAE;AACxB,oBAAoB,IAAI,EAAE,aAAa;AACvC,oBAAoB,MAAM,EAAE,cAAc,CAAC,UAAU;AACrD,oBAAoB,IAAI,EAAE,UAAU;AACpC,oBAAoB,WAAW,EAAE;AACjC,mBAAmB;AACnB,kBAAkB,GAAG,mBAAmB,CAAC,QAAQ,CAAC;AAClD,kBAAkB,WAAW,EAAE,kBAAkB,CAAC,IAAI;AACtD,kBAAkB,MAAM,EAAE,cAAc;AACxC,kBAAkB,gBAAgB,EAAE,eAAe;AACnD,kBAAkB,WAAW;AAC7B,kBAAkB;AAClB,iBAAiB,CAAC;AAClB,gBAAgB,MAAM,YAAY,GAAG;AACrC,kBAAkB,EAAE,EAAE,CAAC,GAAG,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,QAAQ,KAAK,IAAI,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,EAAE,KAAK,IAAI,GAAG,GAAG,GAAG,WAAW,EAAE;AAChH,kBAAkB,SAAS,EAAE,CAAC,GAAG,GAAG,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,KAAK,IAAI,GAAG,KAAK,CAAC,GAAG,GAAG,CAAC,SAAS,KAAK,IAAI,GAAG,GAAG,GAAG,WAAW,EAAE;AAC5H,kBAAkB,OAAO,EAAE,CAAC,EAAE,GAAG,CAAC,EAAE,GAAG,OAAO,CAAC,QAAQ,KAAK,IAAI,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC,OAAO,KAAK,IAAI,GAAG,EAAE,GAAG,KAAK,CAAC;AAC7G,iBAAiB;AACjB,gBAAgB,IAAI,OAAO,CAAC,IAAI,KAAK,KAAK,CAAC,EAAE;AAC7C,kBAAkB,MAAM,IAAI,sBAAsB,CAAC;AACnD,oBAAoB,OAAO,EAAE,2DAA2D;AACxF,oBAAoB,QAAQ,EAAE,YAAY;AAC1C,oBAAoB,KAAK,EAAE,2BAA2B,CAAC,OAAO,CAAC,KAAK,CAAC;AACrE,oBAAoB,YAAY,EAAE,OAAO,CAAC;AAC1C,mBAAmB,CAAC;AACpB,gBAAgB;AAChB,gBAAgB,KAAK,CAAC,aAAa;AACnC,kBAAkB,yBAAyB,CAAC;AAC5C,oBAAoB,SAAS;AAC7B,oBAAoB,UAAU,EAAE;AAChC,sBAAsB,0BAA0B,EAAE,OAAO,CAAC,YAAY;AACtE,sBAAsB,oBAAoB,EAAE,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC,IAAI,EAAE;AAC1E,sBAAsB,gBAAgB,EAAE,YAAY,CAAC,EAAE;AACvD,sBAAsB,mBAAmB,EAAE,YAAY,CAAC,OAAO;AAC/D,sBA
AsB,uBAAuB,EAAE,YAAY,CAAC,SAAS,CAAC,WAAW,EAAE;AACnF,sBAAsB,8BAA8B,EAAE,IAAI,CAAC,SAAS;AACpE,wBAAwB,OAAO,CAAC;AAChC,uBAAuB;AACvB,sBAAsB,uBAAuB,EAAE,OAAO,CAAC,KAAK,CAAC,YAAY;AACzE,sBAAsB,2BAA2B,EAAE,OAAO,CAAC,KAAK,CAAC,gBAAgB;AACjF;AACA,sBAAsB,gCAAgC,EAAE,CAAC,OAAO,CAAC,YAAY,CAAC;AAC9E,sBAAsB,oBAAoB,EAAE,YAAY,CAAC,EAAE;AAC3D,sBAAsB,uBAAuB,EAAE,YAAY,CAAC,OAAO;AACnE,sBAAsB,4BAA4B,EAAE,OAAO,CAAC,KAAK,CAAC,YAAY;AAC9E,sBAAsB,gCAAgC,EAAE,OAAO,CAAC,KAAK,CAAC;AACtE;AACA,mBAAmB;AACnB,iBAAiB;AACjB,gBAAgB,OAAO,EAAE,GAAG,OAAO,EAAE,UAAU,EAAE,OAAO,CAAC,IAAI,EAAE,YAAY,EAAE;AAC7E,cAAc;AACd,aAAa;AACb,WAAW;AACX,UAAU,MAAM,GAAG,cAAc,CAAC,UAAU;AAC5C,UAAU,YAAY,GAAG,cAAc,CAAC,YAAY;AACpD,UAAU,KAAK,GAAG,cAAc,CAAC,KAAK;AACtC,UAAU,QAAQ,GAAG,cAAc,CAAC,QAAQ;AAC5C,UAAU,WAAW,GAAG,cAAc,CAAC,WAAW;AAClD,UAAU,QAAQ,GAAG,cAAc,CAAC,QAAQ;AAC5C,UAAU,sBAAsB,GAAG,cAAc,CAAC,gBAAgB;AAClE,UAAU,OAAO,GAAG,CAAC,EAAE,GAAG,cAAc,CAAC,OAAO,KAAK,IAAI,GAAG,EAAE,GAAG,EAAE;AACnE,UAAU,QAAQ,GAAG,cAAc,CAAC,YAAY;AAChD,UAAU;AACV,QAAQ;AACR,QAAQ,KAAK,MAAM,EAAE;AACrB,UAAU,MAAM,kBAAkB,GAAG,iBAAiB,CAAC;AACvD,YAAY,MAAM,EAAE,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE;AAChD,YAAY,KAAK,EAAE,KAAK;AACxB,WAAW,CAAC;AACZ,UAAU,MAAM,cAAc,GAAG,MAAM,4BAA4B,CAAC;AACpE,YAAY,MAAM,EAAE,kBAAkB;AACtC,YAAY,sBAAsB,EAAE,KAAK,CAAC,iBAAiB;AAC3D,YAAY,gBAAgB,EAAE,CAAC,EAAE,GAAG,KAAK,CAAC,WAAW,KAAK,IAAI,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,KAAK;AACvF;AACA,WAAW,CAAC;AACZ,UAAU,MAAM,WAAW,GAAG,kBAAkB,CAAC,IAAI;AACrD,UAAU,MAAM,cAAc,GAAG,MAAM,KAAK;AAC5C,YAAY,MAAM,UAAU,CAAC;AAC7B,cAAc,IAAI,EAAE,8BAA8B;AAClD,cAAc,UAAU,EAAE,yBAAyB,CAAC;AACpD,gBAAgB,SAAS;AACzB,gBAAgB,UAAU,EAAE;AAC5B,kBAAkB,GAAG,qBAAqB,CAAC;AAC3C,oBAAoB,WAAW,EAAE,8BAA8B;AAC/D,oBAAoB;AACpB,mBAAmB,CAAC;AACpB,kBAAkB,GAAG,uBAAuB;AAC5C,kBAAkB,kBAAkB,EAAE;AACtC,oBAAoB,KAAK,EAAE,MAAM;AACjC,mBAAmB;AACnB,kBAAkB,oBAAoB,EAAE;AACxC,oBAAoB,KAAK,EAAE,MAAM,qBAAqB,CAAC,cAAc;AACrE,mBAAmB;AACnB,kBAAkB,kBAAkB,EAAE,IAAI;AAC1C;AACA,kBAAkB,eAAe,EAAE,KAAK,CAAC,QAAQ;AACjD,kBAAkB,sBAAsB,EAAE,KAAK,CAAC,OAAO;AACvD,kBAAkB,kCAAkC,EAAE,QA
AQ,CAAC,gBAAgB;AAC/E,kBAAkB,2BAA2B,EAAE,QAAQ,CAAC,SAAS;AACjE,kBAAkB,iCAAiC,EAAE,QAAQ,CAAC,eAAe;AAC7E,kBAAkB,4BAA4B,EAAE,QAAQ,CAAC,WAAW;AACpE,kBAAkB,sBAAsB,EAAE,QAAQ,CAAC,IAAI;AACvD,kBAAkB,sBAAsB,EAAE,QAAQ,CAAC;AACnD;AACA,eAAe,CAAC;AAChB,cAAc,MAAM;AACpB,cAAc,EAAE,EAAE,OAAO,KAAK,KAAK;AACnC,gBAAgB,IAAI,IAAI,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE;AACvD,gBAAgB,MAAM,OAAO,GAAG,MAAM,KAAK,CAAC,UAAU,CAAC;AACvD,kBAAkB,IAAI,EAAE;AACxB,oBAAoB,IAAI,EAAE,aAAa;AACvC,oBAAoB,IAAI,EAAE;AAC1B,sBAAsB,IAAI,EAAE,UAAU;AACtC,sBAAsB,IAAI,EAAE,UAAU,IAAI,IAAI,GAAG,UAAU,GAAG,MAAM;AACpE,sBAAsB,WAAW,EAAE,iBAAiB,IAAI,IAAI,GAAG,iBAAiB,GAAG,6BAA6B;AAChH,sBAAsB,UAAU,EAAE,cAAc,CAAC;AACjD;AACA,mBAAmB;AACnB,kBAAkB,GAAG,mBAAmB,CAAC,QAAQ,CAAC;AAClD,kBAAkB,WAAW;AAC7B,kBAAkB,MAAM,EAAE,cAAc;AACxC,kBAAkB,gBAAgB,EAAE,eAAe;AACnD,kBAAkB,WAAW;AAC7B,kBAAkB;AAClB,iBAAiB,CAAC;AAClB,gBAAgB,MAAM,UAAU,GAAG,CAAC,GAAG,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,SAAS,KAAK,IAAI,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,GAAG,KAAK,CAAC,GAAG,GAAG,CAAC,IAAI;AAC5H,gBAAgB,MAAM,YAAY,GAAG;AACrC,kBAAkB,EAAE,EAAE,CAAC,GAAG,GAAG,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,KAAK,IAAI,GAAG,KAAK,CAAC,GAAG,GAAG,CAAC,EAAE,KAAK,IAAI,GAAG,GAAG,GAAG,WAAW,EAAE;AAC9G,kBAAkB,SAAS,EAAE,CAAC,EAAE,GAAG,CAAC,EAAE,GAAG,OAAO,CAAC,QAAQ,KAAK,IAAI,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC,SAAS,KAAK,IAAI,GAAG,EAAE,GAAG,WAAW,EAAE;AACxH,kBAAkB,OAAO,EAAE,CAAC,EAAE,GAAG,CAAC,EAAE,GAAG,OAAO,CAAC,QAAQ,KAAK,IAAI,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC,OAAO,KAAK,IAAI,GAAG,EAAE,GAAG,KAAK,CAAC;AAC7G,iBAAiB;AACjB,gBAAgB,IAAI,UAAU,KAAK,KAAK,CAAC,EAAE;AAC3C,kBAAkB,MAAM,IAAI,sBAAsB,CAAC;AACnD,oBAAoB,OAAO,EAAE,+CAA+C;AAC5E,oBAAoB,QAAQ,EAAE,YAAY;AAC1C,oBAAoB,KAAK,EAAE,2BAA2B,CAAC,OAAO,CAAC,KAAK,CAAC;AACrE,oBAAoB,YAAY,EAAE,OAAO,CAAC;AAC1C,mBAAmB,CAAC;AACpB,gBAAgB;AAChB,gBAAgB,KAAK,CAAC,aAAa;AACnC,kBAAkB,yBAAyB,CAAC;AAC5C,oBAAoB,SAAS;AAC7B,oBAAoB,UAAU,EAAE;AAChC,sBAAsB,0BAA0B,EAAE,OAAO,CAAC,YAAY;AACtE,sBAAsB,oBAAoB,EAAE,EAAE,MAAM,EAAE,MAAM,UAAU,EAAE;AACxE,sBAAsB,gBAAgB,EAAE,YAAY,CAAC
,EAAE;AACvD,sBAAsB,mBAAmB,EAAE,YAAY,CAAC,OAAO;AAC/D,sBAAsB,uBAAuB,EAAE,YAAY,CAAC,SAAS,CAAC,WAAW,EAAE;AACnF,sBAAsB,8BAA8B,EAAE,IAAI,CAAC,SAAS;AACpE,wBAAwB,OAAO,CAAC;AAChC,uBAAuB;AACvB,sBAAsB,uBAAuB,EAAE,OAAO,CAAC,KAAK,CAAC,YAAY;AACzE,sBAAsB,2BAA2B,EAAE,OAAO,CAAC,KAAK,CAAC,gBAAgB;AACjF;AACA,sBAAsB,gCAAgC,EAAE,CAAC,OAAO,CAAC,YAAY,CAAC;AAC9E,sBAAsB,oBAAoB,EAAE,YAAY,CAAC,EAAE;AAC3D,sBAAsB,uBAAuB,EAAE,YAAY,CAAC,OAAO;AACnE,sBAAsB,2BAA2B,EAAE,OAAO,CAAC,KAAK,CAAC,YAAY;AAC7E,sBAAsB,4BAA4B,EAAE,OAAO,CAAC,KAAK,CAAC;AAClE;AACA,mBAAmB;AACnB,iBAAiB;AACjB,gBAAgB,OAAO,EAAE,GAAG,OAAO,EAAE,UAAU,EAAE,YAAY,EAAE;AAC/D,cAAc;AACd,aAAa;AACb,WAAW;AACX,UAAU,MAAM,GAAG,cAAc,CAAC,UAAU;AAC5C,UAAU,YAAY,GAAG,cAAc,CAAC,YAAY;AACpD,UAAU,KAAK,GAAG,cAAc,CAAC,KAAK;AACtC,UAAU,QAAQ,GAAG,cAAc,CAAC,QAAQ;AAC5C,UAAU,WAAW,GAAG,cAAc,CAAC,WAAW;AAClD,UAAU,QAAQ,GAAG,cAAc,CAAC,QAAQ;AAC5C,UAAU,sBAAsB,GAAG,cAAc,CAAC,gBAAgB;AAClE,UAAU,OAAO,GAAG,CAAC,EAAE,GAAG,cAAc,CAAC,OAAO,KAAK,IAAI,GAAG,EAAE,GAAG,EAAE;AACnE,UAAU,QAAQ,GAAG,cAAc,CAAC,YAAY;AAChD,UAAU;AACV,QAAQ;AACR,QAAQ,KAAK,KAAK,CAAC,EAAE;AACrB,UAAU,MAAM,IAAI,KAAK;AACzB,YAAY;AACZ,WAAW;AACX,QAAQ;AACR,QAAQ,SAAS;AACjB,UAAU,MAAM,gBAAgB,GAAG,IAAI;AACvC,UAAU,MAAM,IAAI,KAAK,CAAC,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC,CAAC;AAClE,QAAQ;AACR;AACA,MAAM,SAAS,aAAa,CAAC,OAAO,EAAE;AACtC,QAAQ,MAAM,WAAW,GAAG,aAAa,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC;AAC5D,QAAQ,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;AAClC,UAAU,MAAM,IAAI,sBAAsB,CAAC;AAC3C,YAAY,OAAO,EAAE,oDAAoD;AACzE,YAAY,KAAK,EAAE,WAAW,CAAC,KAAK;AACpC,YAAY,IAAI,EAAE,OAAO;AACzB,YAAY,QAAQ;AACpB,YAAY,KAAK,EAAE,2BAA2B,CAAC,KAAK,CAAC;AACrD,YAAY;AACZ,WAAW,CAAC;AACZ,QAAQ;AACR,QAAQ,MAAM,gBAAgB,GAAG,cAAc,CAAC,mBAAmB;AACnE,UAAU,WAAW,CAAC,KAAK;AAC3B,UAAU;AACV,YAAY,IAAI,EAAE,OAAO;AACzB,YAAY,QAAQ;AACpB,YAAY,KAAK,EAAE,2BAA2B,CAAC,KAAK;AACpD;AACA,SAAS;AACT,QAAQ,IAAI,CAAC,gBAAgB,CAAC,OAAO,EAAE;AACvC,UAAU,MAAM,IAAI,sBAAsB,CAAC;AAC3C,YAAY,OAAO,EAAE,qDAAqD;AAC1E,YAAY,KAAK,EAAE,gBAAgB,CAAC,KAAK;AACzC,YAAY,IAAI,EAAE,OAAO;AACzB,YAAY,QAAQ;AACpB,YAAY,KAAK,EAAE,2BAA2B,CAAC,KAAK,
CAAC;AACrD,YAAY;AACZ,WAAW,CAAC;AACZ,QAAQ;AACR,QAAQ,OAAO,gBAAgB,CAAC,KAAK;AACrC,MAAM;AACN,MAAM,IAAI,OAAO;AACjB,MAAM,IAAI;AACV,QAAQ,OAAO,GAAG,aAAa,CAAC,MAAM,CAAC;AACvC,MAAM,CAAC,CAAC,OAAO,KAAK,EAAE;AACtB,QAAQ,IAAI,UAAU,IAAI,IAAI,IAAI,sBAAsB,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK,cAAc,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC,IAAIC,mBAAoB,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE;AACxK,UAAU,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC;AAChD,YAAY,IAAI,EAAE,MAAM;AACxB,YAAY,KAAK,EAAE,KAAK,CAAC;AACzB,WAAW,CAAC;AACZ,UAAU,IAAI,YAAY,KAAK,IAAI,EAAE;AACrC,YAAY,MAAM,KAAK;AACvB,UAAU;AACV,UAAU,OAAO,GAAG,aAAa,CAAC,YAAY,CAAC;AAC/C,QAAQ,CAAC,MAAM;AACf,UAAU,MAAM,KAAK;AACrB,QAAQ;AACR,MAAM;AACN,MAAM,IAAI,CAAC,aAAa;AACxB,QAAQ,yBAAyB,CAAC;AAClC,UAAU,SAAS;AACnB,UAAU,UAAU,EAAE;AACtB,YAAY,0BAA0B,EAAE,YAAY;AACpD,YAAY,oBAAoB,EAAE;AAClC,cAAc,MAAM,EAAE,MAAM,IAAI,CAAC,SAAS,CAAC,OAAO;AAClD,aAAa;AACb,YAAY,uBAAuB,EAAE,KAAK,CAAC,YAAY;AACvD,YAAY,2BAA2B,EAAE,KAAK,CAAC;AAC/C;AACA,SAAS;AACT,OAAO;AACP,MAAM,OAAO,IAAI,2BAA2B,CAAC;AAC7C,QAAQ,MAAM,EAAE,OAAO;AACvB,QAAQ,YAAY;AACpB,QAAQ,KAAK,EAAE,2BAA2B,CAAC,KAAK,CAAC;AACjD,QAAQ,QAAQ;AAChB,QAAQ,OAAO;AACf,QAAQ,QAAQ,EAAE;AAClB,UAAU,GAAG,QAAQ;AACrB,UAAU,OAAO,EAAE,WAAW,IAAI,IAAI,GAAG,KAAK,CAAC,GAAG,WAAW,CAAC,OAAO;AACrE,UAAU,IAAI,EAAE,WAAW,IAAI,IAAI,GAAG,KAAK,CAAC,GAAG,WAAW,CAAC;AAC3D,SAAS;AACT,QAAQ,QAAQ;AAChB,QAAQ,gBAAgB,EAAE;AAC1B,OAAO,CAAC;AACR,IAAI;AACJ,GAAG,CAAC;AACJ;AACA,IAAI,2BAA2B,GAAG,MAAM;AACxC,EAAE,WAAW,CAAC,OAAO,EAAE;AACvB,IAAI,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM;AAChC,IAAI,IAAI,CAAC,YAAY,GAAG,OAAO,CAAC,YAAY;AAC5C,IAAI,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC,KAAK;AAC9B,IAAI,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ;AACpC,IAAI,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB;AACpD,IAAI,IAAI,CAAC,6BAA6B,GAAG,OAAO,CAAC,gBAAgB;AACjE,IAAI,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ;AACpC,IAAI,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO;AAClC,IAAI,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ;AACpC,EAAE;AACF,EAAE,cAAc,CAAC,IAAI,EAAE;AACvB,IAAI,IAAI,IAAI;AACZ,IAAI,OAAO,IAAI,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;AACrD,MAAM,MAAM,E
AAE,CAAC,IAAI,GAAG,IAAI,IAAI,IAAI,GAAG,MAAM,GAAG,IAAI,CAAC,MAAM,KAAK,IAAI,GAAG,IAAI,GAAG,GAAG;AAC/E,MAAM,OAAO,EAAE,sBAAsB,CAAC,IAAI,IAAI,IAAI,GAAG,MAAM,GAAG,IAAI,CAAC,OAAO,EAAE;AAC5E,QAAQ,WAAW,EAAE;AACrB,OAAO;AACP,KAAK,CAAC;AACN,EAAE;AACF,CAAC;;AAsJD;AAC0BC,iBAAkB,CAAC,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE;;AAy4B1E;AAC0BC,iBAAkB,CAAC;AAC7C,EAAE,MAAM,EAAE,OAAO;AACjB,EAAE,IAAI,EAAE;AACR,CAAC;AAC+BA,iBAAkB,CAAC;AACnD,EAAE,MAAM,EAAE,KAAK;AACf,EAAE,IAAI,EAAE;AACR,CAAC;;AAweD;AACG,IAAC,cAAc,GAAG;AACrB,QAAQ,CAAC,cAAc,EAAE;AACzB,EAAE,MAAM,EAAE,MAAM,MAAM;AACtB,EAAE,IAAI,EAAE,MAAM;AACd,CAAC,CAAC;;AAgEF;AACA,IAAI,IAAI,GAAG,OAAO;AAClB,EAAE,IAAI,EAAE,MAAM;AACd,EAAE,cAAc,EAAE,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC;AAC1C,EAAE,sBAAsB,CAAC,EAAE,MAAM,EAAE,EAAE;AACrC,IAAI,OAAO,MAAM;AACjB,EAAE,CAAC;AACH,EAAE,YAAY,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,EAAE;AAChC,IAAI,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE;AAC7B,EAAE,CAAC;AACH,EAAE,WAAW,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,EAAE;AAC/B,IAAI,OAAO,KAAK;AAChB,EAAE;AACF,CAAC,CAAC;AACF,IAAI,MAAM,GAAG,CAAC;AACd,EAAE,MAAM,EAAE;AACV,CAAC,KAAK;AACN,EAAE,MAAM,MAAM,GAAGC,QAAS,CAAC,WAAW,CAAC;AACvC,EAAE,OAAO;AACT,IAAI,IAAI,EAAE,QAAQ;AAClB,IAAI,cAAc,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM;AACpC,MAAM,IAAI,EAAE,MAAM;AAClB,MAAM,MAAM,EAAE,KAAK,CAAC,yBAAyB,GAAG,MAAM,CAAC,UAAU,GAAG;AACpE,KAAK,CAAC;AACN,IAAI,sBAAsB,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,EAAE;AAC9C,MAAM,OAAO,KAAK,CAAC,yBAAyB,GAAG,MAAM,GAAG,qBAAqB,CAAC;AAC9E,QAAQ,MAAM,EAAE,MAAM;AACtB,QAAQ,MAAM,EAAE,MAAM,CAAC;AACvB,OAAO,CAAC;AACR,IAAI,CAAC;AACL,IAAI,YAAY,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,EAAE;AAClC,MAAM,MAAM,MAAM,GAAGC,gBAAiB,CAAC,KAAK,CAAC;AAC7C,MAAM,QAAQ,MAAM,CAAC,KAAK;AAC1B,QAAQ,KAAK,cAAc;AAC3B,QAAQ,KAAK,iBAAiB;AAC9B,UAAU,OAAO,MAAM;AACvB,QAAQ,KAAK,gBAAgB;AAC7B,QAAQ,KAAK,kBAAkB;AAC/B,UAAU,OAAO;AACjB;AACA,YAAY,OAAO,EAAE,MAAM,CAAC;AAC5B,WAAW;AACX,QAAQ,SAAS;AACjB,UAAU,MAAM,gBAAgB,GAAG,MAAM,CAAC,KAAK;AAC/C,UAAU,MAAM,IAAI,KAAK,CAAC,CAAC,yBAAyB,EAAE,gBAAgB,CAAC,CAAC,CAAC;AACzE,QAAQ;AACR;AACA,IAAI,CAAC;AACL,IAAI,WAAW,CAAC,EAAE,IAAI,EAAE,KAAK,EA
AE,EAAE,OAAO,EAAE;AAC1C,MAAM,MAAM,WAAW,GAAGC,aAAc,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC;AACzD,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;AAChC,QAAQ,MAAM,IAAI,sBAAsB,CAAC;AACzC,UAAU,OAAO,EAAE,oDAAoD;AACvE,UAAU,KAAK,EAAE,WAAW,CAAC,KAAK;AAClC,UAAU,IAAI,EAAE,KAAK;AACrB,UAAU,QAAQ,EAAE,OAAO,CAAC,QAAQ;AACpC,UAAU,KAAK,EAAE,OAAO,CAAC,KAAK;AAC9B,UAAU,YAAY,EAAE,OAAO,CAAC;AAChC,SAAS,CAAC;AACV,MAAM;AACN,MAAM,MAAM,gBAAgB,GAAGC,iBAAkB,CAAC;AAClD,QAAQ,KAAK,EAAE,WAAW,CAAC,KAAK;AAChC,QAAQ;AACR,OAAO,CAAC;AACR,MAAM,IAAI,CAAC,gBAAgB,CAAC,OAAO,EAAE;AACrC,QAAQ,MAAM,IAAI,sBAAsB,CAAC;AACzC,UAAU,OAAO,EAAE,qDAAqD;AACxE,UAAU,KAAK,EAAE,gBAAgB,CAAC,KAAK;AACvC,UAAU,IAAI,EAAE,KAAK;AACrB,UAAU,QAAQ,EAAE,OAAO,CAAC,QAAQ;AACpC,UAAU,KAAK,EAAE,OAAO,CAAC,KAAK;AAC9B,UAAU,YAAY,EAAE,OAAO,CAAC;AAChC,SAAS,CAAC;AACV,MAAM;AACN,MAAM,OAAO,gBAAgB,CAAC,KAAK;AACnC,IAAI;AACJ,GAAG;AACH,CAAC;;AAoGD;AACA,SAAS,YAAY,CAAC,OAAO,EAAE,OAAO,EAAE;AACxC,EAAE,MAAM,OAAO,GAAG,OAAO,CAAC,SAAS,EAAE;AACrC,EAAE,MAAM,OAAO,GAAG,OAAO,CAAC,SAAS,EAAE;AACrC,EAAE,IAAI,SAAS,GAAG,MAAM;AACxB,EAAE,IAAI,SAAS,GAAG,MAAM;AACxB,EAAE,IAAI,WAAW,GAAG,KAAK;AACzB,EAAE,IAAI,WAAW,GAAG,KAAK;AACzB,EAAE,eAAe,WAAW,CAAC,UAAU,EAAE;AACzC,IAAI,IAAI;AACR,MAAM,IAAI,SAAS,IAAI,IAAI,EAAE;AAC7B,QAAQ,SAAS,GAAG,OAAO,CAAC,IAAI,EAAE;AAClC,MAAM;AACN,MAAM,MAAM,MAAM,GAAG,MAAM,SAAS;AACpC,MAAM,SAAS,GAAG,KAAK,CAAC;AACxB,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE;AACxB,QAAQ,UAAU,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC;AACxC,MAAM,CAAC,MAAM;AACb,QAAQ,UAAU,CAAC,KAAK,EAAE;AAC1B,MAAM;AACN,IAAI,CAAC,CAAC,OAAO,KAAK,EAAE;AACpB,MAAM,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC;AAC7B,IAAI;AACJ,EAAE;AACF,EAAE,eAAe,WAAW,CAAC,UAAU,EAAE;AACzC,IAAI,IAAI;AACR,MAAM,IAAI,SAAS,IAAI,IAAI,EAAE;AAC7B,QAAQ,SAAS,GAAG,OAAO,CAAC,IAAI,EAAE;AAClC,MAAM;AACN,MAAM,MAAM,MAAM,GAAG,MAAM,SAAS;AACpC,MAAM,SAAS,GAAG,KAAK,CAAC;AACxB,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE;AACxB,QAAQ,UAAU,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC;AACxC,MAAM,CAAC,MAAM;AACb,QAAQ,UAAU,CAAC,KAAK,EAAE;AAC1B,MAAM;AACN,IAAI,CAAC,CAAC,OAAO,KAAK,EAAE;AACpB,MAAM,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC;AAC7B,IA
AI;AACJ,EAAE;AACF,EAAE,OAAO,IAAI,cAAc,CAAC;AAC5B,IAAI,MAAM,IAAI,CAAC,UAAU,EAAE;AAC3B,MAAM,IAAI;AACV,QAAQ,IAAI,WAAW,EAAE;AACzB,UAAU,MAAM,WAAW,CAAC,UAAU,CAAC;AACvC,UAAU;AACV,QAAQ;AACR,QAAQ,IAAI,WAAW,EAAE;AACzB,UAAU,MAAM,WAAW,CAAC,UAAU,CAAC;AACvC,UAAU;AACV,QAAQ;AACR,QAAQ,IAAI,SAAS,IAAI,IAAI,EAAE;AAC/B,UAAU,SAAS,GAAG,OAAO,CAAC,IAAI,EAAE;AACpC,QAAQ;AACR,QAAQ,IAAI,SAAS,IAAI,IAAI,EAAE;AAC/B,UAAU,SAAS,GAAG,OAAO,CAAC,IAAI,EAAE;AACpC,QAAQ;AACR,QAAQ,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,OAAO,CAAC,IAAI,CAAC;AACtD,UAAU,SAAS,CAAC,IAAI,CAAC,CAAC,OAAO,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;AAC7E,UAAU,SAAS,CAAC,IAAI,CAAC,CAAC,OAAO,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;AAC5E,SAAS,CAAC;AACV,QAAQ,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE;AAC1B,UAAU,UAAU,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC;AAC1C,QAAQ;AACR,QAAQ,IAAI,MAAM,KAAK,OAAO,EAAE;AAChC,UAAU,SAAS,GAAG,KAAK,CAAC;AAC5B,UAAU,IAAI,MAAM,CAAC,IAAI,EAAE;AAC3B,YAAY,MAAM,WAAW,CAAC,UAAU,CAAC;AACzC,YAAY,WAAW,GAAG,IAAI;AAC9B,UAAU;AACV,QAAQ,CAAC,MAAM;AACf,UAAU,SAAS,GAAG,KAAK,CAAC;AAC5B,UAAU,IAAI,MAAM,CAAC,IAAI,EAAE;AAC3B,YAAY,WAAW,GAAG,IAAI;AAC9B,YAAY,MAAM,WAAW,CAAC,UAAU,CAAC;AACzC,UAAU;AACV,QAAQ;AACR,MAAM,CAAC,CAAC,OAAO,KAAK,EAAE;AACtB,QAAQ,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC;AAC/B,MAAM;AACN,IAAI,CAAC;AACL,IAAI,MAAM,GAAG;AACb,MAAM,OAAO,CAAC,MAAM,EAAE;AACtB,MAAM,OAAO,CAAC,MAAM,EAAE;AACtB,IAAI;AACJ,GAAG,CAAC;AACJ;;AA8MA;AAC0BC,iBAAkB,CAAC;AAC7C,EAAE,MAAM,EAAE,OAAO;AACjB,EAAE,IAAI,EAAE;AACR,CAAC;AACgCA,iBAAkB,CAAC;AACpD,EAAE,MAAM,EAAE,KAAK;AACf,EAAE,IAAI,EAAE;AACR,CAAC;AAigED,IAAI,kCAAkC,GAAGC,CAAE,CAAC,MAAM,CAAC;AACnD,EAAE,IAAI,EAAEA,CAAE,CAAC,MAAM,EAAE;AACnB,EAAE,OAAO,EAAEA,CAAE,CAAC,MAAM;AACpB,CAAC,CAAC,CAAC,WAAW,EAAE;AAChB,IAAI,gBAAgB,GAAGA,CAAE,CAAC,MAAM,CAAC;AACjC,EAAE,KAAK,EAAEA,CAAE,CAAC,QAAQ,CAACA,CAAE,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,WAAW,EAAE;AAChD,CAAC,CAAC,CAAC,WAAW,EAAE;AAChB,IAAI,YAAY,GAAG,gBAAgB;AACnC,IAAI,aAAa,GAAGA,CAAE,CAAC,MAAM,CAAC;AAC9B,EAAE,MAAM,EAAEA,CAAE,CAAC,MAAM,EAAE;AACrB,EAAE,MAAM,EAAEA,CAAE,CAAC,QAAQ
,CAAC,gBAAgB;AACtC,CAAC,CAAC;AACF,IAAI,wBAAwB,GAAGA,CAAE,CAAC,MAAM,CAAC;AACzC,EAAE,YAAY,EAAEA,CAAE,CAAC,QAAQ,CAACA,CAAE,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC;AACxD,EAAE,OAAO,EAAEA,CAAE,CAAC,QAAQ,CAACA,CAAE,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC;AACnD,EAAE,OAAO,EAAEA,CAAE,CAAC,QAAQ;AACtB,IAAIA,CAAE,CAAC,MAAM,CAAC;AACd,MAAM,WAAW,EAAEA,CAAE,CAAC,QAAQ,CAACA,CAAE,CAAC,OAAO,EAAE;AAC3C,KAAK,CAAC,CAAC,WAAW;AAClB,GAAG;AACH,EAAE,SAAS,EAAEA,CAAE,CAAC,QAAQ;AACxB,IAAIA,CAAE,CAAC,MAAM,CAAC;AACd,MAAM,SAAS,EAAEA,CAAE,CAAC,QAAQ,CAACA,CAAE,CAAC,OAAO,EAAE,CAAC;AAC1C,MAAM,WAAW,EAAEA,CAAE,CAAC,QAAQ,CAACA,CAAE,CAAC,OAAO,EAAE;AAC3C,KAAK,CAAC,CAAC,WAAW;AAClB,GAAG;AACH,EAAE,KAAK,EAAEA,CAAE,CAAC,QAAQ;AACpB,IAAIA,CAAE,CAAC,MAAM,CAAC;AACd,MAAM,WAAW,EAAEA,CAAE,CAAC,QAAQ,CAACA,CAAE,CAAC,OAAO,EAAE;AAC3C,KAAK,CAAC,CAAC,WAAW;AAClB;AACA,CAAC,CAAC,CAAC,WAAW,EAAE;AACa,YAAY,CAAC,MAAM,CAAC;AACjD,EAAE,eAAe,EAAEA,CAAE,CAAC,MAAM,EAAE;AAC9B,EAAE,YAAY,EAAE,wBAAwB;AACxC,EAAE,UAAU,EAAE,kCAAkC;AAChD,EAAE,YAAY,EAAEA,CAAE,CAAC,QAAQ,CAACA,CAAE,CAAC,MAAM,EAAE;AACvC,CAAC;AACD,IAAI,qBAAqB,GAAG,YAAY,CAAC,MAAM,CAAC;AAChD,EAAE,UAAU,EAAEA,CAAE,CAAC,QAAQ,CAACA,CAAE,CAAC,MAAM,EAAE;AACrC,CAAC,CAAC;AACF,IAAI,UAAU,GAAGA,CAAE,CAAC,MAAM,CAAC;AAC3B,EAAE,IAAI,EAAEA,CAAE,CAAC,MAAM,EAAE;AACnB,EAAE,WAAW,EAAEA,CAAE,CAAC,QAAQ,CAACA,CAAE,CAAC,MAAM,EAAE,CAAC;AACvC,EAAE,WAAW,EAAEA,CAAE,CAAC,MAAM,CAAC;AACzB,IAAI,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,QAAQ,CAAC;AAC9B,IAAI,UAAU,EAAEA,CAAE,CAAC,QAAQ,CAACA,CAAE,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,WAAW,EAAE;AACvD,GAAG,CAAC,CAAC,WAAW;AAChB,CAAC,CAAC,CAAC,WAAW,EAAE;AACY,qBAAqB,CAAC,MAAM,CAAC;AACzD,EAAE,KAAK,EAAEA,CAAE,CAAC,KAAK,CAAC,UAAU;AAC5B,CAAC;AACD,IAAI,iBAAiB,GAAGA,CAAE,CAAC,MAAM,CAAC;AAClC,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,MAAM,CAAC;AAC1B,EAAE,IAAI,EAAEA,CAAE,CAAC,MAAM;AACjB,CAAC,CAAC,CAAC,WAAW,EAAE;AAChB,IAAI,kBAAkB,GAAGA,CAAE,CAAC,MAAM,CAAC;AACnC,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,OAAO,CAAC;AAC3B,EAAE,IAAI,EAAEA,CAAE,CAAC,MAAM,EAAE,CAAC,MAAM,EAAE;AAC5B,EAAE,QAAQ,EAAEA,CAAE,CAAC,MAAM;A
ACrB,CAAC,CAAC,CAAC,WAAW,EAAE;AAChB,IAAI,sBAAsB,GAAGA,CAAE,CAAC,MAAM,CAAC;AACvC;AACA;AACA;AACA,EAAE,GAAG,EAAEA,CAAE,CAAC,MAAM,EAAE;AAClB;AACA;AACA;AACA,EAAE,QAAQ,EAAEA,CAAE,CAAC,QAAQ,CAACA,CAAE,CAAC,MAAM,EAAE;AACnC,CAAC,CAAC,CAAC,WAAW,EAAE;AAChB,IAAI,0BAA0B,GAAG,sBAAsB,CAAC,MAAM,CAAC;AAC/D,EAAE,IAAI,EAAEA,CAAE,CAAC,MAAM;AACjB,CAAC,CAAC;AACF,IAAI,0BAA0B,GAAG,sBAAsB,CAAC,MAAM,CAAC;AAC/D,EAAE,IAAI,EAAEA,CAAE,CAAC,MAAM,EAAE,CAAC,MAAM;AAC1B,CAAC,CAAC;AACF,IAAI,sBAAsB,GAAGA,CAAE,CAAC,MAAM,CAAC;AACvC,EAAE,IAAI,EAAEA,CAAE,CAAC,OAAO,CAAC,UAAU,CAAC;AAC9B,EAAE,QAAQ,EAAEA,CAAE,CAAC,KAAK,CAAC,CAAC,0BAA0B,EAAE,0BAA0B,CAAC;AAC7E,CAAC,CAAC,CAAC,WAAW,EAAE;AACW,YAAY,CAAC,MAAM,CAAC;AAC/C,EAAE,OAAO,EAAEA,CAAE,CAAC,KAAK;AACnB,IAAIA,CAAE,CAAC,KAAK,CAAC,CAAC,iBAAiB,EAAE,kBAAkB,EAAE,sBAAsB,CAAC;AAC5E,GAAG;AACH,EAAE,OAAO,EAAEA,CAAE,CAAC,OAAO,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,QAAQ;AAC/C,CAAC,CAAC,CAAC,EAAE;AACL,EAAE,YAAY,CAAC,MAAM,CAAC;AACtB,IAAI,UAAU,EAAEA,CAAE,CAAC,OAAO;AAC1B,GAAG;AACH;;AAEA;AACA,IAAI,eAAe,GAAG,KAAK;AAC3B,IAAI,oBAAoB,GAAGC,CAAE,CAAC,MAAM,CAAC;AACrC,EAAE,OAAO,EAAEA,CAAE,CAAC,OAAO,CAAC,eAAe,CAAC;AACtC,EAAE,EAAE,EAAEA,CAAE,CAAC,KAAK,CAAC,CAACA,CAAE,CAAC,MAAM,EAAE,EAAEA,CAAE,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,CAAC;AAC/C,CAAC,CAAC,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC,MAAM,EAAE;AAChC,IAAI,qBAAqB,GAAGA,CAAE,CAAC,MAAM,CAAC;AACtC,EAAE,OAAO,EAAEA,CAAE,CAAC,OAAO,CAAC,eAAe,CAAC;AACtC,EAAE,EAAE,EAAEA,CAAE,CAAC,KAAK,CAAC,CAACA,CAAE,CAAC,MAAM,EAAE,EAAEA,CAAE,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC;AAChD,EAAE,MAAM,EAAE;AACV,CAAC,CAAC,CAAC,MAAM,EAAE;AACX,IAAI,kBAAkB,GAAGA,CAAE,CAAC,MAAM,CAAC;AACnC,EAAE,OAAO,EAAEA,CAAE,CAAC,OAAO,CAAC,eAAe,CAAC;AACtC,EAAE,EAAE,EAAEA,CAAE,CAAC,KAAK,CAAC,CAACA,CAAE,CAAC,MAAM,EAAE,EAAEA,CAAE,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC;AAChD,EAAE,KAAK,EAAEA,CAAE,CAAC,MAAM,CAAC;AACnB,IAAI,IAAI,EAAEA,CAAE,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE;AAC3B,IAAI,OAAO,EAAEA,CAAE,CAAC,MAAM,EAAE;AACxB,IAAI,IAAI,EAAEA,CAAE,CAAC,QAAQ,CAACA,CAAE,CAAC,OAAO,EAAE;AAClC,GAAG;AACH,CAAC,CAAC,CAAC,MAAM,EAAE;
AACX,IAAI,yBAAyB,GAAGA,CAAE,CAAC,MAAM,CAAC;AAC1C,EAAE,OAAO,EAAEA,CAAE,CAAC,OAAO,CAAC,eAAe;AACrC,CAAC,CAAC,CAAC,KAAK;AACR,EAAEA,CAAE,CAAC,MAAM,CAAC;AACZ,IAAI,MAAM,EAAEA,CAAE,CAAC,MAAM,EAAE;AACvB,IAAI,MAAM,EAAEA,CAAE,CAAC,QAAQ,CAAC,gBAAgB;AACxC,GAAG;AACH,CAAC,CAAC,MAAM,EAAE;AACiBA,CAAE,CAAC,KAAK,CAAC;AACpC,EAAE,oBAAoB;AACtB,EAAE,yBAAyB;AAC3B,EAAE,qBAAqB;AACvB,EAAE;AACF,CAAC;;AA+jBD;AACG,IAAC,yBAAyB,GAAG;AAChC,QAAQ,CAAC,yBAAyB,EAAE;AACpC,EAAE,mBAAmB,EAAE,MAAM,mBAAmB;AAChD,EAAE,YAAY,EAAE,MAAM,YAAY;AAClC,EAAE,oBAAoB,EAAE,MAAM;AAC9B,CAAC,CAAC;;AAGF;AACA,SAAS,0BAA0B,CAAC,SAAS,GAAG,EAAE,EAAE;AACpD,EAAE,MAAM,WAAW,GAAG,IAAI,WAAW,EAAE;AACvC,EAAE,IAAI,kBAAkB,GAAG,EAAE;AAC7B,EAAE,OAAO,IAAI,eAAe,CAAC;AAC7B,IAAI,MAAM,KAAK,GAAG;AAClB,MAAM,IAAI,SAAS,CAAC,OAAO;AAC3B,QAAQ,MAAM,SAAS,CAAC,OAAO,EAAE;AACjC,IAAI,CAAC;AACL,IAAI,MAAM,SAAS,CAAC,OAAO,EAAE,UAAU,EAAE;AACzC,MAAM,UAAU,CAAC,OAAO,CAAC,WAAW,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;AACrD,MAAM,kBAAkB,IAAI,OAAO;AACnC,MAAM,IAAI,SAAS,CAAC,OAAO;AAC3B,QAAQ,MAAM,SAAS,CAAC,OAAO,CAAC,OAAO,CAAC;AACxC,MAAM,IAAI,SAAS,CAAC,MAAM,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;AAC3D,QAAQ,MAAM,SAAS,CAAC,MAAM,CAAC,OAAO,CAAC;AACvC,MAAM;AACN,IAAI,CAAC;AACL,IAAI,MAAM,KAAK,GAAG;AAClB,MAAM,IAAI,SAAS,CAAC,YAAY,EAAE;AAClC,QAAQ,MAAM,SAAS,CAAC,YAAY,CAAC,kBAAkB,CAAC;AACxD,MAAM;AACN,MAAM,IAAI,SAAS,CAAC,OAAO,EAAE;AAC7B,QAAQ,MAAM,SAAS,CAAC,OAAO,CAAC,kBAAkB,CAAC;AACnD,MAAM;AACN,IAAI;AACJ,GAAG,CAAC;AACJ;;AAEA;AACA,SAAS,oBAAoB,CAAC,MAAM,EAAE,SAAS,EAAE;AACjD,EAAE,OAAO,MAAM,CAAC,WAAW;AAC3B,IAAI,IAAI,eAAe,CAAC;AACxB,MAAM,SAAS,EAAE,OAAO,KAAK,EAAE,UAAU,KAAK;AAC9C,QAAQ,IAAI,IAAI;AAChB,QAAQ,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;AACvC,UAAU,UAAU,CAAC,OAAO,CAAC,KAAK,CAAC;AACnC,UAAU;AACV,QAAQ;AACR,QAAQ,IAAI,OAAO,IAAI,KAAK,EAAE;AAC9B,UAAU,IAAI,KAAK,CAAC,KAAK,KAAK,sBAAsB,EAAE;AACtD,YAAY,qBAAqB;AACjC,cAAc,CAAC,IAAI,GAAG,KAAK,CAAC,IAAI,KAAK,IAAI,GAAG,MAAM,GAAG,IAAI,CAAC,KAAK;AAC/D,cAAc;AACd,aAAa;AACb,UAAU;AACV,UAAU;AACV,QAAQ;AACR,QAAQ,qBAAqB,CAAC,KAAK,EAAE,UAAU,CAAC;AAChD,MAAM;AACN,KAAK;AACL,GAAG,CAAC,WAAW,CAAC,0BAA0B
,CAAC,SAAS,CAAC,CAAC,CAAC,WAAW,CAAC,IAAI,iBAAiB,EAAE,CAAC,CAAC,WAAW;AACvG,IAAI,IAAI,eAAe,CAAC;AACxB,MAAM,SAAS,EAAE,OAAO,KAAK,EAAE,UAAU,KAAK;AAC9C,QAAQ,UAAU,CAAC,OAAO,CAACC,oBAAqB,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;AAChE,MAAM;AACN,KAAK;AACL,GAAG;AACH;AACA,SAAS,YAAY,CAAC,MAAM,EAAE,SAAS,EAAE;AACzC,EAAE,OAAO,oBAAoB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC,WAAW;AAC5D,IAAI,IAAI,iBAAiB;AACzB,GAAG;AACH;AACA,SAAS,oBAAoB,CAAC,MAAM,EAAE,OAAO,EAAE;AAC/C,EAAE,IAAI,IAAI;AACV,EAAE,MAAM,UAAU,GAAG,oBAAoB;AACzC,IAAI,MAAM;AACV,IAAI,OAAO,IAAI,IAAI,GAAG,MAAM,GAAG,OAAO,CAAC;AACvC,GAAG,CAAC,WAAW,CAAC,IAAI,iBAAiB,EAAE,CAAC;AACxC,EAAE,MAAM,IAAI,GAAG,OAAO,IAAI,IAAI,GAAG,MAAM,GAAG,OAAO,CAAC,IAAI;AACtD,EAAE,MAAM,IAAI,GAAG,OAAO,IAAI,IAAI,GAAG,MAAM,GAAG,OAAO,CAAC,IAAI;AACtD,EAAE,MAAM,cAAc,GAAG,IAAI,GAAG,YAAY,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,GAAG,UAAU;AAClF,EAAE,OAAO,IAAI,QAAQ,CAAC,cAAc,EAAE;AACtC,IAAI,MAAM,EAAE,CAAC,IAAI,GAAG,IAAI,IAAI,IAAI,GAAG,MAAM,GAAG,IAAI,CAAC,MAAM,KAAK,IAAI,GAAG,IAAI,GAAG,GAAG;AAC7E,IAAI,UAAU,EAAE,IAAI,IAAI,IAAI,GAAG,MAAM,GAAG,IAAI,CAAC,UAAU;AACvD,IAAI,OAAO,EAAE,sBAAsB,CAAC,IAAI,IAAI,IAAI,GAAG,MAAM,GAAG,IAAI,CAAC,OAAO,EAAE;AAC1E,MAAM,WAAW,EAAE,2BAA2B;AAC9C,MAAM,iBAAiB,EAAE;AACzB,KAAK;AACL,GAAG,CAAC;AACJ;AACA,SAAS,mBAAmB,CAAC,MAAM,EAAE,OAAO,EAAE;AAC9C,EAAE,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC,oBAAoB,CAAC,MAAM,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;AAC3E;AACA,SAAS,qBAAqB,CAAC,KAAK,EAAE,UAAU,EAAE;AAClD,EAAE,IAAI,OAAO,KAAK,CAAC,OAAO,KAAK,QAAQ,EAAE;AACzC,IAAI,UAAU,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC;AACrC,EAAE,CAAC,MAAM;AACT,IAAI,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO;AACjC,IAAI,KAAK,MAAM,IAAI,IAAI,OAAO,EAAE;AAChC,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,EAAE;AAChC,QAAQ,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC;AACrC,MAAM;AACN,IAAI;AACJ,EAAE;AACF;;AAEA;AACG,IAAC,0BAA0B,GAAG;AACjC,QAAQ,CAAC,0BAA0B,EAAE;AACrC,EAAE,mBAAmB,EAAE,MAAM,oBAAoB;AACjD,EAAE,YAAY,EAAE,MAAM,aAAa;AACnC,EAAE,oBAAoB,EAAE,MAAM;AAC9B,CAAC,CAAC;AAGF,SAAS,qBAAqB,CAAC,MAAM,EAAE,SAAS,EAAE;AAClD,EAAE,MAAM,SAAS,GAAG,iBAAiB,EAAE;AACvC,EAAE,OAAO,oCAAoC,CAAC,MAAM,CAAC
,MAAM,CAAC,aAAa,CAAC,EAAE,CAAC,CAAC,WAAW;AACzF,IAAI,IAAI,eAAe,CAAC;AACxB,MAAM,MAAM,SAAS,CAAC,OAAO,EAAE,UAAU,EAAE;AAC3C,QAAQ,UAAU,CAAC,OAAO,CAAC,SAAS,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;AACpD,MAAM;AACN,KAAK;AACL,GAAG,CAAC,WAAW,CAAC,0BAA0B,CAAC,SAAS,CAAC,CAAC,CAAC,WAAW,CAAC,IAAI,iBAAiB,EAAE,CAAC,CAAC,WAAW;AACvG,IAAI,IAAI,eAAe,CAAC;AACxB,MAAM,SAAS,EAAE,OAAO,KAAK,EAAE,UAAU,KAAK;AAC9C,QAAQ,UAAU,CAAC,OAAO,CAACC,oBAAqB,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;AAChE,MAAM;AACN,KAAK;AACL,GAAG;AACH;AACA,SAAS,aAAa,CAAC,MAAM,EAAE,SAAS,EAAE;AAC1C,EAAE,OAAO,qBAAqB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC,WAAW;AAC7D,IAAI,IAAI,iBAAiB;AACzB,GAAG;AACH;AACA,SAAS,qBAAqB,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE,EAAE;AACrD,EAAE,IAAI,IAAI;AACV,EAAE,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,SAAS,EAAE,GAAG,OAAO;AAC3C,EAAE,MAAM,UAAU,GAAG,qBAAqB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC,WAAW;AACzE,IAAI,IAAI,iBAAiB;AACzB,GAAG;AACH,EAAE,MAAM,cAAc,GAAG,IAAI,GAAG,YAAY,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,GAAG,UAAU;AAClF,EAAE,OAAO,IAAI,QAAQ,CAAC,cAAc,EAAE;AACtC,IAAI,MAAM,EAAE,CAAC,IAAI,GAAG,IAAI,IAAI,IAAI,GAAG,MAAM,GAAG,IAAI,CAAC,MAAM,KAAK,IAAI,GAAG,IAAI,GAAG,GAAG;AAC7E,IAAI,UAAU,EAAE,IAAI,IAAI,IAAI,GAAG,MAAM,GAAG,IAAI,CAAC,UAAU;AACvD,IAAI,OAAO,EAAE,sBAAsB,CAAC,IAAI,IAAI,IAAI,GAAG,MAAM,GAAG,IAAI,CAAC,OAAO,EAAE;AAC1E,MAAM,WAAW,EAAE,2BAA2B;AAC9C,MAAM,iBAAiB,EAAE;AACzB,KAAK;AACL,GAAG,CAAC;AACJ;AACA,SAAS,oBAAoB,CAAC,MAAM,EAAE,OAAO,EAAE;AAC/C,EAAE,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC,qBAAqB,CAAC,MAAM,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;AAC5E;AACA,SAAS,iBAAiB,GAAG;AAC7B,EAAE,IAAI,aAAa,GAAG,IAAI;AAC1B,EAAE,OAAO,CAAC,KAAK,KAAK;AACpB,IAAI,IAAI,aAAa,EAAE;AACvB,MAAM,KAAK,GAAG,KAAK,CAAC,SAAS,EAAE;AAC/B,MAAM,IAAI,KAAK;AACf,QAAQ,aAAa,GAAG,KAAK;AAC7B,IAAI;AACJ,IAAI,OAAO,KAAK;AAChB,EAAE,CAAC;AACH;;;;","x_google_ignoreList":[0]}
|