@botpress/cognitive 0.1.42 → 0.1.44
This diff shows the content of publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
- package/.turbo/turbo-build.log +7 -7
- package/dist/index.cjs +125 -35
- package/dist/index.cjs.map +2 -2
- package/dist/index.d.ts +7 -3
- package/dist/index.mjs +125 -35
- package/dist/index.mjs.map +2 -2
- package/package.json +1 -1
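Most of the change sits in the bundled source map below, whose embedded TypeScript sources show, among other things, a `__experimental_beta` flag read by the `Cognitive` constructor and a `CognitiveBeta` client for the `/v2/cognitive` endpoints. As a rough orientation for the changed surface, here is a minimal TypeScript sketch of how a consumer might construct the client, based only on the `CognitiveProps` fields visible in the embedded constructor; the `Client` construction options, the message shape, and the exact exported type names are assumptions and are not taken from this diff.

```ts
import { Client } from '@botpress/client'
import { Cognitive } from '@botpress/cognitive'

// The bundled getExtendedClient requires an 'x-bot-id' header on the client,
// so the Botpress client is assumed to be created with a bot ID and token.
const client = new Client({ botId: '<bot-id>', token: '<token>' })

const cognitive = new Cognitive({
  client,
  timeout: 60_000,           // optional; the constructor defaults to 5 minutes
  maxRetries: 3,             // optional; the constructor defaults to 5
  __experimental_beta: true, // opts into the CognitiveBeta (/v2/cognitive) code path
})

// 'best' and 'fast' are resolved against stored model preferences in the bundled source;
// the message shape below is assumed, not shown in this diff.
const { output, meta } = await cognitive.generateContent({
  model: 'best',
  messages: [{ role: 'user', content: 'Hello' }],
})

console.log(output.choices[0], meta.latency)
```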
package/dist/index.mjs.map
CHANGED
@@ -1,7 +1,7 @@
 1   1 | {
 2   2 | "version": 3,
 3   3 |
"sources": ["../../../node_modules/.pnpm/exponential-backoff@3.1.1/node_modules/exponential-backoff/src/options.ts", "../../../node_modules/.pnpm/exponential-backoff@3.1.1/node_modules/exponential-backoff/src/jitter/full/full.jitter.ts", "../../../node_modules/.pnpm/exponential-backoff@3.1.1/node_modules/exponential-backoff/src/jitter/no/no.jitter.ts", "../../../node_modules/.pnpm/exponential-backoff@3.1.1/node_modules/exponential-backoff/src/jitter/jitter.factory.ts", "../../../node_modules/.pnpm/exponential-backoff@3.1.1/node_modules/exponential-backoff/src/delay/delay.base.ts", "../../../node_modules/.pnpm/exponential-backoff@3.1.1/node_modules/exponential-backoff/src/delay/skip-first/skip-first.delay.ts", "../../../node_modules/.pnpm/exponential-backoff@3.1.1/node_modules/exponential-backoff/src/delay/always/always.delay.ts", "../../../node_modules/.pnpm/exponential-backoff@3.1.1/node_modules/exponential-backoff/src/delay/delay.factory.ts", "../../../node_modules/.pnpm/exponential-backoff@3.1.1/node_modules/exponential-backoff/src/backoff.ts", "../src/client.ts", "../../../node_modules/.pnpm/nanoevents@9.1.0/node_modules/nanoevents/index.js", "../src/bp-client.ts", "../src/cognitive-v2/index.ts", "../src/cognitive-v2/models.ts", "../src/errors.ts", "../src/interceptors.ts", "../src/models.ts"],
 4     | -
"sourcesContent": [null, null, null, null, null, null, null, null, null, "import { backOff } from 'exponential-backoff'\nimport { createNanoEvents, Unsubscribe } from 'nanoevents'\n\nimport { ExtendedClient, getExtendedClient } from './bp-client'\nimport { CognitiveBeta, getCognitiveV2Model } from './cognitive-v2'\n\nimport { getActionFromError } from './errors'\nimport { InterceptorManager } from './interceptors'\nimport {\n DOWNTIME_THRESHOLD_MINUTES,\n getBestModels,\n getFastModels,\n Model,\n ModelPreferences,\n ModelProvider,\n ModelRef,\n pickModel,\n RemoteModelProvider,\n} from './models'\nimport { GenerateContentOutput } from './schemas.gen'\nimport { CognitiveProps, Events, InputProps, Request, Response } from './types'\n\nexport class Cognitive {\n public ['$$IS_COGNITIVE'] = true\n\n public static isCognitiveClient(obj: any): obj is Cognitive {\n return obj?.$$IS_COGNITIVE === true\n }\n\n public interceptors = {\n request: new InterceptorManager<Request>(),\n response: new InterceptorManager<Response>(),\n }\n\n protected _models: Model[] = []\n protected _timeoutMs: number = 5 * 60 * 1000 // Default timeout of 5 minutes\n protected _maxRetries: number = 5 // Default max retries\n protected _client: ExtendedClient\n protected _preferences: ModelPreferences | null = null\n protected _provider: ModelProvider\n protected _downtimes: ModelPreferences['downtimes'] = []\n protected _useBeta: boolean = false\n\n private _events = createNanoEvents<Events>()\n\n public constructor(props: CognitiveProps) {\n this._client = getExtendedClient(props.client)\n this._provider = props.provider ?? new RemoteModelProvider(props.client)\n this._timeoutMs = props.timeout ?? this._timeoutMs\n this._maxRetries = props.maxRetries ?? this._maxRetries\n this._useBeta = props.__experimental_beta ?? false\n }\n\n public get client(): ExtendedClient {\n return this._client\n }\n\n public clone(): Cognitive {\n const copy = new Cognitive({\n client: this._client.clone(),\n provider: this._provider,\n timeout: this._timeoutMs,\n maxRetries: this._maxRetries,\n })\n\n copy._models = [...this._models]\n copy._preferences = this._preferences ? 
{ ...this._preferences } : null\n copy._downtimes = [...this._downtimes]\n copy.interceptors.request = this.interceptors.request\n copy.interceptors.response = this.interceptors.response\n\n return copy\n }\n\n public on<K extends keyof Events>(this: this, event: K, cb: Events[K]): Unsubscribe {\n return this._events.on(event, cb)\n }\n\n public async fetchInstalledModels(): Promise<Model[]> {\n if (!this._models.length) {\n this._models = await this._provider.fetchInstalledModels()\n }\n\n return this._models\n }\n\n public async fetchPreferences(): Promise<ModelPreferences> {\n if (this._preferences) {\n return this._preferences\n }\n\n this._preferences = await this._provider.fetchModelPreferences()\n\n if (this._preferences) {\n return this._preferences\n }\n\n const models = await this.fetchInstalledModels()\n\n this._preferences = {\n best: getBestModels(models).map((m) => m.ref),\n fast: getFastModels(models).map((m) => m.ref),\n downtimes: [],\n }\n\n await this._provider.saveModelPreferences(this._preferences)\n\n return this._preferences\n }\n\n public async setPreferences(preferences: ModelPreferences, save: boolean = false): Promise<void> {\n this._preferences = preferences\n\n if (save) {\n await this._provider.saveModelPreferences(preferences)\n }\n }\n\n private _cleanupOldDowntimes(): void {\n const now = Date.now()\n const thresholdMs = 1000 * 60 * DOWNTIME_THRESHOLD_MINUTES\n\n this._preferences!.downtimes = this._preferences!.downtimes.filter((downtime) => {\n const downtimeStart = new Date(downtime.startedAt).getTime()\n return now - downtimeStart <= thresholdMs\n })\n }\n\n private async _selectModel(ref: string): Promise<{ integration: string; model: string }> {\n const parseRef = (ref: string) => {\n const parts = ref.split(':')\n return { integration: parts[0]!, model: parts.slice(1).join(':') }\n }\n\n const preferences = await this.fetchPreferences()\n\n preferences.best ??= []\n preferences.fast ??= []\n preferences.downtimes ??= []\n\n const downtimes = [...preferences.downtimes, ...(this._downtimes ?? [])]\n\n if (ref === 'best') {\n return parseRef(pickModel(preferences.best, downtimes))\n }\n\n if (ref === 'fast') {\n return parseRef(pickModel(preferences.fast, downtimes))\n }\n\n return parseRef(pickModel([ref as ModelRef, ...preferences.best, ...preferences.fast], downtimes))\n }\n\n public async getModelDetails(model: string): Promise<Model> {\n if (this._useBeta) {\n const resolvedModel = getCognitiveV2Model(model)\n if (resolvedModel) {\n return { ...resolvedModel, ref: resolvedModel.id as ModelRef, integration: 'cognitive-v2' }\n }\n }\n\n await this.fetchInstalledModels()\n const { integration, model: modelName } = await this._selectModel(model)\n const def = this._models.find((m) => m.integration === integration && (m.name === modelName || m.id === modelName))\n if (!def) {\n throw new Error(`Model ${modelName} not found`)\n }\n\n return def\n }\n\n public async generateContent(input: InputProps): Promise<Response> {\n if (!this._useBeta || !getCognitiveV2Model(input.model!)) {\n return this._generateContent(input)\n }\n\n const betaClient = new CognitiveBeta(this._client.config as any)\n const response = await betaClient.generateText(input as any)\n\n return {\n output: {\n id: 'beta-output',\n provider: response.metadata.provider,\n model: response.metadata.model!,\n choices: [\n {\n type: 'text',\n content: response.output,\n role: 'assistant',\n index: 0,\n stopReason: response.metadata.stopReason! 
as any,\n },\n ],\n usage: {\n inputTokens: response.metadata.usage.inputTokens,\n inputCost: 0,\n outputTokens: response.metadata.usage.outputTokens,\n outputCost: response.metadata.cost ?? 0,\n },\n botpress: {\n cost: response.metadata.cost ?? 0,\n },\n },\n meta: {\n cached: response.metadata.cached,\n model: { integration: response.metadata.provider, model: response.metadata.model! },\n latency: response.metadata.latency!,\n cost: {\n input: 0,\n output: response.metadata.cost || 0,\n },\n tokens: {\n input: response.metadata.usage.inputTokens,\n output: response.metadata.usage.outputTokens,\n },\n },\n }\n }\n\n private async _generateContent(input: InputProps): Promise<Response> {\n const start = Date.now()\n\n const signal = input.signal ?? AbortSignal.timeout(this._timeoutMs)\n\n const client = this._client.abortable(signal)\n\n let props: Request = { input }\n let integration: string\n let model: string\n\n this._events.emit('request', props)\n\n const { output, meta } = await backOff<{\n output: GenerateContentOutput\n meta: any\n }>(\n async () => {\n const selection = await this._selectModel(input.model ?? 'best')\n\n integration = selection.integration\n model = selection.model\n\n props = await this.interceptors.request.run({ input }, signal)\n\n return client.callAction({\n type: `${integration}:generateContent`,\n input: {\n ...props.input,\n model: { id: model },\n },\n }) as Promise<{ output: GenerateContentOutput; meta: any }>\n },\n {\n retry: async (err, _attempt) => {\n if (signal?.aborted) {\n // We don't want to retry if the request was aborted\n this._events.emit('aborted', props, err)\n signal.throwIfAborted()\n return false\n }\n\n if (_attempt > this._maxRetries) {\n this._events.emit('error', props, err)\n return false\n }\n\n const action = getActionFromError(err)\n\n if (action === 'abort') {\n this._events.emit('error', props, err)\n return false\n }\n\n if (action === 'fallback') {\n // We don't want to retry if the request was already retried with a fallback model\n this._downtimes.push({\n ref: `${integration!}:${model!}`,\n startedAt: new Date().toISOString(),\n reason: 'Model is down',\n })\n\n this._cleanupOldDowntimes()\n\n await this._provider.saveModelPreferences({\n ...(this._preferences ?? { best: [], downtimes: [], fast: [] }),\n downtimes: [...(this._preferences!.downtimes ?? []), ...(this._downtimes ?? [])],\n })\n\n this._events.emit('fallback', props, err)\n return true\n }\n\n this._events.emit('retry', props, err)\n return true\n },\n }\n )\n\n const response = {\n output,\n meta: {\n cached: meta.cached ?? false,\n model: { integration: integration!, model: model! 
},\n latency: Date.now() - start,\n cost: { input: output.usage.inputCost, output: output.usage.outputCost },\n tokens: { input: output.usage.inputTokens, output: output.usage.outputTokens },\n },\n } satisfies Response\n\n this._events.emit('response', props, response)\n\n return this.interceptors.response.run(response, signal)\n }\n}\n", "export let createNanoEvents = () => ({\n emit(event, ...args) {\n for (\n let callbacks = this.events[event] || [],\n i = 0,\n length = callbacks.length;\n i < length;\n i++\n ) {\n callbacks[i](...args)\n }\n },\n events: {},\n on(event, cb) {\n ;(this.events[event] ||= []).push(cb)\n return () => {\n this.events[event] = this.events[event]?.filter(i => cb !== i)\n }\n }\n})\n", "import { type Client } from '@botpress/client'\nimport { type AxiosInstance } from 'axios'\nimport { BotpressClientLike } from './types'\n\n/** @internal */\nexport type ExtendedClient = Client & {\n botId: string\n axios: AxiosInstance\n clone: () => ExtendedClient\n abortable: (signal: AbortSignal) => ExtendedClient\n}\n\ntype InternalClientType = BotpressClientLike & {\n _client?: InternalClientType\n config: {\n headers: Record<string, string>\n }\n}\n\nexport const getExtendedClient = (_client: unknown): ExtendedClient => {\n const client = _client as InternalClientType\n\n if (!client || client === null || typeof client !== 'object') {\n throw new Error('Client must be a valid instance of a Botpress client (@botpress/client)')\n }\n\n if (typeof client._client === 'object' && !!client._client) {\n try {\n return getExtendedClient(client._client)\n } catch {}\n }\n\n if (\n typeof client.constructor !== 'function' ||\n typeof client.callAction !== 'function' ||\n !client.config ||\n typeof client.config !== 'object' ||\n !client.config.headers\n ) {\n throw new Error('Client must be a valid instance of a Botpress client (@botpress/client)')\n }\n\n const botId = client.config.headers['x-bot-id'] as string\n\n if (!botId?.length) {\n throw new Error('Client must be instanciated with Bot ID')\n }\n\n const clone = () => {\n const c = client as any\n if (c.clone && typeof c.clone === 'function') {\n return getExtendedClient(c.clone())\n }\n return getExtendedClient(new c.constructor(c.config))\n }\n\n return {\n ...client,\n botId,\n axios: (client as any).axiosInstance as AxiosInstance,\n clone,\n abortable: (signal: AbortSignal) => {\n const abortable = clone()\n const instance = abortable.axios\n instance.defaults.signal = signal\n return abortable\n },\n } as ExtendedClient\n}\n", "import axios, { AxiosInstance } from 'axios'\nimport { backOff } from 'exponential-backoff'\nimport { defaultModel, knownTags, models } from './models'\nimport { CognitiveRequest, CognitiveResponse, CognitiveStreamChunk, Model } from './types'\n\nexport { CognitiveRequest, CognitiveResponse, CognitiveStreamChunk }\n\ntype ClientProps = {\n apiUrl?: string\n timeout?: number\n botId?: string\n token?: string\n withCredentials?: boolean\n headers?: Record<string, string>\n}\n\ntype RequestOptions = {\n signal?: AbortSignal\n timeout?: number\n}\n\nconst isBrowser = () => typeof window !== 'undefined' && typeof window.fetch === 'function'\n\nexport class CognitiveBeta {\n private _axiosClient: AxiosInstance\n private readonly _apiUrl: string\n private readonly _timeout: number\n private readonly _withCredentials: boolean\n private readonly _headers: Record<string, string>\n\n public constructor(props: ClientProps) {\n this._apiUrl = props.apiUrl || 'https://api.botpress.cloud'\n this._timeout = 
props.timeout || 60_001\n this._withCredentials = props.withCredentials || false\n this._headers = { ...props.headers }\n\n if (props.botId) {\n this._headers['X-Bot-Id'] = props.botId\n }\n\n if (props.token) {\n this._headers['Authorization'] = `Bearer ${props.token}`\n }\n\n this._axiosClient = axios.create({\n headers: this._headers,\n withCredentials: this._withCredentials,\n baseURL: this._apiUrl,\n })\n }\n\n public async generateText(input: CognitiveRequest, options: RequestOptions = {}) {\n const signal = options.signal ?? AbortSignal.timeout(this._timeout)\n\n const { data } = await this._withServerRetry(() =>\n this._axiosClient.post<CognitiveResponse>('/v2/cognitive/generate-text', input, {\n signal,\n timeout: options.timeout ?? this._timeout,\n })\n )\n\n return data\n }\n\n public async listModels() {\n const { data } = await this._withServerRetry(() =>\n this._axiosClient.get<{ models: Model[] }>('/v2/cognitive/models')\n )\n\n return data.models\n }\n\n public async *generateTextStream(\n request: CognitiveRequest,\n options: RequestOptions = {}\n ): AsyncGenerator<CognitiveStreamChunk, void, unknown> {\n const signal = options.signal ?? AbortSignal.timeout(this._timeout)\n\n if (isBrowser()) {\n const res = await fetch(`${this._apiUrl}/v2/cognitive/generate-text-stream`, {\n method: 'POST',\n headers: {\n ...this._headers,\n 'Content-Type': 'application/json',\n },\n credentials: this._withCredentials ? 'include' : 'omit',\n body: JSON.stringify({ ...request, stream: true }),\n signal,\n })\n\n if (!res.ok) {\n const text = await res.text().catch(() => '')\n const err = new Error(`HTTP ${res.status}: ${text || res.statusText}`)\n ;(err as any).response = { status: res.status, data: text }\n throw err\n }\n\n const body = res.body\n if (!body) {\n throw new Error('No response body received for streaming request')\n }\n\n const reader = body.getReader()\n const iterable = (async function* () {\n for (;;) {\n const { value, done } = await reader.read()\n if (done) {\n break\n }\n if (value) {\n yield value\n }\n }\n })()\n\n for await (const obj of this._ndjson<CognitiveStreamChunk>(iterable)) {\n yield obj\n }\n return\n }\n\n const res = await this._withServerRetry(() =>\n this._axiosClient.post(\n '/v1/generate-text-stream',\n { ...request, stream: true },\n {\n responseType: 'stream',\n signal,\n timeout: options.timeout ?? 
this._timeout,\n }\n )\n )\n\n const nodeStream: AsyncIterable<Uint8Array> = res.data as any\n if (!nodeStream) {\n throw new Error('No response body received for streaming request')\n }\n\n for await (const obj of this._ndjson<CognitiveStreamChunk>(nodeStream)) {\n yield obj\n }\n }\n\n private async *_ndjson<T>(stream: AsyncIterable<Uint8Array>): AsyncGenerator<T, void, unknown> {\n const decoder = new TextDecoder('utf-8')\n let buffer = ''\n\n for await (const chunk of stream) {\n buffer += decoder.decode(chunk, { stream: true })\n\n for (;;) {\n const i = buffer.indexOf('\\n')\n if (i < 0) {\n break\n }\n\n const line = buffer.slice(0, i).replace(/\\r$/, '')\n buffer = buffer.slice(i + 1)\n\n if (!line) {\n continue\n }\n\n yield JSON.parse(line) as T\n }\n }\n\n buffer += decoder.decode()\n\n const tail = buffer.trim()\n if (tail) {\n yield JSON.parse(tail) as T\n }\n }\n\n private _isRetryableServerError(error: any): boolean {\n if (axios.isAxiosError(error)) {\n if (!error.response) {\n return true\n }\n\n const status = error.response?.status\n if (status && [502, 503, 504].includes(status)) {\n return true\n }\n\n if (\n error.code &&\n ['ECONNABORTED', 'ECONNRESET', 'ETIMEDOUT', 'EAI_AGAIN', 'ENOTFOUND', 'EPIPE'].includes(error.code)\n ) {\n return true\n }\n }\n\n return false\n }\n\n private async _withServerRetry<T>(fn: () => Promise<T>): Promise<T> {\n return backOff(fn, {\n numOfAttempts: 3,\n startingDelay: 300,\n timeMultiple: 2,\n jitter: 'full',\n retry: (e) => this._isRetryableServerError(e),\n })\n }\n}\n\nexport const getCognitiveV2Model = (model: string): Model | undefined => {\n if (models[model]) {\n return models[model]\n }\n\n // Some models (ex fireworks) have a long name (the internal id) so it is now an alias instead of the main id\n const alias = Object.values(models).find((x) => x.aliases?.includes(model))\n if (alias) {\n return alias\n }\n\n // Special tags like auto, fast, coding don't have explicit limits so we give a default model\n if (knownTags.includes(model)) {\n return { ...defaultModel, id: model, name: model }\n }\n return undefined\n}\n", "import { Model } from 'src/schemas.gen'\n\nexport type RemoteModel = Model & { aliases?: string[]; lifecycle: 'live' | 'beta' | 'deprecated' | 'discontinued' }\n\nexport const models: Record<string, RemoteModel> = {\n 'openai:gpt-5-2025-08-07': {\n id: 'openai:gpt-5-2025-08-07',\n name: 'GPT-5',\n description:\n \"GPT-5 is OpenAI's latest and most advanced AI model. It is a reasoning model that chooses the best way to respond based on task complexity and user intent. GPT-5 delivers expert-level performance across coding, math, writing, health, and visual perception, with improved accuracy, speed, and reduced hallucinations. It excels in complex tasks, long-context understanding, multimodal inputs (text and images), and safe, nuanced responses.\",\n input: {\n maxTokens: 400000,\n costPer1MTokens: 1.25,\n },\n output: {\n maxTokens: 128000,\n costPer1MTokens: 10,\n },\n tags: ['recommended', 'reasoning', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:gpt-5-mini-2025-08-07': {\n id: 'openai:gpt-5-mini-2025-08-07',\n name: 'GPT-5 Mini',\n description:\n 'GPT-5 Mini is a lightweight and cost-effective version of GPT-5, optimized for applications where speed and efficiency matter more than full advanced capabilities. 
It is designed for cost-sensitive use cases such as chatbots, content generation, and high-volume usage, striking a balance between performance and affordability, making it suitable for simpler tasks that do not require deep multi-step reasoning or the full reasoning power of GPT-5',\n input: {\n maxTokens: 400000,\n costPer1MTokens: 0.25,\n },\n output: {\n maxTokens: 128000,\n costPer1MTokens: 2,\n },\n tags: ['recommended', 'reasoning', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:gpt-5-nano-2025-08-07': {\n id: 'openai:gpt-5-nano-2025-08-07',\n name: 'GPT-5 Nano',\n description:\n 'GPT-5 Nano is an ultra-lightweight version of GPT-5 optimized for speed and very low latency, making it ideal for use cases like simple chatbots, basic content generation, summarization, and classification tasks.',\n input: {\n maxTokens: 400000,\n costPer1MTokens: 0.05,\n },\n output: {\n maxTokens: 128000,\n costPer1MTokens: 0.4,\n },\n tags: ['low-cost', 'reasoning', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:o4-mini-2025-04-16': {\n id: 'openai:o4-mini-2025-04-16',\n name: 'GPT o4-mini',\n description:\n \"o4-mini is OpenAI's latest small o-series model. It's optimized for fast, effective reasoning with exceptionally efficient performance in coding and visual tasks.\",\n input: {\n maxTokens: 200000,\n costPer1MTokens: 1.1,\n },\n output: {\n maxTokens: 100000,\n costPer1MTokens: 4.4,\n },\n tags: ['reasoning', 'vision', 'coding'],\n lifecycle: 'live',\n },\n 'openai:o3-2025-04-16': {\n id: 'openai:o3-2025-04-16',\n name: 'GPT o3',\n description:\n 'o3 is a well-rounded and powerful model across domains. It sets a new standard for math, science, coding, and visual reasoning tasks. It also excels at technical writing and instruction-following.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 2,\n },\n output: {\n maxTokens: 100000,\n costPer1MTokens: 8,\n },\n tags: ['reasoning', 'vision', 'coding'],\n lifecycle: 'live',\n },\n 'openai:gpt-4.1-2025-04-14': {\n id: 'openai:gpt-4.1-2025-04-14',\n name: 'GPT 4.1',\n description:\n 'GPT 4.1 is our flagship model for complex tasks. It is well suited for problem solving across domains. The knowledge cutoff is June 2024.',\n input: {\n maxTokens: 1047576,\n costPer1MTokens: 2,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 8,\n },\n tags: ['recommended', 'vision', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:gpt-4.1-mini-2025-04-14': {\n id: 'openai:gpt-4.1-mini-2025-04-14',\n name: 'GPT 4.1 Mini',\n description:\n 'GPT 4.1 mini provides a balance between intelligence, speed, and cost that makes it an attractive model for many use cases. The knowledge cutoff is June 2024.',\n input: {\n maxTokens: 1047576,\n costPer1MTokens: 0.4,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 1.6,\n },\n tags: ['recommended', 'vision', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:gpt-4.1-nano-2025-04-14': {\n id: 'openai:gpt-4.1-nano-2025-04-14',\n name: 'GPT 4.1 Nano',\n description: 'GPT-4.1 nano is the fastest, most cost-effective GPT 4.1 model. 
The knowledge cutoff is June 2024.',\n input: {\n maxTokens: 1047576,\n costPer1MTokens: 0.1,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 0.4,\n },\n tags: ['low-cost', 'vision', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:o3-mini-2025-01-31': {\n id: 'openai:o3-mini-2025-01-31',\n name: 'GPT o3-mini',\n description:\n 'o3-mini is the most recent small reasoning model from OpenAI, providing high intelligence at the same cost and latency targets of o1-mini. Also supports key developer features like Structured Outputs and function calling.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 1.1,\n },\n output: {\n maxTokens: 100000,\n costPer1MTokens: 4.4,\n },\n tags: ['reasoning', 'general-purpose', 'coding'],\n lifecycle: 'live',\n },\n 'openai:o1-2024-12-17': {\n id: 'openai:o1-2024-12-17',\n name: 'GPT o1',\n description:\n 'The o1 model is designed to solve hard problems across domains. Trained with reinforcement learning to perform complex reasoning with a long internal chain of thought.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 15,\n },\n output: {\n maxTokens: 100000,\n costPer1MTokens: 60,\n },\n tags: ['reasoning', 'vision', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:o1-mini-2024-09-12': {\n id: 'openai:o1-mini-2024-09-12',\n name: 'GPT o1-mini',\n description:\n 'The o1-mini model is a fast and affordable reasoning model for specialized tasks. Trained with reinforcement learning to perform complex reasoning.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 1.1,\n },\n output: {\n maxTokens: 65536,\n costPer1MTokens: 4.4,\n },\n tags: ['reasoning', 'vision', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:gpt-4o-mini-2024-07-18': {\n id: 'openai:gpt-4o-mini-2024-07-18',\n name: 'GPT-4o Mini',\n description:\n \"GPT-4o mini is OpenAI's most advanced model in the small models category, and their cheapest model yet. Multimodal with higher intelligence than gpt-3.5-turbo but just as fast.\",\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.15,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 0.6,\n },\n tags: ['recommended', 'vision', 'low-cost', 'general-purpose', 'function-calling'],\n lifecycle: 'live',\n },\n 'openai:gpt-4o-2024-11-20': {\n id: 'openai:gpt-4o-2024-11-20',\n name: 'GPT-4o (November 2024)',\n description:\n \"GPT-4o is OpenAI's most advanced model. Multimodal with the same high intelligence as GPT-4 Turbo but cheaper and more efficient.\",\n input: {\n maxTokens: 128000,\n costPer1MTokens: 2.5,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 10,\n },\n tags: ['recommended', 'vision', 'general-purpose', 'coding', 'agents', 'function-calling'],\n lifecycle: 'live',\n },\n 'openai:gpt-4o-2024-08-06': {\n id: 'openai:gpt-4o-2024-08-06',\n name: 'GPT-4o (August 2024)',\n description:\n \"GPT-4o is OpenAI's most advanced model. Multimodal with the same high intelligence as GPT-4 Turbo but cheaper and more efficient.\",\n input: {\n maxTokens: 128000,\n costPer1MTokens: 2.5,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 10,\n },\n tags: ['deprecated', 'vision', 'general-purpose', 'coding', 'agents', 'function-calling'],\n lifecycle: 'deprecated',\n },\n 'openai:gpt-4o-2024-05-13': {\n id: 'openai:gpt-4o-2024-05-13',\n name: 'GPT-4o (May 2024)',\n description:\n \"GPT-4o is OpenAI's most advanced model. 
Multimodal with the same high intelligence as GPT-4 Turbo but cheaper and more efficient.\",\n input: {\n maxTokens: 128000,\n costPer1MTokens: 5,\n },\n output: {\n maxTokens: 4096,\n costPer1MTokens: 15,\n },\n tags: ['deprecated', 'vision', 'general-purpose', 'coding', 'agents', 'function-calling'],\n lifecycle: 'deprecated',\n },\n 'openai:gpt-4-turbo-2024-04-09': {\n id: 'openai:gpt-4-turbo-2024-04-09',\n name: 'GPT-4 Turbo',\n description:\n 'GPT-4 is a large multimodal model that can solve difficult problems with greater accuracy than previous models, thanks to its broader general knowledge and advanced reasoning capabilities.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 10,\n },\n output: {\n maxTokens: 4096,\n costPer1MTokens: 30,\n },\n tags: ['deprecated', 'general-purpose', 'coding', 'agents', 'function-calling'],\n lifecycle: 'deprecated',\n },\n 'openai:gpt-3.5-turbo-0125': {\n id: 'openai:gpt-3.5-turbo-0125',\n name: 'GPT-3.5 Turbo',\n description:\n 'GPT-3.5 Turbo can understand and generate natural language or code and has been optimized for chat but works well for non-chat tasks as well.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.5,\n },\n output: {\n maxTokens: 4096,\n costPer1MTokens: 1.5,\n },\n tags: ['deprecated', 'general-purpose', 'low-cost'],\n lifecycle: 'deprecated',\n },\n 'anthropic:claude-sonnet-4-20250514': {\n id: 'anthropic:claude-sonnet-4-20250514',\n name: 'Claude Sonnet 4',\n description:\n 'Claude Sonnet 4 significantly enhances the capabilities of its predecessor, Sonnet 3.7, excelling in both coding and reasoning tasks with improved precision and controllability. Sonnet 4 balances capability and computational efficiency, making it suitable for a broad range of applications from routine coding tasks to complex software development projects. Key enhancements include improved autonomous codebase navigation, reduced error rates in agent-driven workflows, and increased reliability in following intricate instructions.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 64000,\n costPer1MTokens: 15,\n },\n tags: ['recommended', 'reasoning', 'agents', 'vision', 'general-purpose', 'coding'],\n lifecycle: 'live',\n },\n 'anthropic:claude-sonnet-4-reasoning-20250514': {\n id: 'anthropic:claude-sonnet-4-reasoning-20250514',\n name: 'Claude Sonnet 4 (Reasoning Mode)',\n description:\n 'This model uses the \"Extended Thinking\" mode and will use a significantly higher amount of output tokens than the Standard Mode, so this model should only be used for tasks that actually require it.\\n\\nClaude Sonnet 4 significantly enhances the capabilities of its predecessor, Sonnet 3.7, excelling in both coding and reasoning tasks with improved precision and controllability.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 64000,\n costPer1MTokens: 15,\n },\n tags: ['deprecated', 'vision', 'reasoning', 'general-purpose', 'agents', 'coding'],\n lifecycle: 'deprecated',\n },\n 'anthropic:claude-3-7-sonnet-20250219': {\n id: 'anthropic:claude-3-7-sonnet-20250219',\n name: 'Claude 3.7 Sonnet',\n description:\n 'Claude 3.7 Sonnet is an advanced large language model with improved reasoning, coding, and problem-solving capabilities. 
The model demonstrates notable improvements in coding, particularly in front-end development and full-stack updates, and excels in agentic workflows, where it can autonomously navigate multi-step processes.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 64000,\n costPer1MTokens: 15,\n },\n tags: ['recommended', 'reasoning', 'agents', 'vision', 'general-purpose', 'coding'],\n lifecycle: 'live',\n },\n 'anthropic:claude-3-7-sonnet-reasoning-20250219': {\n id: 'anthropic:claude-3-7-sonnet-reasoning-20250219',\n name: 'Claude 3.7 Sonnet (Reasoning Mode)',\n description:\n 'This model uses the \"Extended Thinking\" mode and will use a significantly higher amount of output tokens than the Standard Mode, so this model should only be used for tasks that actually require it.\\n\\nClaude 3.7 Sonnet is an advanced large language model with improved reasoning, coding, and problem-solving capabilities.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 64000,\n costPer1MTokens: 15,\n },\n tags: ['deprecated', 'vision', 'reasoning', 'general-purpose', 'agents', 'coding'],\n lifecycle: 'deprecated',\n },\n 'anthropic:claude-3-5-haiku-20241022': {\n id: 'anthropic:claude-3-5-haiku-20241022',\n name: 'Claude 3.5 Haiku',\n description:\n 'Claude 3.5 Haiku features offers enhanced capabilities in speed, coding accuracy, and tool use. Engineered to excel in real-time applications, it delivers quick response times that are essential for dynamic tasks such as chat interactions and immediate coding suggestions.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 0.8,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 4,\n },\n tags: ['general-purpose', 'low-cost'],\n lifecycle: 'live',\n },\n 'anthropic:claude-3-5-sonnet-20241022': {\n id: 'anthropic:claude-3-5-sonnet-20241022',\n name: 'Claude 3.5 Sonnet (October 2024)',\n description:\n 'Claude 3.5 Sonnet delivers better-than-Opus capabilities, faster-than-Sonnet speeds, at the same Sonnet prices. Sonnet is particularly good at coding, data science, visual processing, and agentic tasks.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 15,\n },\n tags: ['vision', 'general-purpose', 'agents', 'coding', 'function-calling', 'storytelling'],\n lifecycle: 'live',\n },\n 'anthropic:claude-3-5-sonnet-20240620': {\n id: 'anthropic:claude-3-5-sonnet-20240620',\n name: 'Claude 3.5 Sonnet (June 2024)',\n description:\n 'Claude 3.5 Sonnet delivers better-than-Opus capabilities, faster-than-Sonnet speeds, at the same Sonnet prices. Sonnet is particularly good at coding, data science, visual processing, and agentic tasks.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 4096,\n costPer1MTokens: 15,\n },\n tags: ['vision', 'general-purpose', 'agents', 'coding', 'function-calling', 'storytelling'],\n lifecycle: 'live',\n },\n 'anthropic:claude-3-haiku-20240307': {\n id: 'anthropic:claude-3-haiku-20240307',\n name: 'Claude 3 Haiku',\n description:\n \"Claude 3 Haiku is Anthropic's fastest and most compact model for near-instant responsiveness. 
Quick and accurate targeted performance.\",\n input: {\n maxTokens: 200000,\n costPer1MTokens: 0.25,\n },\n output: {\n maxTokens: 4096,\n costPer1MTokens: 1.25,\n },\n tags: ['low-cost', 'general-purpose'],\n lifecycle: 'live',\n },\n 'google-ai:gemini-2.5-flash': {\n id: 'google-ai:gemini-2.5-flash',\n name: 'Gemini 2.5 Flash',\n description:\n 'Google\\'s state-of-the-art workhorse model with advanced reasoning, coding, mathematics, and scientific capabilities. Includes built-in \"thinking\" capabilities for enhanced accuracy.',\n input: {\n maxTokens: 1048576,\n costPer1MTokens: 0.3,\n },\n output: {\n maxTokens: 65536,\n costPer1MTokens: 2.5,\n },\n tags: ['recommended', 'reasoning', 'agents', 'general-purpose', 'vision'],\n lifecycle: 'live',\n },\n 'google-ai:gemini-2.5-pro': {\n id: 'google-ai:gemini-2.5-pro',\n name: 'Gemini 2.5 Pro',\n description:\n 'Google\\'s most advanced AI model designed for complex reasoning, coding, mathematics, and scientific tasks. Features \"thinking\" capabilities for superior human-preference alignment and problem-solving.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 1.25,\n },\n output: {\n maxTokens: 65536,\n costPer1MTokens: 10,\n },\n tags: ['recommended', 'reasoning', 'agents', 'general-purpose', 'vision', 'coding'],\n lifecycle: 'live',\n },\n 'google-ai:models/gemini-2.0-flash': {\n id: 'google-ai:models/gemini-2.0-flash',\n name: 'Gemini 2.0 Flash',\n description:\n 'Next-gen Gemini model with improved capabilities, superior speed, native tool use, multimodal generation, and 1M token context window.',\n input: {\n maxTokens: 1048576,\n costPer1MTokens: 0.1,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.4,\n },\n tags: ['low-cost', 'general-purpose', 'vision'],\n lifecycle: 'live',\n },\n 'cerebras:gpt-oss-120b': {\n id: 'cerebras:gpt-oss-120b',\n name: 'GPT-OSS 120B (Preview)',\n description:\n 'gpt-oss-120b is a high-performance, open-weight language model designed for production-grade, general-purpose use cases. It excels at complex reasoning and supports configurable reasoning effort, full chain-of-thought transparency for easier debugging and trust, and native agentic capabilities for function calling, tool use, and structured outputs.',\n input: {\n maxTokens: 131000,\n costPer1MTokens: 0.35,\n },\n output: {\n maxTokens: 16000,\n costPer1MTokens: 0.75,\n },\n tags: ['preview', 'general-purpose', 'reasoning'],\n lifecycle: 'live',\n },\n 'cerebras:qwen-3-32b': {\n id: 'cerebras:qwen-3-32b',\n name: 'Qwen3 32B',\n description:\n 'Qwen3-32B is a world-class reasoning model with comparable quality to DeepSeek R1 while outperforming GPT-4.1 and Claude Sonnet 3.7. It excels in code-gen, tool-calling, and advanced reasoning, making it an exceptional model for a wide range of production use cases.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.4,\n },\n output: {\n maxTokens: 16000,\n costPer1MTokens: 0.8,\n },\n tags: ['general-purpose', 'reasoning'],\n lifecycle: 'live',\n },\n 'cerebras:llama-4-scout-17b-16e-instruct': {\n id: 'cerebras:llama-4-scout-17b-16e-instruct',\n name: 'Llama 4 Scout 17B',\n description:\n 'Llama 4 Scout 17B Instruct (16E) is a mixture-of-experts (MoE) language model developed by Meta, uses 16 experts per forward pass, activating 17 billion parameters out of a total of 109B. 
It supports native multimodal input (text and image) and multilingual output (text and code) across 12 supported languages.',\n input: {\n maxTokens: 32000,\n costPer1MTokens: 0.65,\n },\n output: {\n maxTokens: 16000,\n costPer1MTokens: 0.85,\n },\n tags: ['general-purpose', 'vision'],\n lifecycle: 'live',\n },\n 'cerebras:llama3.1-8b': {\n id: 'cerebras:llama3.1-8b',\n name: 'Llama 3.1 8B',\n description:\n 'Meta developed and released the Meta Llama 3 family of large language models (LLMs), a collection of pretrained and instruction tuned generative text models in 8B and 70B sizes. The Llama 3 instruction tuned models are optimized for dialogue use cases and outperform many of the available open source chat models on common industry benchmarks.',\n input: {\n maxTokens: 32000,\n costPer1MTokens: 0.1,\n },\n output: {\n maxTokens: 16000,\n costPer1MTokens: 0.1,\n },\n tags: ['low-cost', 'general-purpose'],\n lifecycle: 'live',\n },\n 'cerebras:llama3.3-70b': {\n id: 'cerebras:llama3.3-70b',\n name: 'Llama 3.3 70B',\n description:\n 'Meta developed and released the Meta Llama 3 family of large language models (LLMs), a collection of pretrained and instruction tuned generative text models in 8B and 70B sizes. The Llama 3 instruction tuned models are optimized for dialogue use cases and outperform many of the available open source chat models on common industry benchmarks.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.85,\n },\n output: {\n maxTokens: 16000,\n costPer1MTokens: 1.2,\n },\n tags: ['general-purpose'],\n lifecycle: 'live',\n },\n 'groq:openai/gpt-oss-20b': {\n id: 'groq:openai/gpt-oss-20b',\n name: 'GPT-OSS 20B (Preview)',\n description:\n 'gpt-oss-20b is a compact, open-weight language model optimized for low-latency. It shares the same training foundation and capabilities as the GPT-OSS 120B model, with faster responses and lower cost.',\n input: {\n maxTokens: 131000,\n costPer1MTokens: 0.1,\n },\n output: {\n maxTokens: 32000,\n costPer1MTokens: 0.5,\n },\n tags: ['preview', 'general-purpose', 'reasoning', 'low-cost'],\n lifecycle: 'live',\n },\n 'groq:openai/gpt-oss-120b': {\n id: 'groq:openai/gpt-oss-120b',\n name: 'GPT-OSS 120B (Preview)',\n description:\n 'gpt-oss-120b is a high-performance, open-weight language model designed for production-grade, general-purpose use cases. 
It excels at complex reasoning and supports configurable reasoning effort, full chain-of-thought transparency for easier debugging and trust, and native agentic capabilities for function calling, tool use, and structured outputs.',\n input: {\n maxTokens: 131000,\n costPer1MTokens: 0.15,\n },\n output: {\n maxTokens: 32000,\n costPer1MTokens: 0.75,\n },\n tags: ['preview', 'general-purpose', 'reasoning'],\n lifecycle: 'live',\n },\n 'groq:deepseek-r1-distill-llama-70b': {\n id: 'groq:deepseek-r1-distill-llama-70b',\n name: 'DeepSeek R1-Distill Llama 3.3 70B (Preview)',\n description:\n 'A fine-tuned version of Llama 3.3 70B using samples generated by DeepSeek-R1, making it smarter than the original Llama 70B, particularly for tasks requiring mathematical and factual precision.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.75,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 0.99,\n },\n tags: ['general-purpose', 'reasoning', 'preview'],\n lifecycle: 'live',\n },\n 'groq:llama-3.3-70b-versatile': {\n id: 'groq:llama-3.3-70b-versatile',\n name: 'LLaMA 3.3 70B',\n description:\n 'The Meta Llama 3.3 multilingual large language model (LLM) is a pretrained and instruction tuned generative model in 70B (text in/text out). The Llama 3.3 instruction tuned text only model is optimized for multilingual dialogue use cases and outperforms many of the available open source and closed chat models on common industry benchmarks.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.59,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 0.79,\n },\n tags: ['recommended', 'general-purpose', 'coding'],\n lifecycle: 'live',\n },\n 'groq:llama-3.2-1b-preview': {\n id: 'groq:llama-3.2-1b-preview',\n name: 'LLaMA 3.2 1B (Preview)',\n description:\n 'The Llama 3.2 instruction-tuned, text-only models are optimized for multilingual dialogue use cases, including agentic retrieval and summarization tasks.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.04,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.04,\n },\n tags: ['low-cost', 'deprecated'],\n lifecycle: 'discontinued',\n },\n 'groq:llama-3.2-3b-preview': {\n id: 'groq:llama-3.2-3b-preview',\n name: 'LLaMA 3.2 3B (Preview)',\n description:\n 'The Llama 3.2 instruction-tuned, text-only models are optimized for multilingual dialogue use cases, including agentic retrieval and summarization tasks.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.06,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.06,\n },\n tags: ['low-cost', 'general-purpose', 'deprecated'],\n lifecycle: 'discontinued',\n },\n 'groq:llama-3.2-11b-vision-preview': {\n id: 'groq:llama-3.2-11b-vision-preview',\n name: 'LLaMA 3.2 11B Vision (Preview)',\n description:\n 'The Llama 3.2-Vision instruction-tuned models are optimized for visual recognition, image reasoning, captioning, and answering general questions about an image.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.18,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.18,\n },\n tags: ['low-cost', 'vision', 'general-purpose', 'deprecated'],\n lifecycle: 'discontinued',\n },\n 'groq:llama-3.2-90b-vision-preview': {\n id: 'groq:llama-3.2-90b-vision-preview',\n name: 'LLaMA 3.2 90B Vision (Preview)',\n description:\n 'The Llama 3.2-Vision instruction-tuned models are optimized for visual recognition, image reasoning, captioning, and answering general questions about an image.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.9,\n },\n output: {\n maxTokens: 8192,\n 
costPer1MTokens: 0.9,\n },\n tags: ['vision', 'general-purpose', 'deprecated'],\n lifecycle: 'discontinued',\n },\n 'groq:llama-3.1-8b-instant': {\n id: 'groq:llama-3.1-8b-instant',\n name: 'LLaMA 3.1 8B',\n description: 'The Llama 3.1 instruction-tuned, text-only models are optimized for multilingual dialogue use cases.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.05,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.08,\n },\n tags: ['low-cost', 'general-purpose'],\n lifecycle: 'live',\n },\n 'groq:llama3-8b-8192': {\n id: 'groq:llama3-8b-8192',\n name: 'LLaMA 3 8B',\n description:\n 'Meta developed and released the Meta Llama 3 family of large language models (LLMs), a collection of pretrained and instruction tuned generative text models in 8 and 70B sizes. The Llama 3 instruction tuned models are optimized for dialogue use cases and outperform many of the available open source chat models on common industry benchmarks.',\n input: {\n maxTokens: 8192,\n costPer1MTokens: 0.05,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.08,\n },\n tags: ['low-cost', 'general-purpose', 'deprecated'],\n lifecycle: 'discontinued',\n },\n 'groq:llama3-70b-8192': {\n id: 'groq:llama3-70b-8192',\n name: 'LLaMA 3 70B',\n description:\n 'Meta developed and released the Meta Llama 3 family of large language models (LLMs), a collection of pretrained and instruction tuned generative text models in 8 and 70B sizes. The Llama 3 instruction tuned models are optimized for dialogue use cases and outperform many of the available open source chat models on common industry benchmarks.',\n input: {\n maxTokens: 8192,\n costPer1MTokens: 0.59,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.79,\n },\n tags: ['general-purpose', 'deprecated'],\n lifecycle: 'discontinued',\n },\n 'groq:gemma2-9b-it': {\n id: 'groq:gemma2-9b-it',\n name: 'Gemma2 9B',\n description:\n 'Redesigned for outsized performance and unmatched efficiency, Gemma 2 optimizes for blazing-fast inference on diverse hardware. Gemma is a family of lightweight, state-of-the-art open models from Google, built from the same research and technology used to create the Gemini models. They are text-to-text, decoder-only large language models, available in English, with open weights, pre-trained variants, and instruction-tuned variants. Gemma models are well-suited for a variety of text generation tasks, including question answering, summarization, and reasoning.',\n input: {\n maxTokens: 8192,\n costPer1MTokens: 0.2,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.2,\n },\n tags: ['low-cost', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openrouter:gpt-oss-120b': {\n id: 'openrouter:gpt-oss-120b',\n name: 'GPT-OSS 120B (Preview)',\n description:\n 'gpt-oss-120b is a high-performance, open-weight language model designed for production-grade, general-purpose use cases. It excels at complex reasoning and supports configurable reasoning effort, full chain-of-thought transparency for easier debugging and trust, and native agentic capabilities for function calling, tool use, and structured outputs.',\n input: {\n maxTokens: 131000,\n costPer1MTokens: 0.15,\n },\n output: {\n maxTokens: 32000,\n costPer1MTokens: 0.75,\n },\n tags: ['preview', 'general-purpose', 'reasoning'],\n lifecycle: 'live',\n },\n 'fireworks:gpt-oss-20b': {\n id: 'fireworks:gpt-oss-20b',\n name: 'GPT-OSS 20B',\n description:\n 'gpt-oss-20b is a compact, open-weight language model optimized for low-latency. 
It shares the same training foundation and capabilities as the GPT-OSS 120B model, with faster responses and lower cost.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.07,\n },\n output: {\n maxTokens: 16000,\n costPer1MTokens: 0.3,\n },\n tags: ['general-purpose', 'reasoning', 'low-cost'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/gpt-oss-20b'],\n },\n 'fireworks:gpt-oss-120b': {\n id: 'fireworks:gpt-oss-120b',\n name: 'GPT-OSS 120B',\n description:\n 'gpt-oss-120b is a high-performance, open-weight language model designed for production-grade, general-purpose use cases. It excels at complex reasoning and supports configurable reasoning effort, full chain-of-thought transparency for easier debugging and trust, and native agentic capabilities for function calling, tool use, and structured outputs.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.15,\n },\n output: {\n maxTokens: 16000,\n costPer1MTokens: 0.6,\n },\n tags: ['general-purpose', 'reasoning'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/gpt-oss-120b'],\n },\n 'fireworks:deepseek-r1-0528': {\n id: 'fireworks:deepseek-r1-0528',\n name: 'DeepSeek R1 0528',\n description:\n 'The updated DeepSeek R1 0528 model delivers major improvements in reasoning, inference, and accuracy through enhanced post-training optimization and greater computational resources. It now performs at a level approaching top-tier models like OpenAI o3 and Gemini 2.5 Pro, with notable gains in complex tasks such as math and programming. The update also reduces hallucinations, improves function calling, and enhances the coding experience.',\n input: {\n maxTokens: 160000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 8,\n },\n tags: ['recommended', 'reasoning', 'general-purpose', 'coding'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/deepseek-r1-0528'],\n },\n 'fireworks:deepseek-v3-0324': {\n id: 'fireworks:deepseek-v3-0324',\n name: 'DeepSeek V3 0324',\n description:\n 'DeepSeek V3, a 685B-parameter, mixture-of-experts model, is the latest iteration of the flagship chat model family from the DeepSeek team. It succeeds the DeepSeek V3 model and performs really well on a variety of tasks.',\n input: {\n maxTokens: 160000,\n costPer1MTokens: 0.9,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 0.9,\n },\n tags: ['recommended', 'general-purpose'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/deepseek-v3-0324'],\n },\n 'fireworks:llama4-maverick-instruct-basic': {\n id: 'fireworks:llama4-maverick-instruct-basic',\n name: 'Llama 4 Maverick Instruct (Basic)',\n description:\n 'Llama 4 Maverick 17B Instruct (128E) is a high-capacity multimodal language model from Meta, built on a mixture-of-experts (MoE) architecture with 128 experts and 17 billion active parameters per forward pass (400B total). It supports multilingual text and image input, and produces multilingual text and code output across 12 supported languages. 
Optimized for vision-language tasks, Maverick is instruction-tuned for assistant-like behavior, image reasoning, and general-purpose multimodal interaction, and suited for research and commercial applications requiring advanced multimodal understanding and high model throughput.',\n input: {\n maxTokens: 1000000,\n costPer1MTokens: 0.22,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 0.88,\n },\n tags: ['general-purpose', 'vision'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/llama4-maverick-instruct-basic'],\n },\n 'fireworks:llama4-scout-instruct-basic': {\n id: 'fireworks:llama4-scout-instruct-basic',\n name: 'Llama 4 Scout Instruct (Basic)',\n description:\n 'Llama 4 Scout 17B Instruct (16E) is a mixture-of-experts (MoE) language model developed by Meta, uses 16 experts per forward pass, activating 17 billion parameters out of a total of 109B. It supports native multimodal input (text and image) and multilingual output (text and code) across 12 supported languages. Designed for assistant-style interaction and visual reasoning, it is instruction-tuned for use in multilingual chat, captioning, and image understanding tasks.',\n input: {\n maxTokens: 1048576,\n costPer1MTokens: 0.15,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 0.6,\n },\n tags: ['general-purpose', 'vision'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/llama4-scout-instruct-basic'],\n },\n 'fireworks:llama-v3p3-70b-instruct': {\n id: 'fireworks:llama-v3p3-70b-instruct',\n name: 'Llama 3.3 70B Instruct',\n description:\n 'Llama 3.3 70B Instruct is the December update of Llama 3.1 70B. The model improves upon Llama 3.1 70B (released July 2024) with advances in tool calling, multilingual text support, math and coding. The model achieves industry leading results in reasoning, math and instruction following and provides similar performance as 3.1 405B but with significant speed and cost improvements.',\n input: {\n maxTokens: 131072,\n costPer1MTokens: 0.9,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 0.9,\n },\n tags: ['general-purpose'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/llama-v3p3-70b-instruct'],\n },\n 'fireworks:deepseek-r1': {\n id: 'fireworks:deepseek-r1',\n name: 'DeepSeek R1 (Fast)',\n description:\n 'This version of the R1 model has a perfect balance between speed and cost-efficiency for real-time interactive experiences, with speeds up to 90 tokens per second.\\n\\nDeepSeek-R1 is a state-of-the-art large language model optimized with reinforcement learning and cold-start data for exceptional reasoning, math, and code performance. **Note**: This model will always use a temperature of 0.6 as recommended by DeepSeek.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 8,\n },\n tags: ['reasoning', 'general-purpose', 'coding'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/deepseek-r1'],\n },\n 'fireworks:deepseek-r1-basic': {\n id: 'fireworks:deepseek-r1-basic',\n name: 'DeepSeek R1 (Basic)',\n description:\n 'This version of the R1 model is optimized for throughput and cost-effectiveness and has a lower cost but slightly higher latency than the \"Fast\" version of the model.\\n\\nDeepSeek-R1 is a state-of-the-art large language model optimized with reinforcement learning and cold-start data for exceptional reasoning, math, and code performance. 
**Note**: This model will always use a temperature of 0.6 as recommended by DeepSeek.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.55,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 2.19,\n },\n tags: ['recommended', 'reasoning', 'general-purpose', 'coding'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/deepseek-r1-basic'],\n },\n 'fireworks:deepseek-v3': {\n id: 'fireworks:deepseek-v3',\n name: 'DeepSeek V3',\n description:\n 'A a strong Mixture-of-Experts (MoE) language model with 671B total parameters with 37B activated for each token from Deepseek.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.9,\n },\n output: {\n maxTokens: 8000,\n costPer1MTokens: 0.9,\n },\n tags: ['deprecated', 'general-purpose'],\n lifecycle: 'deprecated',\n aliases: ['accounts/fireworks/models/deepseek-v3'],\n },\n 'fireworks:llama-v3p1-405b-instruct': {\n id: 'fireworks:llama-v3p1-405b-instruct',\n name: 'Llama 3.1 405B Instruct',\n description:\n 'The Meta Llama 3.1 collection of multilingual large language models (LLMs) is a collection of pretrained and instruction tuned generative models in 8B, 70B and 405B sizes. The Llama 3.1 instruction tuned text only models (8B, 70B, 405B) are optimized for multilingual dialogue use cases and outperform many of the available open source and closed chat models on common industry benchmarks.',\n input: {\n maxTokens: 131072,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 131072,\n costPer1MTokens: 3,\n },\n tags: ['deprecated', 'general-purpose'],\n lifecycle: 'deprecated',\n aliases: ['accounts/fireworks/models/llama-v3p1-405b-instruct'],\n },\n 'fireworks:llama-v3p1-70b-instruct': {\n id: 'fireworks:llama-v3p1-70b-instruct',\n name: 'Llama 3.1 70B Instruct',\n description:\n 'The Meta Llama 3.1 collection of multilingual large language models (LLMs) is a collection of pretrained and instruction tuned generative models in 8B, 70B and 405B sizes. The Llama 3.1 instruction tuned text only models (8B, 70B, 405B) are optimized for multilingual dialogue use cases and outperform many of the available open source and closed chat models on common industry benchmarks.',\n input: {\n maxTokens: 131072,\n costPer1MTokens: 0.9,\n },\n output: {\n maxTokens: 131072,\n costPer1MTokens: 0.9,\n },\n tags: ['deprecated', 'general-purpose'],\n lifecycle: 'deprecated',\n aliases: ['accounts/fireworks/models/llama-v3p1-70b-instruct'],\n },\n 'fireworks:llama-v3p1-8b-instruct': {\n id: 'fireworks:llama-v3p1-8b-instruct',\n name: 'Llama 3.1 8B Instruct',\n description:\n 'The Meta Llama 3.1 collection of multilingual large language models (LLMs) is a collection of pretrained and instruction tuned generative models in 8B, 70B and 405B sizes. The Llama 3.1 instruction tuned text only models (8B, 70B, 405B) are optimized for multilingual dialogue use cases and outperform many of the available open source and closed chat models on common industry benchmarks.',\n input: {\n maxTokens: 131072,\n costPer1MTokens: 0.2,\n },\n output: {\n maxTokens: 131072,\n costPer1MTokens: 0.2,\n },\n tags: ['low-cost', 'general-purpose'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/llama-v3p1-8b-instruct'],\n },\n 'fireworks:mixtral-8x22b-instruct': {\n id: 'fireworks:mixtral-8x22b-instruct',\n name: 'Mixtral MoE 8x22B Instruct',\n description:\n 'Mistral MoE 8x22B Instruct v0.1 model with Sparse Mixture of Experts. 
Fine tuned for instruction following.',\n input: {\n maxTokens: 65536,\n costPer1MTokens: 1.2,\n },\n output: {\n maxTokens: 65536,\n costPer1MTokens: 1.2,\n },\n tags: ['general-purpose'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/mixtral-8x22b-instruct'],\n },\n 'fireworks:mixtral-8x7b-instruct': {\n id: 'fireworks:mixtral-8x7b-instruct',\n name: 'Mixtral MoE 8x7B Instruct',\n description:\n 'Mistral MoE 8x7B Instruct v0.1 model with Sparse Mixture of Experts. Fine tuned for instruction following',\n input: {\n maxTokens: 32768,\n costPer1MTokens: 0.5,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 0.5,\n },\n tags: ['low-cost', 'general-purpose'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/mixtral-8x7b-instruct'],\n },\n 'fireworks:mythomax-l2-13b': {\n id: 'fireworks:mythomax-l2-13b',\n name: 'MythoMax L2 13b',\n description:\n 'MythoMax L2 is designed to excel at both roleplaying and storytelling, and is an improved variant of the previous MythoMix model, combining the MythoLogic-L2 and Huginn models.',\n input: {\n maxTokens: 4096,\n costPer1MTokens: 0.2,\n },\n output: {\n maxTokens: 4096,\n costPer1MTokens: 0.2,\n },\n tags: ['roleplay', 'storytelling', 'low-cost'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/mythomax-l2-13b'],\n },\n 'fireworks:gemma2-9b-it': {\n id: 'fireworks:gemma2-9b-it',\n name: 'Gemma 2 9B Instruct',\n description:\n 'Redesigned for outsized performance and unmatched efficiency, Gemma 2 optimizes for blazing-fast inference on diverse hardware. Gemma is a family of lightweight, state-of-the-art open models from Google, built from the same research and technology used to create the Gemini models. They are text-to-text, decoder-only large language models, available in English, with open weights, pre-trained variants, and instruction-tuned variants. 
Gemma models are well-suited for a variety of text generation tasks, including question answering, summarization, and reasoning.',\n input: {\n maxTokens: 8192,\n costPer1MTokens: 0.2,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.2,\n },\n tags: ['deprecated', 'low-cost', 'general-purpose'],\n lifecycle: 'deprecated',\n aliases: ['accounts/fireworks/models/gemma2-9b-it'],\n },\n}\n\nexport const knownTags = [\n 'auto',\n 'best',\n 'fast',\n 'reasoning',\n 'cheapest',\n 'balance',\n 'recommended',\n 'reasoning',\n 'general-purpose',\n 'low-cost',\n 'vision',\n 'coding',\n 'function-calling',\n 'agents',\n 'storytelling',\n 'preview',\n 'roleplay',\n]\n\nexport const defaultModel: RemoteModel = {\n id: '',\n name: '',\n description: '',\n input: {\n costPer1MTokens: 0,\n maxTokens: 1000000,\n },\n output: {\n costPer1MTokens: 0,\n maxTokens: 1000000,\n },\n tags: [],\n lifecycle: 'live',\n}\n", "import { type ErrorType } from '@botpress/client'\n\nexport type BotpressError = {\n isApiError: boolean\n code: number\n description: string\n type: ErrorType\n subtype?: string\n error?: unknown\n metadata?: unknown\n message?: string\n id: string\n}\n\ntype Action = 'fallback' | 'retry' | 'abort'\n\nexport const getActionFromError = (error: any): Action => {\n if (!isBotpressError(error)) {\n return 'retry'\n }\n\n if (error.type === 'InvalidDataFormat') {\n if (error.message?.includes('data/model/id')) {\n // Invalid Model ID, so we want to try another model\n return 'fallback'\n }\n\n // Usually means the request was malformed\n return 'abort'\n }\n\n if (\n error.type === 'QuotaExceeded' ||\n error.type === 'RateLimited' ||\n error.type === 'Unknown' ||\n error.type === 'LimitExceeded'\n ) {\n // These errors are usually temporary, so we want to retry\n return 'retry'\n }\n\n const subtype = (error.metadata as any)?.subtype\n if (subtype === 'UPSTREAM_PROVIDER_FAILED') {\n // The model is degraded, so we want to try another model\n return 'fallback'\n }\n\n if (error.type === 'Internal') {\n // This is an internal error, probably a lambda timeout\n return 'retry'\n }\n\n return 'abort'\n}\n\nexport const isNotFoundError = (error: any): boolean => isBotpressError(error) && error.type === 'ResourceNotFound'\n\nexport const isForbiddenOrUnauthorizedError = (error: any): boolean =>\n isBotpressError(error) && (error.type === 'Forbidden' || error.type === 'Unauthorized')\n\nexport const isBotpressError = (error: any): error is BotpressError =>\n typeof error === 'object' &&\n error !== null &&\n 'isApiError' in error &&\n 'code' in error &&\n 'type' in error &&\n 'id' in error\n", "export type Callback<T> = (error: any | null, value: T) => void\nexport type Interceptor<T> = (error: any | null, value: T, next: Callback<T>, done: Callback<T>) => Promise<void> | void\n\nexport class InterceptorManager<T> {\n private _interceptors: Interceptor<T>[] = []\n\n public use(interceptor: Interceptor<T>) {\n this._interceptors.push(interceptor)\n return () => this.remove(interceptor)\n }\n\n public remove(interceptor: Interceptor<T>) {\n this._interceptors = this._interceptors.filter((i) => i !== interceptor)\n }\n\n public async run(value: T, signal: AbortSignal): Promise<T> {\n let error: any | null = null\n let result: T = value\n let done = false\n\n for (const interceptor of this._interceptors) {\n if (done) {\n break\n }\n\n if (signal.aborted) {\n throw signal.reason\n }\n\n await new Promise<void>((resolve) => {\n void interceptor(\n error,\n result,\n (err, val) => {\n error = err\n result 
= val\n resolve()\n },\n (err, val) => {\n error = err\n result = val\n done = true\n resolve()\n }\n )\n })\n }\n\n if (error) {\n throw error\n }\n\n return result\n }\n}\n", "import { ExtendedClient, getExtendedClient } from './bp-client'\nimport { isForbiddenOrUnauthorizedError, isNotFoundError } from './errors'\nimport { Model as RawModel } from './schemas.gen'\nimport { BotpressClientLike } from './types'\n\nexport const DOWNTIME_THRESHOLD_MINUTES = 5\nconst PREFERENCES_FILE_SUFFIX = 'models.config.json'\n\nexport const DEFAULT_INTEGRATIONS = ['google-ai', 'anthropic', 'openai', 'cerebras', 'fireworks-ai', 'groq']\n\n// Biases for vendors and models\nconst VendorPreferences = ['google-ai', 'anthropic', 'openai']\nconst BestModelPreferences = ['4.1', '4o', '3-5-sonnet', 'gemini-1.5-pro']\nconst FastModelPreferences = ['gemini-1.5-flash', '4.1-mini', '4.1-nano', '4o-mini', 'flash', 'haiku']\n\nconst InputPricePenalty = 3 // $3 per 1M tokens\nconst OutputPricePenalty = 10 // $10 per 1M tokens\nconst LowTokensPenalty = 128_000 // 128k tokens\n\nexport type Model = RawModel & {\n ref: ModelRef\n integration: string\n}\n\nexport type ModelRef = `${string}:${string}`\n\nexport type ModelPreferences = {\n best: ModelRef[]\n fast: ModelRef[]\n downtimes: Array<{ ref: ModelRef; startedAt: string; reason: string }>\n}\n\nconst isRecommended = (model: Model) => model.tags.includes('recommended')\nconst isDeprecated = (model: Model) => model.tags.includes('deprecated')\nconst isLowCost = (model: Model) => model.tags.includes('low-cost')\nconst hasVisionSupport = (model: Model) => model.tags.includes('vision')\nconst isGeneralPurpose = (model: Model) => model.tags.includes('general-purpose')\n\nconst scoreModel = (model: Model, type: 'best' | 'fast', boosts: Record<ModelRef, number> = {}) => {\n let score: number = 0\n\n const scores: Array<[string, boolean, number]> = [\n ['input price penalty', model.input.costPer1MTokens > InputPricePenalty, -1],\n ['output price penalty', model.output.costPer1MTokens > OutputPricePenalty, -1],\n ['low tokens penalty', (model.input.maxTokens ?? 0) + (model.output.maxTokens ?? 0) < LowTokensPenalty, -1],\n ['recommended', isRecommended(model), 2],\n ['deprecated', isDeprecated(model), -2],\n ['vision support', hasVisionSupport(model), 1],\n ['general purpose', isGeneralPurpose(model), 1],\n ['vendor preference', VendorPreferences.includes(model.integration), 1],\n ['best model preference', type === 'best' && BestModelPreferences.some((x) => model.id.includes(x)), 1],\n ['fast model preference penalty', type === 'best' && FastModelPreferences.some((x) => model.id.includes(x)), -2],\n ['fast model preference', type === 'fast' && FastModelPreferences.some((x) => model.id.includes(x)), 2],\n ['low cost', type === 'fast' && isLowCost(model), 1],\n ]\n\n for (const rule in boosts) {\n if (model.ref.includes(rule)) {\n scores.push([`boost (${rule})`, true, Number(boosts[rule as ModelRef]) ?? 
0] as const)\n }\n }\n\n for (const [, condition, value] of scores) {\n if (condition) {\n score += value\n }\n }\n\n return score\n}\n\nexport const getBestModels = (models: Model[], boosts: Record<ModelRef, number> = {}) =>\n models.sort((a, b) => scoreModel(b, 'best', boosts) - scoreModel(a, 'best', boosts))\n\nexport const getFastModels = (models: Model[], boosts: Record<ModelRef, number> = {}) =>\n models.sort((a, b) => scoreModel(b, 'fast', boosts) - scoreModel(a, 'fast', boosts))\n\nexport const pickModel = (models: ModelRef[], downtimes: ModelPreferences['downtimes'] = []) => {\n const copy = [...models]\n const elasped = (date: string) => new Date().getTime() - new Date(date).getTime()\n const DOWNTIME_THRESHOLD = 1000 * 60 * DOWNTIME_THRESHOLD_MINUTES\n\n if (!copy.length) {\n throw new Error('At least one model is required')\n }\n\n while (copy.length) {\n const ref = copy.shift() as ModelRef\n const downtime = downtimes.find((o) => o.ref === ref && elasped(o.startedAt) < DOWNTIME_THRESHOLD)\n if (downtime) {\n continue\n } else {\n return ref\n }\n }\n\n throw new Error(`All models are down: ${models.join(', ')}`)\n}\n\nexport abstract class ModelProvider {\n public abstract fetchInstalledModels(): Promise<Model[]>\n public abstract fetchModelPreferences(): Promise<ModelPreferences | null>\n public abstract saveModelPreferences(preferences: ModelPreferences): Promise<void>\n public abstract deleteModelPreferences(): Promise<void>\n}\n\nexport class RemoteModelProvider extends ModelProvider {\n private _client: ExtendedClient\n\n public constructor(client: BotpressClientLike) {\n super()\n this._client = getExtendedClient(client)\n }\n\n private async _fetchInstalledIntegrationNames() {\n try {\n const { bot } = await this._client.getBot({ id: this._client.botId })\n const integrations = Object.values(bot.integrations).filter((x) => x.status === 'registered')\n return integrations.map((x) => x.name)\n } catch (err) {\n if (isForbiddenOrUnauthorizedError(err)) {\n // This happens when the bot (with a BAK token) tries to access the .getBot endpoint\n return DEFAULT_INTEGRATIONS\n }\n\n throw err\n }\n }\n\n public async fetchInstalledModels() {\n const integrationNames = await this._fetchInstalledIntegrationNames()\n const models: Model[] = []\n\n await Promise.allSettled(\n integrationNames.map(async (integration) => {\n const { output } = await this._client.callAction({\n type: `${integration}:listLanguageModels`,\n input: {},\n })\n\n if (!output?.models?.length) {\n return\n }\n\n for (const model of output.models as RawModel[]) {\n if (model.name && model.id && model.input && model.tags) {\n models.push({\n ref: `${integration}:${model.id}`,\n integration,\n id: model.id,\n name: model.name,\n description: model.description,\n input: model.input,\n output: model.output,\n tags: model.tags,\n })\n }\n }\n })\n )\n\n return models\n }\n\n public async fetchModelPreferences(): Promise<ModelPreferences | null> {\n try {\n const { file } = await this._client.getFile({ id: this._preferenceFileKey })\n\n if (globalThis.fetch !== undefined) {\n const response = await fetch(file.url)\n return (await response.json()) as ModelPreferences\n } else {\n const { data } = await this._client.axios.get(file.url, {\n // we piggy-back axios to avoid adding a new dependency\n // unset all headers to avoid S3 pre-signed signature mismatch\n headers: Object.keys(this._client.config.headers).reduce(\n (acc, key) => {\n acc[key] = undefined\n return acc\n },\n {} as Record<string, undefined>\n ),\n 
})\n return data as ModelPreferences\n }\n } catch (err) {\n if (isNotFoundError(err)) {\n return null\n }\n\n throw err\n }\n }\n\n public async saveModelPreferences(preferences: ModelPreferences) {\n await this._client.uploadFile({\n key: this._preferenceFileKey,\n content: JSON.stringify(preferences, null, 2),\n index: false,\n tags: {\n system: 'true',\n purpose: 'config',\n },\n })\n }\n\n public async deleteModelPreferences() {\n await this._client.deleteFile({ id: this._preferenceFileKey }).catch(() => {})\n }\n\n private get _preferenceFileKey() {\n return `bot->${this._client.botId}->${PREFERENCES_FILE_SUFFIX}`\n }\n}\n"],
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAcA,QAAM,iBAAkC;MACtC,mBAAmB;MACnB,QAAQ;MACR,UAAU;MACV,eAAe;MACf,OAAO,WAAA;AAAM,eAAA;MAAA;MACb,eAAe;MACf,cAAc;;AAGhB,aAAgB,oBAAoB,SAAuB;AACzD,UAAM,YAAS,SAAA,SAAA,CAAA,GAAyB,cAAc,GAAK,OAAO;AAElE,UAAI,UAAU,gBAAgB,GAAG;AAC/B,kBAAU,gBAAgB;;AAG5B,aAAO;IACT;AARA,YAAA,sBAAA;;;;;;;;;ACxBA,aAAgB,WAAW,OAAa;AACpC,UAAM,gBAAgB,KAAK,OAAM,IAAK;AACtC,aAAO,KAAK,MAAM,aAAa;IACnC;AAHA,YAAA,aAAA;;;;;;;;;ACAA,aAAgB,SAAS,OAAa;AAClC,aAAO;IACX;AAFA,YAAA,WAAA;;;;;;;;;ACCA,QAAA,gBAAA;AACA,QAAA,cAAA;AAIA,aAAgB,cAAc,SAAwB;AACpD,cAAQ,QAAQ,QAAQ;QACtB,KAAK;AACH,iBAAO,cAAA;QAET,KAAK;QACL;AACE,iBAAO,YAAA;;IAEb;AATA,YAAA,gBAAA;;;;;;;;;ACJA,QAAA,mBAAA;AAEA,QAAA;;OAAA,WAAA;AAEE,iBAAAA,OAAoB,SAAwB;AAAxB,eAAA,UAAA;AADV,eAAA,UAAU;QAC2B;AAExC,QAAAA,OAAA,UAAA,QAAP,WAAA;AAAA,cAAA,QAAA;AACE,iBAAO,IAAI,QAAQ,SAAA,SAAO;AAAI,mBAAA,WAAW,SAAS,MAAK,aAAa;UAAtC,CAAuC;QACvE;AAEO,QAAAA,OAAA,UAAA,mBAAP,SAAwB,SAAe;AACrC,eAAK,UAAU;QACjB;AAEA,eAAA,eAAYA,OAAA,WAAA,iBAAa;eAAzB,WAAA;AACE,gBAAM,SAAS,iBAAA,cAAc,KAAK,OAAO;AACzC,mBAAO,OAAO,KAAK,KAAK;UAC1B;;;;AAEA,eAAA,eAAYA,OAAA,WAAA,SAAK;eAAjB,WAAA;AACE,gBAAM,WAAW,KAAK,QAAQ;AAC9B,gBAAM,OAAO,KAAK,QAAQ;AAC1B,gBAAM,QAAQ,KAAK;AACnB,gBAAM,QAAQ,WAAW,KAAK,IAAI,MAAM,KAAK;AAE7C,mBAAO,KAAK,IAAI,OAAO,KAAK,QAAQ,QAAQ;UAC9C;;;;AAEA,eAAA,eAAcA,OAAA,WAAA,wBAAoB;eAAlC,WAAA;AACE,mBAAO,KAAK;UACd;;;;AACF,eAAAA;MAAA,GA7BA;;AAAsB,YAAA,QAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACJtB,QAAA,eAAA;AAEA,QAAA;;OAAA,SAAA,QAAA;AAAoC,kBAAAC,iBAAA,MAAA;AAApC,iBAAAA,kBAAA;;QAYA;AAXiB,QAAAA,gBAAA,UAAA,QAAb,WAAA;;;AACI,qBAAA,CAAA,GAAO,KAAK,iBAAiB,OAAO,OAAA,UAAM,MAAK,KAAA,IAAA,CAAE;;;;AAGrD,eAAA,eAAYA,gBAAA,WAAA,kBAAc;eAA1B,WAAA;AACI,mBAAO,KAAK,YAAY;UAC5B;;;;AAEA,eAAA,eAAcA,gBAAA,WAAA,wBAAoB;eAAlC,WAAA;AACI,mBAAO,KAAK,UAAU;UAC1B;;;;AACJ,eAAAA;MAAA,GAZoC,aAAA,KAAK;;AAA5B,YAAA,iBAAA;;;;;;;;;;;;;;;;;;;;;;;;;;ACFb,QAAA,eAAA;AAEA,QAAA;;OAAA,SAAA,QAAA;AAAiC,kBAAAC,cAAA,MAAA;AAAjC,iBAAAA,eAAA;;QAAwC;AAAA,eAAAA;MAAA,GAAP,aAAA,KAAK;;AAAzB,YAAA,cAAA;;;;;;;;;ACDb,QAAA,qBAAA;AACA,QAAA,iBAAA;AAGA,aAAgB,aAAa,SAA0B,SAAe;AAClE,UAAM,QAAQ,eAAe,OAAO;AACpC,YAAM,iBAAiB,OAAO;AAC9B,aAAO;IACX;AAJA,YAAA,eAAA;AAMA,aAAS,eAAe,SAAwB;AAC5C,UAAI,CAAC,QAAQ,mBAAmB;AAC5B,eAAO,IAAI,mBAAA,eAAe,OAAO;;AAGrC,aAAO,IAAI,eAAA,YAAY,OAAO;IAClC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACjBA,QAAA,YAAA;AAKA,QAAA,kBAAA;AAIA,aAAsBC,SACpB,SACA,SAA4B;AAA5B,UAAA,YAAA,QAAA;AAAA,kBAAA,CAAA;MAA4B;;;;;;AAEtB,iCAAmB,UAAA,oBAAoB,OAAO;AAC9C,cAAAA,WAAU,IAAI,QAAQ,SAAS,gBAAgB;AAE9C,qBAAA,CAAA,GAAMA,SAAQ,QAAO,CAAE;;AAA9B,qBAAA,CAAA,GAAO,GAAA,KAAA,CAAuB;;;;;AAPhC,YAAA,UAAAA;AAUA,QAAA;;OAAA,WAAA;AAGE,iBAAAC,SACU,SACA,SAAwB;AADxB,eAAA,UAAA;AACA,eAAA,UAAA;AAJF,eAAA,gBAAgB;QAKrB;AAEU,QAAAA,SAAA,UAAA,UAAb,WAAA;;;;;;uBACS,CAAC,KAAK,oBAAmB,QAAA,CAAA,GAAA,CAAA;;;;AAE5B,yBAAA,CAAA,GAAM,KAAK,WAAU,CAAE;;AAAvB,qBAAA,KAAA;AACO,yBAAA,CAAA,GAAM,KAAK,QAAO,CAAE;;AAA3B,yBAAA,CAAA,GAAO,GAAA,KAAA,CAAoB;;;AAE3B,uBAAK;AACe,yBAAA,CAAA,GAAM,KAAK,QAAQ,MAAM,KAAG,KAAK,aAAa,CAAC;;AAA7D,gCAAc,GAAA,KAAA;AAEpB,sBAAI,CAAC,eAAe,KAAK,qBAAqB;AAC5C,0BAAM;;;;;;AAKZ,wBAAM,IAAI,MAAM,uBAAuB;;;;;AAGzC,eAAA,eAAYA,SAAA,WAAA,uBAAmB;eAA/B,WAAA;AACE,mBAAO,KAAK,iBAAiB,KAAK,QAAQ;UAC5C;;;;AAEc,QAAAA,SAAA,UAAA,aAAd,WAAA;;;;;;AACQ,0BAAQ,gBAAA,aAAa,KAAK,SAAS,KAAK,aAAa;AAC3D,yBAAA,CAAA,GAAM,MAAM,MAAK,CAAE;;AAAnB,qBAAA,KAAA;;;;;;;;;AAEJ,eAAAA;MAAA,GAlCA;;;;;;ACnBA,IAAAC,8BAAwB;;;ACAjB,IAAI,mBAAmB,OAAO;AAAA,EACnC,KAAK,UAAU,MAAM;AACnB,aACM,YAAY,KAAK,OAAO,KAAK,
KAAK,CAAC,GACrC,IAAI,GACJ,SAAS,UAAU,QACrB,IAAI,QACJ,KACA;AACA,gBAAU,CAAC,EAAE,GAAG,IAAI;AAAA,IACtB;AAAA,EACF;AAAA,EACA,QAAQ,CAAC;AAAA,EACT,GAAG,OAAO,IAAI;AACZ;AAAC,KAAC,KAAK,OAAO,KAAK,MAAM,CAAC,GAAG,KAAK,EAAE;AACpC,WAAO,MAAM;AACX,WAAK,OAAO,KAAK,IAAI,KAAK,OAAO,KAAK,GAAG,OAAO,OAAK,OAAO,CAAC;AAAA,IAC/D;AAAA,EACF;AACF;;;ACAO,IAAM,oBAAoB,CAAC,YAAqC;AACrE,QAAM,SAAS;AAEf,MAAI,CAAC,UAAU,WAAW,QAAQ,OAAO,WAAW,UAAU;AAC5D,UAAM,IAAI,MAAM,yEAAyE;AAAA,EAC3F;AAEA,MAAI,OAAO,OAAO,YAAY,YAAY,CAAC,CAAC,OAAO,SAAS;AAC1D,QAAI;AACF,aAAO,kBAAkB,OAAO,OAAO;AAAA,IACzC,QAAQ;AAAA,IAAC;AAAA,EACX;AAEA,MACE,OAAO,OAAO,gBAAgB,cAC9B,OAAO,OAAO,eAAe,cAC7B,CAAC,OAAO,UACR,OAAO,OAAO,WAAW,YACzB,CAAC,OAAO,OAAO,SACf;AACA,UAAM,IAAI,MAAM,yEAAyE;AAAA,EAC3F;AAEA,QAAM,QAAQ,OAAO,OAAO,QAAQ,UAAU;AAE9C,MAAI,CAAC,OAAO,QAAQ;AAClB,UAAM,IAAI,MAAM,yCAAyC;AAAA,EAC3D;AAEA,QAAM,QAAQ,MAAM;AAClB,UAAM,IAAI;AACV,QAAI,EAAE,SAAS,OAAO,EAAE,UAAU,YAAY;AAC5C,aAAO,kBAAkB,EAAE,MAAM,CAAC;AAAA,IACpC;AACA,WAAO,kBAAkB,IAAI,EAAE,YAAY,EAAE,MAAM,CAAC;AAAA,EACtD;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA,OAAQ,OAAe;AAAA,IACvB;AAAA,IACA,WAAW,CAAC,WAAwB;AAClC,YAAM,YAAY,MAAM;AACxB,YAAM,WAAW,UAAU;AAC3B,eAAS,SAAS,SAAS;AAC3B,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;ACnEA,iCAAwB;AADxB,OAAO,WAA8B;;;ACI9B,IAAM,SAAsC;AAAA,EACjD,2BAA2B;AAAA,IACzB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,iBAAiB;AAAA,IACpD,WAAW;AAAA,EACb;AAAA,EACA,gCAAgC;AAAA,IAC9B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,iBAAiB;AAAA,IACpD,WAAW;AAAA,EACb;AAAA,EACA,gCAAgC;AAAA,IAC9B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,aAAa,iBAAiB;AAAA,IACjD,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,aAAa,UAAU,QAAQ;AAAA,IACtC,WAAW;AAAA,EACb;AAAA,EACA,wBAAwB;AAAA,IACtB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,aAAa,UAAU,QAAQ;AAAA,IACtC,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,UAAU,iBAAiB;AAAA,IACjD,WAAW;AAAA,EACb;AAAA,EACA,kCAAkC;AAAA,IAChC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,UAAU,iBAAiB;AAAA,IACjD,WAAW;AAAA,EACb;AAAA,EACA,kCAAkC;AAAA,IAChC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,IACb,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,UAAU,iBAAiB;AAAA,IAC9C,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,aAAa,mBAAmB,QAAQ;AAAA,IAC/C,WAAW;AAAA,EACb;AAAA,EACA,wBAAwB;AAAA,IACtB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,M
ACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,aAAa,UAAU,iBAAiB;AAAA,IAC/C,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,aAAa,UAAU,iBAAiB;AAAA,IAC/C,WAAW;AAAA,EACb;AAAA,EACA,iCAAiC;AAAA,IAC/B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,UAAU,YAAY,mBAAmB,kBAAkB;AAAA,IACjF,WAAW;AAAA,EACb;AAAA,EACA,4BAA4B;AAAA,IAC1B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,UAAU,mBAAmB,UAAU,UAAU,kBAAkB;AAAA,IACzF,WAAW;AAAA,EACb;AAAA,EACA,4BAA4B;AAAA,IAC1B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,UAAU,mBAAmB,UAAU,UAAU,kBAAkB;AAAA,IACxF,WAAW;AAAA,EACb;AAAA,EACA,4BAA4B;AAAA,IAC1B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,UAAU,mBAAmB,UAAU,UAAU,kBAAkB;AAAA,IACxF,WAAW;AAAA,EACb;AAAA,EACA,iCAAiC;AAAA,IAC/B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,mBAAmB,UAAU,UAAU,kBAAkB;AAAA,IAC9E,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,mBAAmB,UAAU;AAAA,IAClD,WAAW;AAAA,EACb;AAAA,EACA,sCAAsC;AAAA,IACpC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,UAAU,UAAU,mBAAmB,QAAQ;AAAA,IAClF,WAAW;AAAA,EACb;AAAA,EACA,gDAAgD;AAAA,IAC9C,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,UAAU,aAAa,mBAAmB,UAAU,QAAQ;AAAA,IACjF,WAAW;AAAA,EACb;AAAA,EACA,wCAAwC;AAAA,IACtC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,UAAU,UAAU,mBAAmB,QAAQ;AAAA,IAClF,WAAW;AAAA,EACb;AAAA,EACA,kDAAkD;AAAA,IAChD,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,UAAU,aAAa,mBAAmB,UAAU,QAAQ;AAAA,IACjF,WAAW;AAAA,EACb;AAAA,EACA,uCAAuC;AAAA,IACrC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,UAAU;AAAA,IACpC,WAAW;AAAA,EACb;AAAA,EACA,wCAAwC;AAAA,IACtC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,UAAU,mBAAmB,UAAU,UAAU,oBAAoB,cAAc;AAAA,IAC1F,WAAW;AAAA,EACb;AAAA,EACA,wCAAwC;AAAA,IACtC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,i
BAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,UAAU,mBAAmB,UAAU,UAAU,oBAAoB,cAAc;AAAA,IAC1F,WAAW;AAAA,EACb;AAAA,EACA,qCAAqC;AAAA,IACnC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,iBAAiB;AAAA,IACpC,WAAW;AAAA,EACb;AAAA,EACA,8BAA8B;AAAA,IAC5B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,UAAU,mBAAmB,QAAQ;AAAA,IACxE,WAAW;AAAA,EACb;AAAA,EACA,4BAA4B;AAAA,IAC1B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,UAAU,mBAAmB,UAAU,QAAQ;AAAA,IAClF,WAAW;AAAA,EACb;AAAA,EACA,qCAAqC;AAAA,IACnC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,mBAAmB,QAAQ;AAAA,IAC9C,WAAW;AAAA,EACb;AAAA,EACA,yBAAyB;AAAA,IACvB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,WAAW,mBAAmB,WAAW;AAAA,IAChD,WAAW;AAAA,EACb;AAAA,EACA,uBAAuB;AAAA,IACrB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,WAAW;AAAA,IACrC,WAAW;AAAA,EACb;AAAA,EACA,2CAA2C;AAAA,IACzC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,QAAQ;AAAA,IAClC,WAAW;AAAA,EACb;AAAA,EACA,wBAAwB;AAAA,IACtB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,iBAAiB;AAAA,IACpC,WAAW;AAAA,EACb;AAAA,EACA,yBAAyB;AAAA,IACvB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,iBAAiB;AAAA,IACxB,WAAW;AAAA,EACb;AAAA,EACA,2BAA2B;AAAA,IACzB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,WAAW,mBAAmB,aAAa,UAAU;AAAA,IAC5D,WAAW;AAAA,EACb;AAAA,EACA,4BAA4B;AAAA,IAC1B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,WAAW,mBAAmB,WAAW;AAAA,IAChD,WAAW;AAAA,EACb;AAAA,EACA,sCAAsC;AAAA,IACpC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,aAAa,SAAS;AAAA,IAChD,WAAW;AAAA,EACb;AAAA,EACA,gCAAgC;AAAA,IAC9B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,mBAAmB,QAAQ;AAAA,IACjD,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,YAAY;AAAA,IAC/B,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO
;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,mBAAmB,YAAY;AAAA,IAClD,WAAW;AAAA,EACb;AAAA,EACA,qCAAqC;AAAA,IACnC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,UAAU,mBAAmB,YAAY;AAAA,IAC5D,WAAW;AAAA,EACb;AAAA,EACA,qCAAqC;AAAA,IACnC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,UAAU,mBAAmB,YAAY;AAAA,IAChD,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,IACb,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,iBAAiB;AAAA,IACpC,WAAW;AAAA,EACb;AAAA,EACA,uBAAuB;AAAA,IACrB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,mBAAmB,YAAY;AAAA,IAClD,WAAW;AAAA,EACb;AAAA,EACA,wBAAwB;AAAA,IACtB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,YAAY;AAAA,IACtC,WAAW;AAAA,EACb;AAAA,EACA,qBAAqB;AAAA,IACnB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,iBAAiB;AAAA,IACpC,WAAW;AAAA,EACb;AAAA,EACA,2BAA2B;AAAA,IACzB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,WAAW,mBAAmB,WAAW;AAAA,IAChD,WAAW;AAAA,EACb;AAAA,EACA,yBAAyB;AAAA,IACvB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,aAAa,UAAU;AAAA,IACjD,WAAW;AAAA,IACX,SAAS,CAAC,uCAAuC;AAAA,EACnD;AAAA,EACA,0BAA0B;AAAA,IACxB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,WAAW;AAAA,IACrC,WAAW;AAAA,IACX,SAAS,CAAC,wCAAwC;AAAA,EACpD;AAAA,EACA,8BAA8B;AAAA,IAC5B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,mBAAmB,QAAQ;AAAA,IAC9D,WAAW;AAAA,IACX,SAAS,CAAC,4CAA4C;AAAA,EACxD;AAAA,EACA,8BAA8B;AAAA,IAC5B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,iBAAiB;AAAA,IACvC,WAAW;AAAA,IACX,SAAS,CAAC,4CAA4C;AAAA,EACxD;AAAA,EACA,4CAA4C;AAAA,IAC1C,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,QAAQ;AAAA,IAClC,WAAW;AAAA,IACX,SAAS,CAAC,0DAA0D;AAAA,EACtE;AAAA,EACA,yCAAyC;AAAA,IACvC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,QAAQ;AAAA,IAClC,WAAW;AAAA,IACX,SAAS,CAAC,uDAAuD;AAAA,EACnE;AAAA,EACA,qCAAqC;AAAA,IACnC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MAC
X,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,iBAAiB;AAAA,IACxB,WAAW;AAAA,IACX,SAAS,CAAC,mDAAmD;AAAA,EAC/D;AAAA,EACA,yBAAyB;AAAA,IACvB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,aAAa,mBAAmB,QAAQ;AAAA,IAC/C,WAAW;AAAA,IACX,SAAS,CAAC,uCAAuC;AAAA,EACnD;AAAA,EACA,+BAA+B;AAAA,IAC7B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,mBAAmB,QAAQ;AAAA,IAC9D,WAAW;AAAA,IACX,SAAS,CAAC,6CAA6C;AAAA,EACzD;AAAA,EACA,yBAAyB;AAAA,IACvB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,iBAAiB;AAAA,IACtC,WAAW;AAAA,IACX,SAAS,CAAC,uCAAuC;AAAA,EACnD;AAAA,EACA,sCAAsC;AAAA,IACpC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,iBAAiB;AAAA,IACtC,WAAW;AAAA,IACX,SAAS,CAAC,oDAAoD;AAAA,EAChE;AAAA,EACA,qCAAqC;AAAA,IACnC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,iBAAiB;AAAA,IACtC,WAAW;AAAA,IACX,SAAS,CAAC,mDAAmD;AAAA,EAC/D;AAAA,EACA,oCAAoC;AAAA,IAClC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,iBAAiB;AAAA,IACpC,WAAW;AAAA,IACX,SAAS,CAAC,kDAAkD;AAAA,EAC9D;AAAA,EACA,oCAAoC;AAAA,IAClC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,iBAAiB;AAAA,IACxB,WAAW;AAAA,IACX,SAAS,CAAC,kDAAkD;AAAA,EAC9D;AAAA,EACA,mCAAmC;AAAA,IACjC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,iBAAiB;AAAA,IACpC,WAAW;AAAA,IACX,SAAS,CAAC,iDAAiD;AAAA,EAC7D;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,gBAAgB,UAAU;AAAA,IAC7C,WAAW;AAAA,IACX,SAAS,CAAC,2CAA2C;AAAA,EACvD;AAAA,EACA,0BAA0B;AAAA,IACxB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,YAAY,iBAAiB;AAAA,IAClD,WAAW;AAAA,IACX,SAAS,CAAC,wCAAwC;AAAA,EACpD;AACF;AAEO,IAAM,YAAY;AAAA,EACvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEO,IAAM,eAA4B;AAAA,EACvC,IAAI;AAAA,EACJ,MAAM;AAAA,EACN,aAAa;AAAA,EACb,OAAO;AAAA,IACL,iBAAiB;AAAA,IACjB,WAAW;AAAA,EACb;AAAA,EACA,QAAQ;AAAA,IACN,iBAAiB;AAAA,IACjB,WAAW;AAAA,EACb;AAAA,EACA,MAAM,CAAC;AAAA,EACP,WAAW;AACb;;;ADnhCA,IAAM,YAAY,MAAM,OAAO,WAAW,eAAe,OAAO,OAAO,UAAU;AAE1E,IAAM,gBAAN,MAAoB;AAAA,EACjB;AAAA,EACS;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEV,YAAY,OAAoB;AACrC,SAAK,UAAU,MAAM,UAAU;AAC/B,SAAK,WAAW,MAAM,WAAW;AACjC,SAAK,mBAAmB,MAAM,mBAAmB;AACjD,SAAK,WAAW,EAAE,GAAG,MAAM,QAAQ;AAEnC,QAAI,MAAM,OAAO;AACf,WAAK,SAAS,UAAU,IAAI
,MAAM;AAAA,IACpC;AAEA,QAAI,MAAM,OAAO;AACf,WAAK,SAAS,eAAe,IAAI,UAAU,MAAM,KAAK;AAAA,IACxD;AAEA,SAAK,eAAe,MAAM,OAAO;AAAA,MAC/B,SAAS,KAAK;AAAA,MACd,iBAAiB,KAAK;AAAA,MACtB,SAAS,KAAK;AAAA,IAChB,CAAC;AAAA,EACH;AAAA,EAEA,MAAa,aAAa,OAAyB,UAA0B,CAAC,GAAG;AAC/E,UAAM,SAAS,QAAQ,UAAU,YAAY,QAAQ,KAAK,QAAQ;AAElE,UAAM,EAAE,KAAK,IAAI,MAAM,KAAK;AAAA,MAAiB,MAC3C,KAAK,aAAa,KAAwB,+BAA+B,OAAO;AAAA,QAC9E;AAAA,QACA,SAAS,QAAQ,WAAW,KAAK;AAAA,MACnC,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,aAAa;AACxB,UAAM,EAAE,KAAK,IAAI,MAAM,KAAK;AAAA,MAAiB,MAC3C,KAAK,aAAa,IAAyB,sBAAsB;AAAA,IACnE;AAEA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,OAAc,mBACZ,SACA,UAA0B,CAAC,GAC0B;AACrD,UAAM,SAAS,QAAQ,UAAU,YAAY,QAAQ,KAAK,QAAQ;AAElE,QAAI,UAAU,GAAG;AACf,YAAMC,OAAM,MAAM,MAAM,GAAG,KAAK,OAAO,sCAAsC;AAAA,QAC3E,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,GAAG,KAAK;AAAA,UACR,gBAAgB;AAAA,QAClB;AAAA,QACA,aAAa,KAAK,mBAAmB,YAAY;AAAA,QACjD,MAAM,KAAK,UAAU,EAAE,GAAG,SAAS,QAAQ,KAAK,CAAC;AAAA,QACjD;AAAA,MACF,CAAC;AAED,UAAI,CAACA,KAAI,IAAI;AACX,cAAM,OAAO,MAAMA,KAAI,KAAK,EAAE,MAAM,MAAM,EAAE;AAC5C,cAAM,MAAM,IAAI,MAAM,QAAQA,KAAI,MAAM,KAAK,QAAQA,KAAI,UAAU,EAAE;AACpE,QAAC,IAAY,WAAW,EAAE,QAAQA,KAAI,QAAQ,MAAM,KAAK;AAC1D,cAAM;AAAA,MACR;AAEA,YAAM,OAAOA,KAAI;AACjB,UAAI,CAAC,MAAM;AACT,cAAM,IAAI,MAAM,iDAAiD;AAAA,MACnE;AAEA,YAAM,SAAS,KAAK,UAAU;AAC9B,YAAM,YAAY,mBAAmB;AACnC,mBAAS;AACP,gBAAM,EAAE,OAAO,KAAK,IAAI,MAAM,OAAO,KAAK;AAC1C,cAAI,MAAM;AACR;AAAA,UACF;AACA,cAAI,OAAO;AACT,kBAAM;AAAA,UACR;AAAA,QACF;AAAA,MACF,GAAG;AAEH,uBAAiB,OAAO,KAAK,QAA8B,QAAQ,GAAG;AACpE,cAAM;AAAA,MACR;AACA;AAAA,IACF;AAEA,UAAM,MAAM,MAAM,KAAK;AAAA,MAAiB,MACtC,KAAK,aAAa;AAAA,QAChB;AAAA,QACA,EAAE,GAAG,SAAS,QAAQ,KAAK;AAAA,QAC3B;AAAA,UACE,cAAc;AAAA,UACd;AAAA,UACA,SAAS,QAAQ,WAAW,KAAK;AAAA,QACnC;AAAA,MACF;AAAA,IACF;AAEA,UAAM,aAAwC,IAAI;AAClD,QAAI,CAAC,YAAY;AACf,YAAM,IAAI,MAAM,iDAAiD;AAAA,IACnE;AAEA,qBAAiB,OAAO,KAAK,QAA8B,UAAU,GAAG;AACtE,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,OAAe,QAAW,QAAqE;AAC7F,UAAM,UAAU,IAAI,YAAY,OAAO;AACvC,QAAI,SAAS;AAEb,qBAAiB,SAAS,QAAQ;AAChC,gBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAEhD,iBAAS;AACP,cAAM,IAAI,OAAO,QAAQ,IAAI;AAC7B,YAAI,IAAI,GAAG;AACT;AAAA,QACF;AAEA,cAAM,OAAO,OAAO,MAAM,GAAG,CAAC,EAAE,QAAQ,OAAO,EAAE;AACjD,iBAAS,OAAO,MAAM,IAAI,CAAC;AAE3B,YAAI,CAAC,MAAM;AACT;AAAA,QACF;AAEA,cAAM,KAAK,MAAM,IAAI;AAAA,MACvB;AAAA,IACF;AAEA,cAAU,QAAQ,OAAO;AAEzB,UAAM,OAAO,OAAO,KAAK;AACzB,QAAI,MAAM;AACR,YAAM,KAAK,MAAM,IAAI;AAAA,IACvB;AAAA,EACF;AAAA,EAEQ,wBAAwB,OAAqB;AACnD,QAAI,MAAM,aAAa,KAAK,GAAG;AAC7B,UAAI,CAAC,MAAM,UAAU;AACnB,eAAO;AAAA,MACT;AAEA,YAAM,SAAS,MAAM,UAAU;AAC/B,UAAI,UAAU,CAAC,KAAK,KAAK,GAAG,EAAE,SAAS,MAAM,GAAG;AAC9C,eAAO;AAAA,MACT;AAEA,UACE,MAAM,QACN,CAAC,gBAAgB,cAAc,aAAa,aAAa,aAAa,OAAO,EAAE,SAAS,MAAM,IAAI,GAClG;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,iBAAoB,IAAkC;AAClE,eAAO,oCAAQ,IAAI;AAAA,MACjB,eAAe;AAAA,MACf,eAAe;AAAA,MACf,cAAc;AAAA,MACd,QAAQ;AAAA,MACR,OAAO,CAAC,MAAM,KAAK,wBAAwB,CAAC;AAAA,IAC9C,CAAC;AAAA,EACH;AACF;AAEO,IAAM,sBAAsB,CAAC,UAAqC;AACvE,MAAI,OAAO,KAAK,GAAG;AACjB,WAAO,OAAO,KAAK;AAAA,EACrB;AAGA,QAAM,QAAQ,OAAO,OAAO,MAAM,EAAE,KAAK,CAAC,MAAM,EAAE,SAAS,SAAS,KAAK,CAAC;AAC1E,MAAI,OAAO;AACT,WAAO;AAAA,EACT;AAGA,MAAI,UAAU,SAAS,KAAK,GAAG;AAC7B,WAAO,EAAE,GAAG,cAAc,IAAI,OAAO,MAAM,MAAM;AAAA,EACnD;AACA,SAAO;AACT;;;AEhNO,IAAM,qBAAqB,CAAC,UAAuB;AACxD,MAAI,CAAC,gBAAgB,KAAK,GAAG;AAC3B,WAAO;AAAA,EACT;AAEA,MAAI,MAAM,SAAS,qBAAqB;AACtC,QAAI,MAAM,SAAS,SAAS,eAAe,GAAG;AAE5C,aAAO;AAAA,IACT;AAGA,WAAO;AAAA,EACT;AAEA,MACE,MAAM,SAAS,mBACf,MAAM,SAAS,iBACf,MAAM,SAAS,aACf,MAAM,SAAS,iBACf;AAEA,WAAO;AAAA,EACT;AAEA,QAAM,UAAW,MAAM,UAAkB;AACzC,MAAI,YAAY,4BAA4B;AAE1C,WAAO;AAAA,EACT;AAEA,MAAI,MAAM,SAAS,YAAY;AAE7B,
WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAEO,IAAM,kBAAkB,CAAC,UAAwB,gBAAgB,KAAK,KAAK,MAAM,SAAS;AAE1F,IAAM,iCAAiC,CAAC,UAC7C,gBAAgB,KAAK,MAAM,MAAM,SAAS,eAAe,MAAM,SAAS;AAEnE,IAAM,kBAAkB,CAAC,UAC9B,OAAO,UAAU,YACjB,UAAU,QACV,gBAAgB,SAChB,UAAU,SACV,UAAU,SACV,QAAQ;;;AC/DH,IAAM,qBAAN,MAA4B;AAAA,EACzB,gBAAkC,CAAC;AAAA,EAEpC,IAAI,aAA6B;AACtC,SAAK,cAAc,KAAK,WAAW;AACnC,WAAO,MAAM,KAAK,OAAO,WAAW;AAAA,EACtC;AAAA,EAEO,OAAO,aAA6B;AACzC,SAAK,gBAAgB,KAAK,cAAc,OAAO,CAAC,MAAM,MAAM,WAAW;AAAA,EACzE;AAAA,EAEA,MAAa,IAAI,OAAU,QAAiC;AAC1D,QAAI,QAAoB;AACxB,QAAI,SAAY;AAChB,QAAI,OAAO;AAEX,eAAW,eAAe,KAAK,eAAe;AAC5C,UAAI,MAAM;AACR;AAAA,MACF;AAEA,UAAI,OAAO,SAAS;AAClB,cAAM,OAAO;AAAA,MACf;AAEA,YAAM,IAAI,QAAc,CAAC,YAAY;AACnC,aAAK;AAAA,UACH;AAAA,UACA;AAAA,UACA,CAAC,KAAK,QAAQ;AACZ,oBAAQ;AACR,qBAAS;AACT,oBAAQ;AAAA,UACV;AAAA,UACA,CAAC,KAAK,QAAQ;AACZ,oBAAQ;AACR,qBAAS;AACT,mBAAO;AACP,oBAAQ;AAAA,UACV;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH;AAEA,QAAI,OAAO;AACT,YAAM;AAAA,IACR;AAEA,WAAO;AAAA,EACT;AACF;;;ACjDO,IAAM,6BAA6B;AAC1C,IAAM,0BAA0B;AAEzB,IAAM,uBAAuB,CAAC,aAAa,aAAa,UAAU,YAAY,gBAAgB,MAAM;AAG3G,IAAM,oBAAoB,CAAC,aAAa,aAAa,QAAQ;AAC7D,IAAM,uBAAuB,CAAC,OAAO,MAAM,cAAc,gBAAgB;AACzE,IAAM,uBAAuB,CAAC,oBAAoB,YAAY,YAAY,WAAW,SAAS,OAAO;AAErG,IAAM,oBAAoB;AAC1B,IAAM,qBAAqB;AAC3B,IAAM,mBAAmB;AAezB,IAAM,gBAAgB,CAAC,UAAiB,MAAM,KAAK,SAAS,aAAa;AACzE,IAAM,eAAe,CAAC,UAAiB,MAAM,KAAK,SAAS,YAAY;AACvE,IAAM,YAAY,CAAC,UAAiB,MAAM,KAAK,SAAS,UAAU;AAClE,IAAM,mBAAmB,CAAC,UAAiB,MAAM,KAAK,SAAS,QAAQ;AACvE,IAAM,mBAAmB,CAAC,UAAiB,MAAM,KAAK,SAAS,iBAAiB;AAEhF,IAAM,aAAa,CAAC,OAAc,MAAuB,SAAmC,CAAC,MAAM;AACjG,MAAI,QAAgB;AAEpB,QAAM,SAA2C;AAAA,IAC/C,CAAC,uBAAuB,MAAM,MAAM,kBAAkB,mBAAmB,EAAE;AAAA,IAC3E,CAAC,wBAAwB,MAAM,OAAO,kBAAkB,oBAAoB,EAAE;AAAA,IAC9E,CAAC,uBAAuB,MAAM,MAAM,aAAa,MAAM,MAAM,OAAO,aAAa,KAAK,kBAAkB,EAAE;AAAA,IAC1G,CAAC,eAAe,cAAc,KAAK,GAAG,CAAC;AAAA,IACvC,CAAC,cAAc,aAAa,KAAK,GAAG,EAAE;AAAA,IACtC,CAAC,kBAAkB,iBAAiB,KAAK,GAAG,CAAC;AAAA,IAC7C,CAAC,mBAAmB,iBAAiB,KAAK,GAAG,CAAC;AAAA,IAC9C,CAAC,qBAAqB,kBAAkB,SAAS,MAAM,WAAW,GAAG,CAAC;AAAA,IACtE,CAAC,yBAAyB,SAAS,UAAU,qBAAqB,KAAK,CAAC,MAAM,MAAM,GAAG,SAAS,CAAC,CAAC,GAAG,CAAC;AAAA,IACtG,CAAC,iCAAiC,SAAS,UAAU,qBAAqB,KAAK,CAAC,MAAM,MAAM,GAAG,SAAS,CAAC,CAAC,GAAG,EAAE;AAAA,IAC/G,CAAC,yBAAyB,SAAS,UAAU,qBAAqB,KAAK,CAAC,MAAM,MAAM,GAAG,SAAS,CAAC,CAAC,GAAG,CAAC;AAAA,IACtG,CAAC,YAAY,SAAS,UAAU,UAAU,KAAK,GAAG,CAAC;AAAA,EACrD;AAEA,aAAW,QAAQ,QAAQ;AACzB,QAAI,MAAM,IAAI,SAAS,IAAI,GAAG;AAC5B,aAAO,KAAK,CAAC,UAAU,IAAI,KAAK,MAAM,OAAO,OAAO,IAAgB,CAAC,KAAK,CAAC,CAAU;AAAA,IACvF;AAAA,EACF;AAEA,aAAW,CAAC,EAAE,WAAW,KAAK,KAAK,QAAQ;AACzC,QAAI,WAAW;AACb,eAAS;AAAA,IACX;AAAA,EACF;AAEA,SAAO;AACT;AAEO,IAAM,gBAAgB,CAACC,SAAiB,SAAmC,CAAC,MACjFA,QAAO,KAAK,CAAC,GAAG,MAAM,WAAW,GAAG,QAAQ,MAAM,IAAI,WAAW,GAAG,QAAQ,MAAM,CAAC;AAE9E,IAAM,gBAAgB,CAACA,SAAiB,SAAmC,CAAC,MACjFA,QAAO,KAAK,CAAC,GAAG,MAAM,WAAW,GAAG,QAAQ,MAAM,IAAI,WAAW,GAAG,QAAQ,MAAM,CAAC;AAE9E,IAAM,YAAY,CAACA,SAAoB,YAA2C,CAAC,MAAM;AAC9F,QAAM,OAAO,CAAC,GAAGA,OAAM;AACvB,QAAM,UAAU,CAAC,UAAiB,oBAAI,KAAK,GAAE,QAAQ,IAAI,IAAI,KAAK,IAAI,EAAE,QAAQ;AAChF,QAAM,qBAAqB,MAAO,KAAK;AAEvC,MAAI,CAAC,KAAK,QAAQ;AAChB,UAAM,IAAI,MAAM,gCAAgC;AAAA,EAClD;AAEA,SAAO,KAAK,QAAQ;AAClB,UAAM,MAAM,KAAK,MAAM;AACvB,UAAM,WAAW,UAAU,KAAK,CAAC,MAAM,EAAE,QAAQ,OAAO,QAAQ,EAAE,SAAS,IAAI,kBAAkB;AACjG,QAAI,UAAU;AACZ;AAAA,IACF,OAAO;AACL,aAAO;AAAA,IACT;AAAA,EACF;AAEA,QAAM,IAAI,MAAM,wBAAwBA,QAAO,KAAK,IAAI,CAAC,EAAE;AAC7D;AAEO,IAAe,gBAAf,MAA6B;AAKpC;AAEO,IAAM,sBAAN,cAAkC,cAAc;AAAA,EAC7C;AAAA,EAED,YAAY,QAA4B;AAC7C,UAAM;AACN,SAAK,UAAU,kBAAkB,MAAM;AAAA,EACzC;AAAA,EAEA,MAAc,kCAAkC;AAC9C,QAAI;AACF,YAAM,EAAE,IAAI,IAAI,MAAM,KAAK,QAAQ,OAAO,EAAE,IAAI,KAAK,QAAQ,MAAM,CAAC;AACpE,YAAM
,eAAe,OAAO,OAAO,IAAI,YAAY,EAAE,OAAO,CAAC,MAAM,EAAE,WAAW,YAAY;AAC5F,aAAO,aAAa,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,IACvC,SAAS,KAAK;AACZ,UAAI,+BAA+B,GAAG,GAAG;AAEvC,eAAO;AAAA,MACT;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAa,uBAAuB;AAClC,UAAM,mBAAmB,MAAM,KAAK,gCAAgC;AACpE,UAAMA,UAAkB,CAAC;AAEzB,UAAM,QAAQ;AAAA,MACZ,iBAAiB,IAAI,OAAO,gBAAgB;AAC1C,cAAM,EAAE,OAAO,IAAI,MAAM,KAAK,QAAQ,WAAW;AAAA,UAC/C,MAAM,GAAG,WAAW;AAAA,UACpB,OAAO,CAAC;AAAA,QACV,CAAC;AAED,YAAI,CAAC,QAAQ,QAAQ,QAAQ;AAC3B;AAAA,QACF;AAEA,mBAAW,SAAS,OAAO,QAAsB;AAC/C,cAAI,MAAM,QAAQ,MAAM,MAAM,MAAM,SAAS,MAAM,MAAM;AACvD,YAAAA,QAAO,KAAK;AAAA,cACV,KAAK,GAAG,WAAW,IAAI,MAAM,EAAE;AAAA,cAC/B;AAAA,cACA,IAAI,MAAM;AAAA,cACV,MAAM,MAAM;AAAA,cACZ,aAAa,MAAM;AAAA,cACnB,OAAO,MAAM;AAAA,cACb,QAAQ,MAAM;AAAA,cACd,MAAM,MAAM;AAAA,YACd,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAOA;AAAA,EACT;AAAA,EAEA,MAAa,wBAA0D;AACrE,QAAI;AACF,YAAM,EAAE,KAAK,IAAI,MAAM,KAAK,QAAQ,QAAQ,EAAE,IAAI,KAAK,mBAAmB,CAAC;AAE3E,UAAI,WAAW,UAAU,QAAW;AAClC,cAAM,WAAW,MAAM,MAAM,KAAK,GAAG;AACrC,eAAQ,MAAM,SAAS,KAAK;AAAA,MAC9B,OAAO;AACL,cAAM,EAAE,KAAK,IAAI,MAAM,KAAK,QAAQ,MAAM,IAAI,KAAK,KAAK;AAAA;AAAA;AAAA,UAGtD,SAAS,OAAO,KAAK,KAAK,QAAQ,OAAO,OAAO,EAAE;AAAA,YAChD,CAAC,KAAK,QAAQ;AACZ,kBAAI,GAAG,IAAI;AACX,qBAAO;AAAA,YACT;AAAA,YACA,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AACD,eAAO;AAAA,MACT;AAAA,IACF,SAAS,KAAK;AACZ,UAAI,gBAAgB,GAAG,GAAG;AACxB,eAAO;AAAA,MACT;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAa,qBAAqB,aAA+B;AAC/D,UAAM,KAAK,QAAQ,WAAW;AAAA,MAC5B,KAAK,KAAK;AAAA,MACV,SAAS,KAAK,UAAU,aAAa,MAAM,CAAC;AAAA,MAC5C,OAAO;AAAA,MACP,MAAM;AAAA,QACJ,QAAQ;AAAA,QACR,SAAS;AAAA,MACX;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAa,yBAAyB;AACpC,UAAM,KAAK,QAAQ,WAAW,EAAE,IAAI,KAAK,mBAAmB,CAAC,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC;AAAA,EAC/E;AAAA,EAEA,IAAY,qBAAqB;AAC/B,WAAO,QAAQ,KAAK,QAAQ,KAAK,KAAK,uBAAuB;AAAA,EAC/D;AACF;;;AP/LO,IAAM,YAAN,MAAM,WAAU;AAAA,EACrB,CAAQ,gBAAgB,IAAI;AAAA,EAE5B,OAAc,kBAAkB,KAA4B;AAC1D,WAAO,KAAK,mBAAmB;AAAA,EACjC;AAAA,EAEO,eAAe;AAAA,IACpB,SAAS,IAAI,mBAA4B;AAAA,IACzC,UAAU,IAAI,mBAA6B;AAAA,EAC7C;AAAA,EAEU,UAAmB,CAAC;AAAA,EACpB,aAAqB,IAAI,KAAK;AAAA;AAAA,EAC9B,cAAsB;AAAA;AAAA,EACtB;AAAA,EACA,eAAwC;AAAA,EACxC;AAAA,EACA,aAA4C,CAAC;AAAA,EAC7C,WAAoB;AAAA,EAEtB,UAAU,iBAAyB;AAAA,EAEpC,YAAY,OAAuB;AACxC,SAAK,UAAU,kBAAkB,MAAM,MAAM;AAC7C,SAAK,YAAY,MAAM,YAAY,IAAI,oBAAoB,MAAM,MAAM;AACvE,SAAK,aAAa,MAAM,WAAW,KAAK;AACxC,SAAK,cAAc,MAAM,cAAc,KAAK;AAC5C,SAAK,WAAW,MAAM,uBAAuB;AAAA,EAC/C;AAAA,EAEA,IAAW,SAAyB;AAClC,WAAO,KAAK;AAAA,EACd;AAAA,EAEO,QAAmB;AACxB,UAAM,OAAO,IAAI,WAAU;AAAA,MACzB,QAAQ,KAAK,QAAQ,MAAM;AAAA,MAC3B,UAAU,KAAK;AAAA,MACf,SAAS,KAAK;AAAA,MACd,YAAY,KAAK;AAAA,IACnB,CAAC;AAED,SAAK,UAAU,CAAC,GAAG,KAAK,OAAO;AAC/B,SAAK,eAAe,KAAK,eAAe,EAAE,GAAG,KAAK,aAAa,IAAI;AACnE,SAAK,aAAa,CAAC,GAAG,KAAK,UAAU;AACrC,SAAK,aAAa,UAAU,KAAK,aAAa;AAC9C,SAAK,aAAa,WAAW,KAAK,aAAa;AAE/C,WAAO;AAAA,EACT;AAAA,EAEO,GAAuC,OAAU,IAA4B;AAClF,WAAO,KAAK,QAAQ,GAAG,OAAO,EAAE;AAAA,EAClC;AAAA,EAEA,MAAa,uBAAyC;AACpD,QAAI,CAAC,KAAK,QAAQ,QAAQ;AACxB,WAAK,UAAU,MAAM,KAAK,UAAU,qBAAqB;AAAA,IAC3D;AAEA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAa,mBAA8C;AACzD,QAAI,KAAK,cAAc;AACrB,aAAO,KAAK;AAAA,IACd;AAEA,SAAK,eAAe,MAAM,KAAK,UAAU,sBAAsB;AAE/D,QAAI,KAAK,cAAc;AACrB,aAAO,KAAK;AAAA,IACd;AAEA,UAAMC,UAAS,MAAM,KAAK,qBAAqB;AAE/C,SAAK,eAAe;AAAA,MAClB,MAAM,cAAcA,OAAM,EAAE,IAAI,CAAC,MAAM,EAAE,GAAG;AAAA,MAC5C,MAAM,cAAcA,OAAM,EAAE,IAAI,CAAC,MAAM,EAAE,GAAG;AAAA,MAC5C,WAAW,CAAC;AAAA,IACd;AAEA,UAAM,KAAK,UAAU,qBAAqB,KAAK,YAAY;AAE3D,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAa,eAAe,aAA+B,OAAgB,OAAsB;AAC/F,SAAK,eAAe;AAEpB,QAAI,MAAM;AACR,YAAM,KAAK,UAAU,qBAAqB,WAAW;AAAA,IACvD;AAAA,EACF;AAAA,EAEQ,uBAA6B;AACnC,UAAM,MAAM,KAAK,
IAAI;AACrB,UAAM,cAAc,MAAO,KAAK;AAEhC,SAAK,aAAc,YAAY,KAAK,aAAc,UAAU,OAAO,CAAC,aAAa;AAC/E,YAAM,gBAAgB,IAAI,KAAK,SAAS,SAAS,EAAE,QAAQ;AAC3D,aAAO,MAAM,iBAAiB;AAAA,IAChC,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,aAAa,KAA8D;AACvF,UAAM,WAAW,CAACC,SAAgB;AAChC,YAAM,QAAQA,KAAI,MAAM,GAAG;AAC3B,aAAO,EAAE,aAAa,MAAM,CAAC,GAAI,OAAO,MAAM,MAAM,CAAC,EAAE,KAAK,GAAG,EAAE;AAAA,IACnE;AAEA,UAAM,cAAc,MAAM,KAAK,iBAAiB;AAEhD,gBAAY,SAAS,CAAC;AACtB,gBAAY,SAAS,CAAC;AACtB,gBAAY,cAAc,CAAC;AAE3B,UAAM,YAAY,CAAC,GAAG,YAAY,WAAW,GAAI,KAAK,cAAc,CAAC,CAAE;AAEvE,QAAI,QAAQ,QAAQ;AAClB,aAAO,SAAS,UAAU,YAAY,MAAM,SAAS,CAAC;AAAA,IACxD;AAEA,QAAI,QAAQ,QAAQ;AAClB,aAAO,SAAS,UAAU,YAAY,MAAM,SAAS,CAAC;AAAA,IACxD;AAEA,WAAO,SAAS,UAAU,CAAC,KAAiB,GAAG,YAAY,MAAM,GAAG,YAAY,IAAI,GAAG,SAAS,CAAC;AAAA,EACnG;AAAA,EAEA,MAAa,gBAAgB,OAA+B;AAC1D,QAAI,KAAK,UAAU;AACjB,YAAM,gBAAgB,oBAAoB,KAAK;AAC/C,UAAI,eAAe;AACjB,eAAO,EAAE,GAAG,eAAe,KAAK,cAAc,IAAgB,aAAa,eAAe;AAAA,MAC5F;AAAA,IACF;AAEA,UAAM,KAAK,qBAAqB;AAChC,UAAM,EAAE,aAAa,OAAO,UAAU,IAAI,MAAM,KAAK,aAAa,KAAK;AACvE,UAAM,MAAM,KAAK,QAAQ,KAAK,CAAC,MAAM,EAAE,gBAAgB,gBAAgB,EAAE,SAAS,aAAa,EAAE,OAAO,UAAU;AAClH,QAAI,CAAC,KAAK;AACR,YAAM,IAAI,MAAM,SAAS,SAAS,YAAY;AAAA,IAChD;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,gBAAgB,OAAsC;AACjE,QAAI,CAAC,KAAK,YAAY,CAAC,oBAAoB,MAAM,KAAM,GAAG;AACxD,aAAO,KAAK,iBAAiB,KAAK;AAAA,IACpC;AAEA,UAAM,aAAa,IAAI,cAAc,KAAK,QAAQ,MAAa;AAC/D,UAAM,WAAW,MAAM,WAAW,aAAa,KAAY;AAE3D,WAAO;AAAA,MACL,QAAQ;AAAA,QACN,IAAI;AAAA,QACJ,UAAU,SAAS,SAAS;AAAA,QAC5B,OAAO,SAAS,SAAS;AAAA,QACzB,SAAS;AAAA,UACP;AAAA,YACE,MAAM;AAAA,YACN,SAAS,SAAS;AAAA,YAClB,MAAM;AAAA,YACN,OAAO;AAAA,YACP,YAAY,SAAS,SAAS;AAAA,UAChC;AAAA,QACF;AAAA,QACA,OAAO;AAAA,UACL,aAAa,SAAS,SAAS,MAAM;AAAA,UACrC,WAAW;AAAA,UACX,cAAc,SAAS,SAAS,MAAM;AAAA,UACtC,YAAY,SAAS,SAAS,QAAQ;AAAA,QACxC;AAAA,QACA,UAAU;AAAA,UACR,MAAM,SAAS,SAAS,QAAQ;AAAA,QAClC;AAAA,MACF;AAAA,MACA,MAAM;AAAA,QACJ,QAAQ,SAAS,SAAS;AAAA,QAC1B,OAAO,EAAE,aAAa,SAAS,SAAS,UAAU,OAAO,SAAS,SAAS,MAAO;AAAA,QAClF,SAAS,SAAS,SAAS;AAAA,QAC3B,MAAM;AAAA,UACJ,OAAO;AAAA,UACP,QAAQ,SAAS,SAAS,QAAQ;AAAA,QACpC;AAAA,QACA,QAAQ;AAAA,UACN,OAAO,SAAS,SAAS,MAAM;AAAA,UAC/B,QAAQ,SAAS,SAAS,MAAM;AAAA,QAClC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,iBAAiB,OAAsC;AACnE,UAAM,QAAQ,KAAK,IAAI;AAEvB,UAAM,SAAS,MAAM,UAAU,YAAY,QAAQ,KAAK,UAAU;AAElE,UAAM,SAAS,KAAK,QAAQ,UAAU,MAAM;AAE5C,QAAI,QAAiB,EAAE,MAAM;AAC7B,QAAI;AACJ,QAAI;AAEJ,SAAK,QAAQ,KAAK,WAAW,KAAK;AAElC,UAAM,EAAE,QAAQ,KAAK,IAAI,UAAM;AAAA,MAI7B,YAAY;AACV,cAAM,YAAY,MAAM,KAAK,aAAa,MAAM,SAAS,MAAM;AAE/D,sBAAc,UAAU;AACxB,gBAAQ,UAAU;AAElB,gBAAQ,MAAM,KAAK,aAAa,QAAQ,IAAI,EAAE,MAAM,GAAG,MAAM;AAE7D,eAAO,OAAO,WAAW;AAAA,UACvB,MAAM,GAAG,WAAW;AAAA,UACpB,OAAO;AAAA,YACL,GAAG,MAAM;AAAA,YACT,OAAO,EAAE,IAAI,MAAM;AAAA,UACrB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA;AAAA,QACE,OAAO,OAAO,KAAK,aAAa;AAC9B,cAAI,QAAQ,SAAS;AAEnB,iBAAK,QAAQ,KAAK,WAAW,OAAO,GAAG;AACvC,mBAAO,eAAe;AACtB,mBAAO;AAAA,UACT;AAEA,cAAI,WAAW,KAAK,aAAa;AAC/B,iBAAK,QAAQ,KAAK,SAAS,OAAO,GAAG;AACrC,mBAAO;AAAA,UACT;AAEA,gBAAM,SAAS,mBAAmB,GAAG;AAErC,cAAI,WAAW,SAAS;AACtB,iBAAK,QAAQ,KAAK,SAAS,OAAO,GAAG;AACrC,mBAAO;AAAA,UACT;AAEA,cAAI,WAAW,YAAY;AAEzB,iBAAK,WAAW,KAAK;AAAA,cACnB,KAAK,GAAG,WAAY,IAAI,KAAM;AAAA,cAC9B,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,cAClC,QAAQ;AAAA,YACV,CAAC;AAED,iBAAK,qBAAqB;AAE1B,kBAAM,KAAK,UAAU,qBAAqB;AAAA,cACxC,GAAI,KAAK,gBAAgB,EAAE,MAAM,CAAC,GAAG,WAAW,CAAC,GAAG,MAAM,CAAC,EAAE;AAAA,cAC7D,WAAW,CAAC,GAAI,KAAK,aAAc,aAAa,CAAC,GAAI,GAAI,KAAK,cAAc,CAAC,CAAE;AAAA,YACjF,CAAC;AAED,iBAAK,QAAQ,KAAK,YAAY,OAAO,GAAG;AACxC,mBAAO;AAAA,UACT;AAEA,eAAK,QAAQ,KAAK,SAAS,OAAO,GAAG;AACrC,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAEA,UAAM,WAAW;AAAA,MACf;AAAA,MACA,MAAM;AAAA,QACJ,QAAQ,KAAK,UAAU;AAAA,
QACvB,OAAO,EAAE,aAA2B,MAAc;AAAA,QAClD,SAAS,KAAK,IAAI,IAAI;AAAA,QACtB,MAAM,EAAE,OAAO,OAAO,MAAM,WAAW,QAAQ,OAAO,MAAM,WAAW;AAAA,QACvE,QAAQ,EAAE,OAAO,OAAO,MAAM,aAAa,QAAQ,OAAO,MAAM,aAAa;AAAA,MAC/E;AAAA,IACF;AAEA,SAAK,QAAQ,KAAK,YAAY,OAAO,QAAQ;AAE7C,WAAO,KAAK,aAAa,SAAS,IAAI,UAAU,MAAM;AAAA,EACxD;AACF;",
|
|
4
|
+
"sourcesContent": [null, null, null, null, null, null, null, null, null, "import { backOff } from 'exponential-backoff'\nimport { createNanoEvents, Unsubscribe } from 'nanoevents'\n\nimport { ExtendedClient, getExtendedClient } from './bp-client'\nimport { CognitiveBeta, getCognitiveV2Model } from './cognitive-v2'\n\nimport { getActionFromError } from './errors'\nimport { InterceptorManager } from './interceptors'\nimport {\n DOWNTIME_THRESHOLD_MINUTES,\n getBestModels,\n getFastModels,\n Model,\n ModelPreferences,\n ModelProvider,\n ModelRef,\n pickModel,\n RemoteModelProvider,\n} from './models'\nimport { GenerateContentOutput } from './schemas.gen'\nimport { CognitiveProps, Events, InputProps, Request, Response } from './types'\n\nexport class Cognitive {\n public ['$$IS_COGNITIVE'] = true\n\n public static isCognitiveClient(obj: any): obj is Cognitive {\n return obj?.$$IS_COGNITIVE === true\n }\n\n public interceptors = {\n request: new InterceptorManager<Request>(),\n response: new InterceptorManager<Response>(),\n }\n\n protected _models: Model[] = []\n protected _timeoutMs: number = 5 * 60 * 1000 // Default timeout of 5 minutes\n protected _maxRetries: number = 5 // Default max retries\n protected _client: ExtendedClient\n protected _preferences: ModelPreferences | null = null\n protected _provider: ModelProvider\n protected _downtimes: ModelPreferences['downtimes'] = []\n protected _useBeta: boolean = false\n\n private _events = createNanoEvents<Events>()\n\n public constructor(props: CognitiveProps) {\n this._client = getExtendedClient(props.client)\n this._provider = props.provider ?? new RemoteModelProvider(props.client)\n this._timeoutMs = props.timeout ?? this._timeoutMs\n this._maxRetries = props.maxRetries ?? this._maxRetries\n this._useBeta = props.__experimental_beta ?? false\n }\n\n public get client(): ExtendedClient {\n return this._client\n }\n\n public clone(): Cognitive {\n const copy = new Cognitive({\n client: this._client.clone(),\n provider: this._provider,\n timeout: this._timeoutMs,\n maxRetries: this._maxRetries,\n })\n\n copy._models = [...this._models]\n copy._preferences = this._preferences ? 
{ ...this._preferences } : null\n copy._downtimes = [...this._downtimes]\n copy.interceptors.request = this.interceptors.request\n copy.interceptors.response = this.interceptors.response\n\n return copy\n }\n\n public on<K extends keyof Events>(this: this, event: K, cb: Events[K]): Unsubscribe {\n return this._events.on(event, cb)\n }\n\n public async fetchInstalledModels(): Promise<Model[]> {\n if (!this._models.length) {\n this._models = await this._provider.fetchInstalledModels()\n }\n\n return this._models\n }\n\n public async fetchPreferences(): Promise<ModelPreferences> {\n if (this._preferences) {\n return this._preferences\n }\n\n this._preferences = await this._provider.fetchModelPreferences()\n\n if (this._preferences) {\n return this._preferences\n }\n\n const models = await this.fetchInstalledModels()\n\n this._preferences = {\n best: getBestModels(models).map((m) => m.ref),\n fast: getFastModels(models).map((m) => m.ref),\n downtimes: [],\n }\n\n await this._provider.saveModelPreferences(this._preferences)\n\n return this._preferences\n }\n\n public async setPreferences(preferences: ModelPreferences, save: boolean = false): Promise<void> {\n this._preferences = preferences\n\n if (save) {\n await this._provider.saveModelPreferences(preferences)\n }\n }\n\n private _cleanupOldDowntimes(): void {\n const now = Date.now()\n const thresholdMs = 1000 * 60 * DOWNTIME_THRESHOLD_MINUTES\n\n this._preferences!.downtimes = this._preferences!.downtimes.filter((downtime) => {\n const downtimeStart = new Date(downtime.startedAt).getTime()\n return now - downtimeStart <= thresholdMs\n })\n }\n\n private async _selectModel(ref: string): Promise<{ integration: string; model: string }> {\n const parseRef = (ref: string) => {\n const parts = ref.split(':')\n return { integration: parts[0]!, model: parts.slice(1).join(':') }\n }\n\n const preferences = await this.fetchPreferences()\n\n preferences.best ??= []\n preferences.fast ??= []\n preferences.downtimes ??= []\n\n const downtimes = [...preferences.downtimes, ...(this._downtimes ?? [])]\n\n if (ref === 'best') {\n return parseRef(pickModel(preferences.best, downtimes))\n }\n\n if (ref === 'fast') {\n return parseRef(pickModel(preferences.fast, downtimes))\n }\n\n return parseRef(pickModel([ref as ModelRef, ...preferences.best, ...preferences.fast], downtimes))\n }\n\n public async getModelDetails(model: string): Promise<Model> {\n if (this._useBeta) {\n const resolvedModel = getCognitiveV2Model(model)\n if (resolvedModel) {\n return { ...resolvedModel, ref: resolvedModel.id as ModelRef, integration: 'cognitive-v2' }\n }\n }\n\n await this.fetchInstalledModels()\n const { integration, model: modelName } = await this._selectModel(model)\n const def = this._models.find((m) => m.integration === integration && (m.name === modelName || m.id === modelName))\n if (!def) {\n throw new Error(`Model ${modelName} not found`)\n }\n\n return def\n }\n\n public async generateContent(input: InputProps): Promise<Response> {\n if (!this._useBeta || !getCognitiveV2Model(input.model!)) {\n return this._generateContent(input)\n }\n\n const betaClient = new CognitiveBeta(this._client.config as any)\n const response = await betaClient.generateText(input as any)\n\n return {\n output: {\n id: 'beta-output',\n provider: response.metadata.provider,\n model: response.metadata.model!,\n choices: [\n {\n type: 'text',\n content: response.output,\n role: 'assistant',\n index: 0,\n stopReason: response.metadata.stopReason! 
as any,\n },\n ],\n usage: {\n inputTokens: response.metadata.usage.inputTokens,\n inputCost: 0,\n outputTokens: response.metadata.usage.outputTokens,\n outputCost: response.metadata.cost ?? 0,\n },\n botpress: {\n cost: response.metadata.cost ?? 0,\n },\n },\n meta: {\n cached: response.metadata.cached,\n model: { integration: response.metadata.provider, model: response.metadata.model! },\n latency: response.metadata.latency!,\n cost: {\n input: 0,\n output: response.metadata.cost || 0,\n },\n tokens: {\n input: response.metadata.usage.inputTokens,\n output: response.metadata.usage.outputTokens,\n },\n },\n }\n }\n\n private async _generateContent(input: InputProps): Promise<Response> {\n const start = Date.now()\n\n const signal = input.signal ?? AbortSignal.timeout(this._timeoutMs)\n\n const client = this._client.abortable(signal)\n\n let props: Request = { input }\n let integration: string\n let model: string\n\n this._events.emit('request', props)\n\n const { output, meta } = await backOff<{\n output: GenerateContentOutput\n meta: any\n }>(\n async () => {\n const selection = await this._selectModel(input.model ?? 'best')\n\n integration = selection.integration\n model = selection.model\n\n props = await this.interceptors.request.run({ input }, signal)\n\n return client.callAction({\n type: `${integration}:generateContent`,\n input: {\n ...props.input,\n model: { id: model },\n },\n }) as Promise<{ output: GenerateContentOutput; meta: any }>\n },\n {\n retry: async (err, _attempt) => {\n if (signal?.aborted) {\n // We don't want to retry if the request was aborted\n this._events.emit('aborted', props, err)\n signal.throwIfAborted()\n return false\n }\n\n if (_attempt > this._maxRetries) {\n this._events.emit('error', props, err)\n return false\n }\n\n const action = getActionFromError(err)\n\n if (action === 'abort') {\n this._events.emit('error', props, err)\n return false\n }\n\n if (action === 'fallback') {\n // We don't want to retry if the request was already retried with a fallback model\n this._downtimes.push({\n ref: `${integration!}:${model!}`,\n startedAt: new Date().toISOString(),\n reason: 'Model is down',\n })\n\n this._cleanupOldDowntimes()\n\n await this._provider.saveModelPreferences({\n ...(this._preferences ?? { best: [], downtimes: [], fast: [] }),\n downtimes: [...(this._preferences!.downtimes ?? []), ...(this._downtimes ?? [])],\n })\n\n this._events.emit('fallback', props, err)\n return true\n }\n\n this._events.emit('retry', props, err)\n return true\n },\n }\n )\n\n const response = {\n output,\n meta: {\n cached: meta.cached ?? false,\n model: { integration: integration!, model: model! 
},\n latency: Date.now() - start,\n cost: { input: output.usage.inputCost, output: output.usage.outputCost },\n tokens: { input: output.usage.inputTokens, output: output.usage.outputTokens },\n },\n } satisfies Response\n\n this._events.emit('response', props, response)\n\n return this.interceptors.response.run(response, signal)\n }\n}\n", "export let createNanoEvents = () => ({\n emit(event, ...args) {\n for (\n let callbacks = this.events[event] || [],\n i = 0,\n length = callbacks.length;\n i < length;\n i++\n ) {\n callbacks[i](...args)\n }\n },\n events: {},\n on(event, cb) {\n ;(this.events[event] ||= []).push(cb)\n return () => {\n this.events[event] = this.events[event]?.filter(i => cb !== i)\n }\n }\n})\n", "import { type Client } from '@botpress/client'\nimport { type AxiosInstance } from 'axios'\nimport { BotpressClientLike } from './types'\n\n/** @internal */\nexport type ExtendedClient = Client & {\n botId: string\n axios: AxiosInstance\n clone: () => ExtendedClient\n abortable: (signal: AbortSignal) => ExtendedClient\n}\n\ntype InternalClientType = BotpressClientLike & {\n _client?: InternalClientType\n config: {\n headers: Record<string, string>\n }\n}\n\nexport const getExtendedClient = (_client: unknown): ExtendedClient => {\n const client = _client as InternalClientType\n\n if (!client || client === null || typeof client !== 'object') {\n throw new Error('Client must be a valid instance of a Botpress client (@botpress/client)')\n }\n\n if (typeof client._client === 'object' && !!client._client) {\n try {\n return getExtendedClient(client._client)\n } catch {}\n }\n\n if (\n typeof client.constructor !== 'function' ||\n typeof client.callAction !== 'function' ||\n !client.config ||\n typeof client.config !== 'object' ||\n !client.config.headers\n ) {\n throw new Error('Client must be a valid instance of a Botpress client (@botpress/client)')\n }\n\n const botId = client.config.headers['x-bot-id'] as string\n\n if (!botId?.length) {\n throw new Error('Client must be instanciated with Bot ID')\n }\n\n const clone = () => {\n const c = client as any\n if (c.clone && typeof c.clone === 'function') {\n return getExtendedClient(c.clone())\n }\n return getExtendedClient(new c.constructor(c.config))\n }\n\n return {\n ...client,\n botId,\n axios: (client as any).axiosInstance as AxiosInstance,\n clone,\n abortable: (signal: AbortSignal) => {\n const abortable = clone()\n const instance = abortable.axios\n instance.defaults.signal = signal\n return abortable\n },\n } as ExtendedClient\n}\n", "import axios, { AxiosInstance } from 'axios'\nimport { backOff } from 'exponential-backoff'\nimport { defaultModel, knownTags, models } from './models'\nimport { CognitiveRequest, CognitiveResponse, CognitiveStreamChunk, Model } from './types'\n\nexport { CognitiveRequest, CognitiveResponse, CognitiveStreamChunk }\n\ntype ClientProps = {\n apiUrl?: string\n timeout?: number\n botId?: string\n token?: string\n withCredentials?: boolean\n headers?: Record<string, string>\n}\n\ntype RequestOptions = {\n signal?: AbortSignal\n timeout?: number\n}\n\nconst isBrowser = () => typeof window !== 'undefined' && typeof window.fetch === 'function'\n\nexport class CognitiveBeta {\n private _axiosClient: AxiosInstance\n private readonly _apiUrl: string\n private readonly _timeout: number\n private readonly _withCredentials: boolean\n private readonly _headers: Record<string, string>\n\n public constructor(props: ClientProps) {\n this._apiUrl = props.apiUrl || 'https://api.botpress.cloud'\n this._timeout = 
props.timeout || 60_001\n this._withCredentials = props.withCredentials || false\n this._headers = { ...props.headers }\n\n if (props.botId) {\n this._headers['X-Bot-Id'] = props.botId\n }\n\n if (props.token) {\n this._headers['Authorization'] = `Bearer ${props.token}`\n }\n\n this._axiosClient = axios.create({\n headers: this._headers,\n withCredentials: this._withCredentials,\n baseURL: this._apiUrl,\n })\n }\n\n public async generateText(input: CognitiveRequest, options: RequestOptions = {}) {\n const signal = options.signal ?? AbortSignal.timeout(this._timeout)\n\n const { data } = await this._withServerRetry(() =>\n this._axiosClient.post<CognitiveResponse>('/v2/cognitive/generate-text', input, {\n signal,\n timeout: options.timeout ?? this._timeout,\n })\n )\n\n return data\n }\n\n public async listModels() {\n const { data } = await this._withServerRetry(() =>\n this._axiosClient.get<{ models: Model[] }>('/v2/cognitive/models')\n )\n\n return data.models\n }\n\n public async *generateTextStream(\n request: CognitiveRequest,\n options: RequestOptions = {}\n ): AsyncGenerator<CognitiveStreamChunk, void, unknown> {\n const signal = options.signal ?? AbortSignal.timeout(this._timeout)\n\n if (isBrowser()) {\n const res = await fetch(`${this._apiUrl}/v2/cognitive/generate-text-stream`, {\n method: 'POST',\n headers: {\n ...this._headers,\n 'Content-Type': 'application/json',\n },\n credentials: this._withCredentials ? 'include' : 'omit',\n body: JSON.stringify({ ...request, stream: true }),\n signal,\n })\n\n if (!res.ok) {\n const text = await res.text().catch(() => '')\n const err = new Error(`HTTP ${res.status}: ${text || res.statusText}`)\n ;(err as any).response = { status: res.status, data: text }\n throw err\n }\n\n const body = res.body\n if (!body) {\n throw new Error('No response body received for streaming request')\n }\n\n const reader = body.getReader()\n const iterable = (async function* () {\n for (;;) {\n const { value, done } = await reader.read()\n if (done) {\n break\n }\n if (value) {\n yield value\n }\n }\n })()\n\n for await (const obj of this._ndjson<CognitiveStreamChunk>(iterable)) {\n yield obj\n }\n return\n }\n\n const res = await this._withServerRetry(() =>\n this._axiosClient.post(\n '/v2/cognitive/generate-text-stream',\n { ...request, stream: true },\n {\n responseType: 'stream',\n signal,\n timeout: options.timeout ?? 
this._timeout,\n }\n )\n )\n\n const nodeStream: AsyncIterable<Uint8Array> = res.data as any\n if (!nodeStream) {\n throw new Error('No response body received for streaming request')\n }\n\n for await (const obj of this._ndjson<CognitiveStreamChunk>(nodeStream)) {\n yield obj\n }\n }\n\n private async *_ndjson<T>(stream: AsyncIterable<Uint8Array>): AsyncGenerator<T, void, unknown> {\n const decoder = new TextDecoder('utf-8')\n let buffer = ''\n\n for await (const chunk of stream) {\n buffer += decoder.decode(chunk, { stream: true })\n\n for (;;) {\n const i = buffer.indexOf('\\n')\n if (i < 0) {\n break\n }\n\n const line = buffer.slice(0, i).replace(/\\r$/, '')\n buffer = buffer.slice(i + 1)\n\n if (!line) {\n continue\n }\n\n yield JSON.parse(line) as T\n }\n }\n\n buffer += decoder.decode()\n\n const tail = buffer.trim()\n if (tail) {\n yield JSON.parse(tail) as T\n }\n }\n\n private _isRetryableServerError(error: any): boolean {\n if (axios.isAxiosError(error)) {\n if (!error.response) {\n return true\n }\n\n const status = error.response?.status\n if (status && [502, 503, 504].includes(status)) {\n return true\n }\n\n if (\n error.code &&\n ['ECONNABORTED', 'ECONNRESET', 'ETIMEDOUT', 'EAI_AGAIN', 'ENOTFOUND', 'EPIPE'].includes(error.code)\n ) {\n return true\n }\n }\n\n return false\n }\n\n private async _withServerRetry<T>(fn: () => Promise<T>): Promise<T> {\n return backOff(fn, {\n numOfAttempts: 3,\n startingDelay: 300,\n timeMultiple: 2,\n jitter: 'full',\n retry: (e) => this._isRetryableServerError(e),\n })\n }\n}\n\nexport const getCognitiveV2Model = (model: string): Model | undefined => {\n if (models[model]) {\n return models[model]\n }\n\n // Some models (ex fireworks) have a long name (the internal id) so it is now an alias instead of the main id\n const alias = Object.values(models).find((x) => x.aliases?.includes(model))\n if (alias) {\n return alias\n }\n\n // Special tags like auto, fast, coding don't have explicit limits so we give a default model\n if (knownTags.includes(model)) {\n return { ...defaultModel, id: model, name: model }\n }\n return undefined\n}\n", "import { Model } from 'src/schemas.gen'\n\nexport type RemoteModel = Model & { aliases?: string[]; lifecycle: 'live' | 'beta' | 'deprecated' | 'discontinued' }\n\nexport const models: Record<string, RemoteModel> = {\n 'openai:gpt-5-2025-08-07': {\n id: 'openai:gpt-5-2025-08-07',\n name: 'GPT-5',\n description:\n \"GPT-5 is OpenAI's latest and most advanced AI model. It is a reasoning model that chooses the best way to respond based on task complexity and user intent. GPT-5 delivers expert-level performance across coding, math, writing, health, and visual perception, with improved accuracy, speed, and reduced hallucinations. It excels in complex tasks, long-context understanding, multimodal inputs (text and images), and safe, nuanced responses.\",\n input: {\n maxTokens: 400000,\n costPer1MTokens: 1.25,\n },\n output: {\n maxTokens: 128000,\n costPer1MTokens: 10,\n },\n tags: ['recommended', 'reasoning', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:gpt-5-mini-2025-08-07': {\n id: 'openai:gpt-5-mini-2025-08-07',\n name: 'GPT-5 Mini',\n description:\n 'GPT-5 Mini is a lightweight and cost-effective version of GPT-5, optimized for applications where speed and efficiency matter more than full advanced capabilities. 
It is designed for cost-sensitive use cases such as chatbots, content generation, and high-volume usage, striking a balance between performance and affordability, making it suitable for simpler tasks that do not require deep multi-step reasoning or the full reasoning power of GPT-5',\n input: {\n maxTokens: 400000,\n costPer1MTokens: 0.25,\n },\n output: {\n maxTokens: 128000,\n costPer1MTokens: 2,\n },\n tags: ['recommended', 'reasoning', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:gpt-5-nano-2025-08-07': {\n id: 'openai:gpt-5-nano-2025-08-07',\n name: 'GPT-5 Nano',\n description:\n 'GPT-5 Nano is an ultra-lightweight version of GPT-5 optimized for speed and very low latency, making it ideal for use cases like simple chatbots, basic content generation, summarization, and classification tasks.',\n input: {\n maxTokens: 400000,\n costPer1MTokens: 0.05,\n },\n output: {\n maxTokens: 128000,\n costPer1MTokens: 0.4,\n },\n tags: ['low-cost', 'reasoning', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:o4-mini-2025-04-16': {\n id: 'openai:o4-mini-2025-04-16',\n name: 'GPT o4-mini',\n description:\n \"o4-mini is OpenAI's latest small o-series model. It's optimized for fast, effective reasoning with exceptionally efficient performance in coding and visual tasks.\",\n input: {\n maxTokens: 200000,\n costPer1MTokens: 1.1,\n },\n output: {\n maxTokens: 100000,\n costPer1MTokens: 4.4,\n },\n tags: ['reasoning', 'vision', 'coding'],\n lifecycle: 'live',\n },\n 'openai:o3-2025-04-16': {\n id: 'openai:o3-2025-04-16',\n name: 'GPT o3',\n description:\n 'o3 is a well-rounded and powerful model across domains. It sets a new standard for math, science, coding, and visual reasoning tasks. It also excels at technical writing and instruction-following.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 2,\n },\n output: {\n maxTokens: 100000,\n costPer1MTokens: 8,\n },\n tags: ['reasoning', 'vision', 'coding'],\n lifecycle: 'live',\n },\n 'openai:gpt-4.1-2025-04-14': {\n id: 'openai:gpt-4.1-2025-04-14',\n name: 'GPT 4.1',\n description:\n 'GPT 4.1 is our flagship model for complex tasks. It is well suited for problem solving across domains. The knowledge cutoff is June 2024.',\n input: {\n maxTokens: 1047576,\n costPer1MTokens: 2,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 8,\n },\n tags: ['recommended', 'vision', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:gpt-4.1-mini-2025-04-14': {\n id: 'openai:gpt-4.1-mini-2025-04-14',\n name: 'GPT 4.1 Mini',\n description:\n 'GPT 4.1 mini provides a balance between intelligence, speed, and cost that makes it an attractive model for many use cases. The knowledge cutoff is June 2024.',\n input: {\n maxTokens: 1047576,\n costPer1MTokens: 0.4,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 1.6,\n },\n tags: ['recommended', 'vision', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:gpt-4.1-nano-2025-04-14': {\n id: 'openai:gpt-4.1-nano-2025-04-14',\n name: 'GPT 4.1 Nano',\n description: 'GPT-4.1 nano is the fastest, most cost-effective GPT 4.1 model. 
The knowledge cutoff is June 2024.',\n input: {\n maxTokens: 1047576,\n costPer1MTokens: 0.1,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 0.4,\n },\n tags: ['low-cost', 'vision', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:o3-mini-2025-01-31': {\n id: 'openai:o3-mini-2025-01-31',\n name: 'GPT o3-mini',\n description:\n 'o3-mini is the most recent small reasoning model from OpenAI, providing high intelligence at the same cost and latency targets of o1-mini. Also supports key developer features like Structured Outputs and function calling.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 1.1,\n },\n output: {\n maxTokens: 100000,\n costPer1MTokens: 4.4,\n },\n tags: ['reasoning', 'general-purpose', 'coding'],\n lifecycle: 'live',\n },\n 'openai:o1-2024-12-17': {\n id: 'openai:o1-2024-12-17',\n name: 'GPT o1',\n description:\n 'The o1 model is designed to solve hard problems across domains. Trained with reinforcement learning to perform complex reasoning with a long internal chain of thought.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 15,\n },\n output: {\n maxTokens: 100000,\n costPer1MTokens: 60,\n },\n tags: ['reasoning', 'vision', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:o1-mini-2024-09-12': {\n id: 'openai:o1-mini-2024-09-12',\n name: 'GPT o1-mini',\n description:\n 'The o1-mini model is a fast and affordable reasoning model for specialized tasks. Trained with reinforcement learning to perform complex reasoning.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 1.1,\n },\n output: {\n maxTokens: 65536,\n costPer1MTokens: 4.4,\n },\n tags: ['reasoning', 'vision', 'general-purpose'],\n lifecycle: 'live',\n },\n 'openai:gpt-4o-mini-2024-07-18': {\n id: 'openai:gpt-4o-mini-2024-07-18',\n name: 'GPT-4o Mini',\n description:\n \"GPT-4o mini is OpenAI's most advanced model in the small models category, and their cheapest model yet. Multimodal with higher intelligence than gpt-3.5-turbo but just as fast.\",\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.15,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 0.6,\n },\n tags: ['recommended', 'vision', 'low-cost', 'general-purpose', 'function-calling'],\n lifecycle: 'live',\n },\n 'openai:gpt-4o-2024-11-20': {\n id: 'openai:gpt-4o-2024-11-20',\n name: 'GPT-4o (November 2024)',\n description:\n \"GPT-4o is OpenAI's most advanced model. Multimodal with the same high intelligence as GPT-4 Turbo but cheaper and more efficient.\",\n input: {\n maxTokens: 128000,\n costPer1MTokens: 2.5,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 10,\n },\n tags: ['recommended', 'vision', 'general-purpose', 'coding', 'agents', 'function-calling'],\n lifecycle: 'live',\n },\n 'openai:gpt-4o-2024-08-06': {\n id: 'openai:gpt-4o-2024-08-06',\n name: 'GPT-4o (August 2024)',\n description:\n \"GPT-4o is OpenAI's most advanced model. Multimodal with the same high intelligence as GPT-4 Turbo but cheaper and more efficient.\",\n input: {\n maxTokens: 128000,\n costPer1MTokens: 2.5,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 10,\n },\n tags: ['deprecated', 'vision', 'general-purpose', 'coding', 'agents', 'function-calling'],\n lifecycle: 'deprecated',\n },\n 'openai:gpt-4o-2024-05-13': {\n id: 'openai:gpt-4o-2024-05-13',\n name: 'GPT-4o (May 2024)',\n description:\n \"GPT-4o is OpenAI's most advanced model. 
Multimodal with the same high intelligence as GPT-4 Turbo but cheaper and more efficient.\",\n input: {\n maxTokens: 128000,\n costPer1MTokens: 5,\n },\n output: {\n maxTokens: 4096,\n costPer1MTokens: 15,\n },\n tags: ['deprecated', 'vision', 'general-purpose', 'coding', 'agents', 'function-calling'],\n lifecycle: 'deprecated',\n },\n 'openai:gpt-4-turbo-2024-04-09': {\n id: 'openai:gpt-4-turbo-2024-04-09',\n name: 'GPT-4 Turbo',\n description:\n 'GPT-4 is a large multimodal model that can solve difficult problems with greater accuracy than previous models, thanks to its broader general knowledge and advanced reasoning capabilities.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 10,\n },\n output: {\n maxTokens: 4096,\n costPer1MTokens: 30,\n },\n tags: ['deprecated', 'general-purpose', 'coding', 'agents', 'function-calling'],\n lifecycle: 'deprecated',\n },\n 'openai:gpt-3.5-turbo-0125': {\n id: 'openai:gpt-3.5-turbo-0125',\n name: 'GPT-3.5 Turbo',\n description:\n 'GPT-3.5 Turbo can understand and generate natural language or code and has been optimized for chat but works well for non-chat tasks as well.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.5,\n },\n output: {\n maxTokens: 4096,\n costPer1MTokens: 1.5,\n },\n tags: ['deprecated', 'general-purpose', 'low-cost'],\n lifecycle: 'deprecated',\n },\n 'anthropic:claude-sonnet-4-20250514': {\n id: 'anthropic:claude-sonnet-4-20250514',\n name: 'Claude Sonnet 4',\n description:\n 'Claude Sonnet 4 significantly enhances the capabilities of its predecessor, Sonnet 3.7, excelling in both coding and reasoning tasks with improved precision and controllability. Sonnet 4 balances capability and computational efficiency, making it suitable for a broad range of applications from routine coding tasks to complex software development projects. Key enhancements include improved autonomous codebase navigation, reduced error rates in agent-driven workflows, and increased reliability in following intricate instructions.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 64000,\n costPer1MTokens: 15,\n },\n tags: ['recommended', 'reasoning', 'agents', 'vision', 'general-purpose', 'coding'],\n lifecycle: 'live',\n },\n 'anthropic:claude-sonnet-4-reasoning-20250514': {\n id: 'anthropic:claude-sonnet-4-reasoning-20250514',\n name: 'Claude Sonnet 4 (Reasoning Mode)',\n description:\n 'This model uses the \"Extended Thinking\" mode and will use a significantly higher amount of output tokens than the Standard Mode, so this model should only be used for tasks that actually require it.\\n\\nClaude Sonnet 4 significantly enhances the capabilities of its predecessor, Sonnet 3.7, excelling in both coding and reasoning tasks with improved precision and controllability.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 64000,\n costPer1MTokens: 15,\n },\n tags: ['deprecated', 'vision', 'reasoning', 'general-purpose', 'agents', 'coding'],\n lifecycle: 'deprecated',\n },\n 'anthropic:claude-3-7-sonnet-20250219': {\n id: 'anthropic:claude-3-7-sonnet-20250219',\n name: 'Claude 3.7 Sonnet',\n description:\n 'Claude 3.7 Sonnet is an advanced large language model with improved reasoning, coding, and problem-solving capabilities. 
The model demonstrates notable improvements in coding, particularly in front-end development and full-stack updates, and excels in agentic workflows, where it can autonomously navigate multi-step processes.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 64000,\n costPer1MTokens: 15,\n },\n tags: ['recommended', 'reasoning', 'agents', 'vision', 'general-purpose', 'coding'],\n lifecycle: 'live',\n },\n 'anthropic:claude-3-7-sonnet-reasoning-20250219': {\n id: 'anthropic:claude-3-7-sonnet-reasoning-20250219',\n name: 'Claude 3.7 Sonnet (Reasoning Mode)',\n description:\n 'This model uses the \"Extended Thinking\" mode and will use a significantly higher amount of output tokens than the Standard Mode, so this model should only be used for tasks that actually require it.\\n\\nClaude 3.7 Sonnet is an advanced large language model with improved reasoning, coding, and problem-solving capabilities.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 64000,\n costPer1MTokens: 15,\n },\n tags: ['deprecated', 'vision', 'reasoning', 'general-purpose', 'agents', 'coding'],\n lifecycle: 'deprecated',\n },\n 'anthropic:claude-3-5-haiku-20241022': {\n id: 'anthropic:claude-3-5-haiku-20241022',\n name: 'Claude 3.5 Haiku',\n description:\n 'Claude 3.5 Haiku features offers enhanced capabilities in speed, coding accuracy, and tool use. Engineered to excel in real-time applications, it delivers quick response times that are essential for dynamic tasks such as chat interactions and immediate coding suggestions.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 0.8,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 4,\n },\n tags: ['general-purpose', 'low-cost'],\n lifecycle: 'live',\n },\n 'anthropic:claude-3-5-sonnet-20241022': {\n id: 'anthropic:claude-3-5-sonnet-20241022',\n name: 'Claude 3.5 Sonnet (October 2024)',\n description:\n 'Claude 3.5 Sonnet delivers better-than-Opus capabilities, faster-than-Sonnet speeds, at the same Sonnet prices. Sonnet is particularly good at coding, data science, visual processing, and agentic tasks.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 15,\n },\n tags: ['vision', 'general-purpose', 'agents', 'coding', 'function-calling', 'storytelling'],\n lifecycle: 'live',\n },\n 'anthropic:claude-3-5-sonnet-20240620': {\n id: 'anthropic:claude-3-5-sonnet-20240620',\n name: 'Claude 3.5 Sonnet (June 2024)',\n description:\n 'Claude 3.5 Sonnet delivers better-than-Opus capabilities, faster-than-Sonnet speeds, at the same Sonnet prices. Sonnet is particularly good at coding, data science, visual processing, and agentic tasks.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 4096,\n costPer1MTokens: 15,\n },\n tags: ['vision', 'general-purpose', 'agents', 'coding', 'function-calling', 'storytelling'],\n lifecycle: 'live',\n },\n 'anthropic:claude-3-haiku-20240307': {\n id: 'anthropic:claude-3-haiku-20240307',\n name: 'Claude 3 Haiku',\n description:\n \"Claude 3 Haiku is Anthropic's fastest and most compact model for near-instant responsiveness. 
Quick and accurate targeted performance.\",\n input: {\n maxTokens: 200000,\n costPer1MTokens: 0.25,\n },\n output: {\n maxTokens: 4096,\n costPer1MTokens: 1.25,\n },\n tags: ['low-cost', 'general-purpose'],\n lifecycle: 'live',\n },\n 'google-ai:gemini-2.5-flash': {\n id: 'google-ai:gemini-2.5-flash',\n name: 'Gemini 2.5 Flash',\n description:\n 'Google\\'s state-of-the-art workhorse model with advanced reasoning, coding, mathematics, and scientific capabilities. Includes built-in \"thinking\" capabilities for enhanced accuracy.',\n input: {\n maxTokens: 1048576,\n costPer1MTokens: 0.3,\n },\n output: {\n maxTokens: 65536,\n costPer1MTokens: 2.5,\n },\n tags: ['recommended', 'reasoning', 'agents', 'general-purpose', 'vision'],\n lifecycle: 'live',\n },\n 'google-ai:gemini-2.5-pro': {\n id: 'google-ai:gemini-2.5-pro',\n name: 'Gemini 2.5 Pro',\n description:\n 'Google\\'s most advanced AI model designed for complex reasoning, coding, mathematics, and scientific tasks. Features \"thinking\" capabilities for superior human-preference alignment and problem-solving.',\n input: {\n maxTokens: 200000,\n costPer1MTokens: 1.25,\n },\n output: {\n maxTokens: 65536,\n costPer1MTokens: 10,\n },\n tags: ['recommended', 'reasoning', 'agents', 'general-purpose', 'vision', 'coding'],\n lifecycle: 'live',\n },\n 'google-ai:models/gemini-2.0-flash': {\n id: 'google-ai:models/gemini-2.0-flash',\n name: 'Gemini 2.0 Flash',\n description:\n 'Next-gen Gemini model with improved capabilities, superior speed, native tool use, multimodal generation, and 1M token context window.',\n input: {\n maxTokens: 1048576,\n costPer1MTokens: 0.1,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.4,\n },\n tags: ['low-cost', 'general-purpose', 'vision'],\n lifecycle: 'live',\n },\n 'cerebras:gpt-oss-120b': {\n id: 'cerebras:gpt-oss-120b',\n name: 'GPT-OSS 120B (Preview)',\n description:\n 'gpt-oss-120b is a high-performance, open-weight language model designed for production-grade, general-purpose use cases. It excels at complex reasoning and supports configurable reasoning effort, full chain-of-thought transparency for easier debugging and trust, and native agentic capabilities for function calling, tool use, and structured outputs.',\n input: {\n maxTokens: 131000,\n costPer1MTokens: 0.35,\n },\n output: {\n maxTokens: 16000,\n costPer1MTokens: 0.75,\n },\n tags: ['preview', 'general-purpose', 'reasoning'],\n lifecycle: 'live',\n },\n 'cerebras:qwen-3-32b': {\n id: 'cerebras:qwen-3-32b',\n name: 'Qwen3 32B',\n description:\n 'Qwen3-32B is a world-class reasoning model with comparable quality to DeepSeek R1 while outperforming GPT-4.1 and Claude Sonnet 3.7. It excels in code-gen, tool-calling, and advanced reasoning, making it an exceptional model for a wide range of production use cases.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.4,\n },\n output: {\n maxTokens: 16000,\n costPer1MTokens: 0.8,\n },\n tags: ['general-purpose', 'reasoning'],\n lifecycle: 'live',\n },\n 'cerebras:llama-4-scout-17b-16e-instruct': {\n id: 'cerebras:llama-4-scout-17b-16e-instruct',\n name: 'Llama 4 Scout 17B',\n description:\n 'Llama 4 Scout 17B Instruct (16E) is a mixture-of-experts (MoE) language model developed by Meta, uses 16 experts per forward pass, activating 17 billion parameters out of a total of 109B. 
It supports native multimodal input (text and image) and multilingual output (text and code) across 12 supported languages.',\n input: {\n maxTokens: 32000,\n costPer1MTokens: 0.65,\n },\n output: {\n maxTokens: 16000,\n costPer1MTokens: 0.85,\n },\n tags: ['general-purpose', 'vision'],\n lifecycle: 'live',\n },\n 'cerebras:llama3.1-8b': {\n id: 'cerebras:llama3.1-8b',\n name: 'Llama 3.1 8B',\n description:\n 'Meta developed and released the Meta Llama 3 family of large language models (LLMs), a collection of pretrained and instruction tuned generative text models in 8B and 70B sizes. The Llama 3 instruction tuned models are optimized for dialogue use cases and outperform many of the available open source chat models on common industry benchmarks.',\n input: {\n maxTokens: 32000,\n costPer1MTokens: 0.1,\n },\n output: {\n maxTokens: 16000,\n costPer1MTokens: 0.1,\n },\n tags: ['low-cost', 'general-purpose'],\n lifecycle: 'live',\n },\n 'cerebras:llama3.3-70b': {\n id: 'cerebras:llama3.3-70b',\n name: 'Llama 3.3 70B',\n description:\n 'Meta developed and released the Meta Llama 3 family of large language models (LLMs), a collection of pretrained and instruction tuned generative text models in 8B and 70B sizes. The Llama 3 instruction tuned models are optimized for dialogue use cases and outperform many of the available open source chat models on common industry benchmarks.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.85,\n },\n output: {\n maxTokens: 16000,\n costPer1MTokens: 1.2,\n },\n tags: ['general-purpose'],\n lifecycle: 'live',\n },\n 'groq:openai/gpt-oss-20b': {\n id: 'groq:openai/gpt-oss-20b',\n name: 'GPT-OSS 20B (Preview)',\n description:\n 'gpt-oss-20b is a compact, open-weight language model optimized for low-latency. It shares the same training foundation and capabilities as the GPT-OSS 120B model, with faster responses and lower cost.',\n input: {\n maxTokens: 131000,\n costPer1MTokens: 0.1,\n },\n output: {\n maxTokens: 32000,\n costPer1MTokens: 0.5,\n },\n tags: ['preview', 'general-purpose', 'reasoning', 'low-cost'],\n lifecycle: 'live',\n },\n 'groq:openai/gpt-oss-120b': {\n id: 'groq:openai/gpt-oss-120b',\n name: 'GPT-OSS 120B (Preview)',\n description:\n 'gpt-oss-120b is a high-performance, open-weight language model designed for production-grade, general-purpose use cases. 
It excels at complex reasoning and supports configurable reasoning effort, full chain-of-thought transparency for easier debugging and trust, and native agentic capabilities for function calling, tool use, and structured outputs.',\n input: {\n maxTokens: 131000,\n costPer1MTokens: 0.15,\n },\n output: {\n maxTokens: 32000,\n costPer1MTokens: 0.75,\n },\n tags: ['preview', 'general-purpose', 'reasoning'],\n lifecycle: 'live',\n },\n 'groq:deepseek-r1-distill-llama-70b': {\n id: 'groq:deepseek-r1-distill-llama-70b',\n name: 'DeepSeek R1-Distill Llama 3.3 70B (Preview)',\n description:\n 'A fine-tuned version of Llama 3.3 70B using samples generated by DeepSeek-R1, making it smarter than the original Llama 70B, particularly for tasks requiring mathematical and factual precision.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.75,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 0.99,\n },\n tags: ['general-purpose', 'reasoning', 'preview'],\n lifecycle: 'live',\n },\n 'groq:llama-3.3-70b-versatile': {\n id: 'groq:llama-3.3-70b-versatile',\n name: 'LLaMA 3.3 70B',\n description:\n 'The Meta Llama 3.3 multilingual large language model (LLM) is a pretrained and instruction tuned generative model in 70B (text in/text out). The Llama 3.3 instruction tuned text only model is optimized for multilingual dialogue use cases and outperforms many of the available open source and closed chat models on common industry benchmarks.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.59,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 0.79,\n },\n tags: ['recommended', 'general-purpose', 'coding'],\n lifecycle: 'live',\n },\n 'groq:llama-3.2-1b-preview': {\n id: 'groq:llama-3.2-1b-preview',\n name: 'LLaMA 3.2 1B (Preview)',\n description:\n 'The Llama 3.2 instruction-tuned, text-only models are optimized for multilingual dialogue use cases, including agentic retrieval and summarization tasks.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.04,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.04,\n },\n tags: ['low-cost', 'deprecated'],\n lifecycle: 'discontinued',\n },\n 'groq:llama-3.2-3b-preview': {\n id: 'groq:llama-3.2-3b-preview',\n name: 'LLaMA 3.2 3B (Preview)',\n description:\n 'The Llama 3.2 instruction-tuned, text-only models are optimized for multilingual dialogue use cases, including agentic retrieval and summarization tasks.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.06,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.06,\n },\n tags: ['low-cost', 'general-purpose', 'deprecated'],\n lifecycle: 'discontinued',\n },\n 'groq:llama-3.2-11b-vision-preview': {\n id: 'groq:llama-3.2-11b-vision-preview',\n name: 'LLaMA 3.2 11B Vision (Preview)',\n description:\n 'The Llama 3.2-Vision instruction-tuned models are optimized for visual recognition, image reasoning, captioning, and answering general questions about an image.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.18,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.18,\n },\n tags: ['low-cost', 'vision', 'general-purpose', 'deprecated'],\n lifecycle: 'discontinued',\n },\n 'groq:llama-3.2-90b-vision-preview': {\n id: 'groq:llama-3.2-90b-vision-preview',\n name: 'LLaMA 3.2 90B Vision (Preview)',\n description:\n 'The Llama 3.2-Vision instruction-tuned models are optimized for visual recognition, image reasoning, captioning, and answering general questions about an image.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.9,\n },\n output: {\n maxTokens: 8192,\n 
costPer1MTokens: 0.9,\n },\n tags: ['vision', 'general-purpose', 'deprecated'],\n lifecycle: 'discontinued',\n },\n 'groq:llama-3.1-8b-instant': {\n id: 'groq:llama-3.1-8b-instant',\n name: 'LLaMA 3.1 8B',\n description: 'The Llama 3.1 instruction-tuned, text-only models are optimized for multilingual dialogue use cases.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.05,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.08,\n },\n tags: ['low-cost', 'general-purpose'],\n lifecycle: 'live',\n },\n 'groq:llama3-8b-8192': {\n id: 'groq:llama3-8b-8192',\n name: 'LLaMA 3 8B',\n description:\n 'Meta developed and released the Meta Llama 3 family of large language models (LLMs), a collection of pretrained and instruction tuned generative text models in 8 and 70B sizes. The Llama 3 instruction tuned models are optimized for dialogue use cases and outperform many of the available open source chat models on common industry benchmarks.',\n input: {\n maxTokens: 8192,\n costPer1MTokens: 0.05,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.08,\n },\n tags: ['low-cost', 'general-purpose', 'deprecated'],\n lifecycle: 'discontinued',\n },\n 'groq:llama3-70b-8192': {\n id: 'groq:llama3-70b-8192',\n name: 'LLaMA 3 70B',\n description:\n 'Meta developed and released the Meta Llama 3 family of large language models (LLMs), a collection of pretrained and instruction tuned generative text models in 8 and 70B sizes. The Llama 3 instruction tuned models are optimized for dialogue use cases and outperform many of the available open source chat models on common industry benchmarks.',\n input: {\n maxTokens: 8192,\n costPer1MTokens: 0.59,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.79,\n },\n tags: ['general-purpose', 'deprecated'],\n lifecycle: 'discontinued',\n },\n 'groq:gemma2-9b-it': {\n id: 'groq:gemma2-9b-it',\n name: 'Gemma2 9B',\n description:\n 'Redesigned for outsized performance and unmatched efficiency, Gemma 2 optimizes for blazing-fast inference on diverse hardware. Gemma is a family of lightweight, state-of-the-art open models from Google, built from the same research and technology used to create the Gemini models. They are text-to-text, decoder-only large language models, available in English, with open weights, pre-trained variants, and instruction-tuned variants. 
Gemma models are well-suited for a variety of text generation tasks, including question answering, summarization, and reasoning.',\n input: {\n maxTokens: 8192,\n costPer1MTokens: 0.2,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.2,\n },\n tags: ['low-cost', 'general-purpose'],\n lifecycle: 'live',\n },\n 'xai:grok-code-fast-1': {\n id: 'xai:grok-code-fast-1',\n name: 'Grok Code Fast 1',\n description: 'Fast coding-optimized Grok model with large context window.',\n input: {\n maxTokens: 256000,\n costPer1MTokens: 0.2,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 1.5,\n },\n tags: ['coding', 'general-purpose', 'low-cost'],\n lifecycle: 'live',\n },\n 'xai:grok-4-fast-reasoning': {\n id: 'xai:grok-4-fast-reasoning',\n name: 'Grok 4 Fast (Reasoning)',\n description: 'Advanced fast Grok model with reasoning and very large context.',\n input: {\n maxTokens: 2000000,\n costPer1MTokens: 0.2,\n },\n output: {\n maxTokens: 128000,\n costPer1MTokens: 0.5,\n },\n tags: ['reasoning', 'recommended', 'general-purpose'],\n lifecycle: 'live',\n },\n 'xai:grok-4-fast-non-reasoning': {\n id: 'xai:grok-4-fast-non-reasoning',\n name: 'Grok 4 Fast (Non-Reasoning)',\n description: 'Fast, cost-effective Grok model for non-reasoning tasks.',\n input: {\n maxTokens: 2000000,\n costPer1MTokens: 0.2,\n },\n output: {\n maxTokens: 128000,\n costPer1MTokens: 0.5,\n },\n tags: ['low-cost', 'recommended', 'general-purpose'],\n lifecycle: 'live',\n },\n 'xai:grok-4-0709': {\n id: 'xai:grok-4-0709',\n name: 'Grok 4 (0709)',\n description: 'Comprehensive Grok 4 model for general-purpose tasks.',\n input: {\n maxTokens: 256000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 15,\n },\n tags: ['reasoning', 'general-purpose'],\n lifecycle: 'live',\n },\n 'xai:grok-3-mini': {\n id: 'xai:grok-3-mini',\n name: 'Grok 3 Mini',\n description: 'Lightweight Grok model for cost-sensitive workloads.',\n input: {\n maxTokens: 131072,\n costPer1MTokens: 0.3,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 0.5,\n },\n tags: ['low-cost', 'general-purpose'],\n lifecycle: 'live',\n },\n 'xai:grok-3': {\n id: 'xai:grok-3',\n name: 'Grok 3',\n description: 'Enterprise-grade Grok model for general-purpose tasks.',\n input: {\n maxTokens: 131072,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 15,\n },\n tags: ['general-purpose'],\n lifecycle: 'live',\n },\n 'openrouter:gpt-oss-120b': {\n id: 'openrouter:gpt-oss-120b',\n name: 'GPT-OSS 120B (Preview)',\n description:\n 'gpt-oss-120b is a high-performance, open-weight language model designed for production-grade, general-purpose use cases. It excels at complex reasoning and supports configurable reasoning effort, full chain-of-thought transparency for easier debugging and trust, and native agentic capabilities for function calling, tool use, and structured outputs.',\n input: {\n maxTokens: 131000,\n costPer1MTokens: 0.15,\n },\n output: {\n maxTokens: 32000,\n costPer1MTokens: 0.75,\n },\n tags: ['preview', 'general-purpose', 'reasoning'],\n lifecycle: 'live',\n },\n 'fireworks-ai:gpt-oss-20b': {\n id: 'fireworks-ai:gpt-oss-20b',\n name: 'GPT-OSS 20B',\n description:\n 'gpt-oss-20b is a compact, open-weight language model optimized for low-latency. 
It shares the same training foundation and capabilities as the GPT-OSS 120B model, with faster responses and lower cost.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.07,\n },\n output: {\n maxTokens: 16000,\n costPer1MTokens: 0.3,\n },\n tags: ['general-purpose', 'reasoning', 'low-cost'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/gpt-oss-20b'],\n },\n 'fireworks-ai:gpt-oss-120b': {\n id: 'fireworks-ai:gpt-oss-120b',\n name: 'GPT-OSS 120B',\n description:\n 'gpt-oss-120b is a high-performance, open-weight language model designed for production-grade, general-purpose use cases. It excels at complex reasoning and supports configurable reasoning effort, full chain-of-thought transparency for easier debugging and trust, and native agentic capabilities for function calling, tool use, and structured outputs.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.15,\n },\n output: {\n maxTokens: 16000,\n costPer1MTokens: 0.6,\n },\n tags: ['general-purpose', 'reasoning'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/gpt-oss-120b'],\n },\n 'fireworks-ai:deepseek-r1-0528': {\n id: 'fireworks-ai:deepseek-r1-0528',\n name: 'DeepSeek R1 0528',\n description:\n 'The updated DeepSeek R1 0528 model delivers major improvements in reasoning, inference, and accuracy through enhanced post-training optimization and greater computational resources. It now performs at a level approaching top-tier models like OpenAI o3 and Gemini 2.5 Pro, with notable gains in complex tasks such as math and programming. The update also reduces hallucinations, improves function calling, and enhances the coding experience.',\n input: {\n maxTokens: 160000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 8,\n },\n tags: ['recommended', 'reasoning', 'general-purpose', 'coding'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/deepseek-r1-0528'],\n },\n 'fireworks-ai:deepseek-v3-0324': {\n id: 'fireworks-ai:deepseek-v3-0324',\n name: 'DeepSeek V3 0324',\n description:\n 'DeepSeek V3, a 685B-parameter, mixture-of-experts model, is the latest iteration of the flagship chat model family from the DeepSeek team. It succeeds the DeepSeek V3 model and performs really well on a variety of tasks.',\n input: {\n maxTokens: 160000,\n costPer1MTokens: 0.9,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 0.9,\n },\n tags: ['recommended', 'general-purpose'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/deepseek-v3-0324'],\n },\n 'fireworks-ai:llama4-maverick-instruct-basic': {\n id: 'fireworks-ai:llama4-maverick-instruct-basic',\n name: 'Llama 4 Maverick Instruct (Basic)',\n description:\n 'Llama 4 Maverick 17B Instruct (128E) is a high-capacity multimodal language model from Meta, built on a mixture-of-experts (MoE) architecture with 128 experts and 17 billion active parameters per forward pass (400B total). It supports multilingual text and image input, and produces multilingual text and code output across 12 supported languages. 
Optimized for vision-language tasks, Maverick is instruction-tuned for assistant-like behavior, image reasoning, and general-purpose multimodal interaction, and suited for research and commercial applications requiring advanced multimodal understanding and high model throughput.',\n input: {\n maxTokens: 1000000,\n costPer1MTokens: 0.22,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 0.88,\n },\n tags: ['general-purpose', 'vision'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/llama4-maverick-instruct-basic'],\n },\n 'fireworks-ai:llama4-scout-instruct-basic': {\n id: 'fireworks-ai:llama4-scout-instruct-basic',\n name: 'Llama 4 Scout Instruct (Basic)',\n description:\n 'Llama 4 Scout 17B Instruct (16E) is a mixture-of-experts (MoE) language model developed by Meta, uses 16 experts per forward pass, activating 17 billion parameters out of a total of 109B. It supports native multimodal input (text and image) and multilingual output (text and code) across 12 supported languages. Designed for assistant-style interaction and visual reasoning, it is instruction-tuned for use in multilingual chat, captioning, and image understanding tasks.',\n input: {\n maxTokens: 1048576,\n costPer1MTokens: 0.15,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 0.6,\n },\n tags: ['general-purpose', 'vision'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/llama4-scout-instruct-basic'],\n },\n 'fireworks-ai:llama-v3p3-70b-instruct': {\n id: 'fireworks-ai:llama-v3p3-70b-instruct',\n name: 'Llama 3.3 70B Instruct',\n description:\n 'Llama 3.3 70B Instruct is the December update of Llama 3.1 70B. The model improves upon Llama 3.1 70B (released July 2024) with advances in tool calling, multilingual text support, math and coding. The model achieves industry leading results in reasoning, math and instruction following and provides similar performance as 3.1 405B but with significant speed and cost improvements.',\n input: {\n maxTokens: 131072,\n costPer1MTokens: 0.9,\n },\n output: {\n maxTokens: 16384,\n costPer1MTokens: 0.9,\n },\n tags: ['general-purpose'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/llama-v3p3-70b-instruct'],\n },\n 'fireworks-ai:deepseek-r1': {\n id: 'fireworks-ai:deepseek-r1',\n name: 'DeepSeek R1 (Fast)',\n description:\n 'This version of the R1 model has a perfect balance between speed and cost-efficiency for real-time interactive experiences, with speeds up to 90 tokens per second.\\n\\nDeepSeek-R1 is a state-of-the-art large language model optimized with reinforcement learning and cold-start data for exceptional reasoning, math, and code performance. **Note**: This model will always use a temperature of 0.6 as recommended by DeepSeek.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 8,\n },\n tags: ['reasoning', 'general-purpose', 'coding'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/deepseek-r1'],\n },\n 'fireworks-ai:deepseek-r1-basic': {\n id: 'fireworks-ai:deepseek-r1-basic',\n name: 'DeepSeek R1 (Basic)',\n description:\n 'This version of the R1 model is optimized for throughput and cost-effectiveness and has a lower cost but slightly higher latency than the \"Fast\" version of the model.\\n\\nDeepSeek-R1 is a state-of-the-art large language model optimized with reinforcement learning and cold-start data for exceptional reasoning, math, and code performance. 
**Note**: This model will always use a temperature of 0.6 as recommended by DeepSeek.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.55,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 2.19,\n },\n tags: ['recommended', 'reasoning', 'general-purpose', 'coding'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/deepseek-r1-basic'],\n },\n 'fireworks-ai:deepseek-v3': {\n id: 'fireworks-ai:deepseek-v3',\n name: 'DeepSeek V3',\n description:\n 'A a strong Mixture-of-Experts (MoE) language model with 671B total parameters with 37B activated for each token from Deepseek.',\n input: {\n maxTokens: 128000,\n costPer1MTokens: 0.9,\n },\n output: {\n maxTokens: 8000,\n costPer1MTokens: 0.9,\n },\n tags: ['deprecated', 'general-purpose'],\n lifecycle: 'deprecated',\n aliases: ['accounts/fireworks/models/deepseek-v3'],\n },\n 'fireworks-ai:llama-v3p1-405b-instruct': {\n id: 'fireworks-ai:llama-v3p1-405b-instruct',\n name: 'Llama 3.1 405B Instruct',\n description:\n 'The Meta Llama 3.1 collection of multilingual large language models (LLMs) is a collection of pretrained and instruction tuned generative models in 8B, 70B and 405B sizes. The Llama 3.1 instruction tuned text only models (8B, 70B, 405B) are optimized for multilingual dialogue use cases and outperform many of the available open source and closed chat models on common industry benchmarks.',\n input: {\n maxTokens: 131072,\n costPer1MTokens: 3,\n },\n output: {\n maxTokens: 131072,\n costPer1MTokens: 3,\n },\n tags: ['deprecated', 'general-purpose'],\n lifecycle: 'deprecated',\n aliases: ['accounts/fireworks/models/llama-v3p1-405b-instruct'],\n },\n 'fireworks-ai:llama-v3p1-70b-instruct': {\n id: 'fireworks-ai:llama-v3p1-70b-instruct',\n name: 'Llama 3.1 70B Instruct',\n description:\n 'The Meta Llama 3.1 collection of multilingual large language models (LLMs) is a collection of pretrained and instruction tuned generative models in 8B, 70B and 405B sizes. The Llama 3.1 instruction tuned text only models (8B, 70B, 405B) are optimized for multilingual dialogue use cases and outperform many of the available open source and closed chat models on common industry benchmarks.',\n input: {\n maxTokens: 131072,\n costPer1MTokens: 0.9,\n },\n output: {\n maxTokens: 131072,\n costPer1MTokens: 0.9,\n },\n tags: ['deprecated', 'general-purpose'],\n lifecycle: 'deprecated',\n aliases: ['accounts/fireworks/models/llama-v3p1-70b-instruct'],\n },\n 'fireworks-ai:llama-v3p1-8b-instruct': {\n id: 'fireworks-ai:llama-v3p1-8b-instruct',\n name: 'Llama 3.1 8B Instruct',\n description:\n 'The Meta Llama 3.1 collection of multilingual large language models (LLMs) is a collection of pretrained and instruction tuned generative models in 8B, 70B and 405B sizes. The Llama 3.1 instruction tuned text only models (8B, 70B, 405B) are optimized for multilingual dialogue use cases and outperform many of the available open source and closed chat models on common industry benchmarks.',\n input: {\n maxTokens: 131072,\n costPer1MTokens: 0.2,\n },\n output: {\n maxTokens: 131072,\n costPer1MTokens: 0.2,\n },\n tags: ['low-cost', 'general-purpose'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/llama-v3p1-8b-instruct'],\n },\n 'fireworks-ai:mixtral-8x22b-instruct': {\n id: 'fireworks-ai:mixtral-8x22b-instruct',\n name: 'Mixtral MoE 8x22B Instruct',\n description:\n 'Mistral MoE 8x22B Instruct v0.1 model with Sparse Mixture of Experts. 
Fine tuned for instruction following.',\n input: {\n maxTokens: 65536,\n costPer1MTokens: 1.2,\n },\n output: {\n maxTokens: 65536,\n costPer1MTokens: 1.2,\n },\n tags: ['general-purpose'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/mixtral-8x22b-instruct'],\n },\n 'fireworks-ai:mixtral-8x7b-instruct': {\n id: 'fireworks-ai:mixtral-8x7b-instruct',\n name: 'Mixtral MoE 8x7B Instruct',\n description:\n 'Mistral MoE 8x7B Instruct v0.1 model with Sparse Mixture of Experts. Fine tuned for instruction following',\n input: {\n maxTokens: 32768,\n costPer1MTokens: 0.5,\n },\n output: {\n maxTokens: 32768,\n costPer1MTokens: 0.5,\n },\n tags: ['low-cost', 'general-purpose'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/mixtral-8x7b-instruct'],\n },\n 'fireworks-ai:mythomax-l2-13b': {\n id: 'fireworks-ai:mythomax-l2-13b',\n name: 'MythoMax L2 13b',\n description:\n 'MythoMax L2 is designed to excel at both roleplaying and storytelling, and is an improved variant of the previous MythoMix model, combining the MythoLogic-L2 and Huginn models.',\n input: {\n maxTokens: 4096,\n costPer1MTokens: 0.2,\n },\n output: {\n maxTokens: 4096,\n costPer1MTokens: 0.2,\n },\n tags: ['roleplay', 'storytelling', 'low-cost'],\n lifecycle: 'live',\n aliases: ['accounts/fireworks/models/mythomax-l2-13b'],\n },\n 'fireworks-ai:gemma2-9b-it': {\n id: 'fireworks-ai:gemma2-9b-it',\n name: 'Gemma 2 9B Instruct',\n description:\n 'Redesigned for outsized performance and unmatched efficiency, Gemma 2 optimizes for blazing-fast inference on diverse hardware. Gemma is a family of lightweight, state-of-the-art open models from Google, built from the same research and technology used to create the Gemini models. They are text-to-text, decoder-only large language models, available in English, with open weights, pre-trained variants, and instruction-tuned variants. 
Gemma models are well-suited for a variety of text generation tasks, including question answering, summarization, and reasoning.',\n input: {\n maxTokens: 8192,\n costPer1MTokens: 0.2,\n },\n output: {\n maxTokens: 8192,\n costPer1MTokens: 0.2,\n },\n tags: ['deprecated', 'low-cost', 'general-purpose'],\n lifecycle: 'deprecated',\n aliases: ['accounts/fireworks/models/gemma2-9b-it'],\n },\n}\n\nexport const knownTags = [\n 'auto',\n 'best',\n 'fast',\n 'reasoning',\n 'cheapest',\n 'balance',\n 'recommended',\n 'reasoning',\n 'general-purpose',\n 'low-cost',\n 'vision',\n 'coding',\n 'function-calling',\n 'agents',\n 'storytelling',\n 'preview',\n 'roleplay',\n]\n\nexport const defaultModel: RemoteModel = {\n id: '',\n name: '',\n description: '',\n input: {\n costPer1MTokens: 0,\n maxTokens: 1000000,\n },\n output: {\n costPer1MTokens: 0,\n maxTokens: 1000000,\n },\n tags: [],\n lifecycle: 'live',\n}\n", "import { type ErrorType } from '@botpress/client'\n\nexport type BotpressError = {\n isApiError: boolean\n code: number\n description: string\n type: ErrorType\n subtype?: string\n error?: unknown\n metadata?: unknown\n message?: string\n id: string\n}\n\ntype Action = 'fallback' | 'retry' | 'abort'\n\nexport const getActionFromError = (error: any): Action => {\n if (!isBotpressError(error)) {\n return 'retry'\n }\n\n if (error.type === 'InvalidDataFormat') {\n if (error.message?.includes('data/model/id')) {\n // Invalid Model ID, so we want to try another model\n return 'fallback'\n }\n\n // Usually means the request was malformed\n return 'abort'\n }\n\n if (\n error.type === 'QuotaExceeded' ||\n error.type === 'RateLimited' ||\n error.type === 'Unknown' ||\n error.type === 'LimitExceeded'\n ) {\n // These errors are usually temporary, so we want to retry\n return 'retry'\n }\n\n const subtype = (error.metadata as any)?.subtype\n if (subtype === 'UPSTREAM_PROVIDER_FAILED') {\n // The model is degraded, so we want to try another model\n return 'fallback'\n }\n\n if (error.type === 'Internal') {\n // This is an internal error, probably a lambda timeout\n return 'retry'\n }\n\n return 'abort'\n}\n\nexport const isNotFoundError = (error: any): boolean => isBotpressError(error) && error.type === 'ResourceNotFound'\n\nexport const isForbiddenOrUnauthorizedError = (error: any): boolean =>\n isBotpressError(error) && (error.type === 'Forbidden' || error.type === 'Unauthorized')\n\nexport const isBotpressError = (error: any): error is BotpressError =>\n typeof error === 'object' &&\n error !== null &&\n 'isApiError' in error &&\n 'code' in error &&\n 'type' in error &&\n 'id' in error\n", "export type Callback<T> = (error: any | null, value: T) => void\nexport type Interceptor<T> = (error: any | null, value: T, next: Callback<T>, done: Callback<T>) => Promise<void> | void\n\nexport class InterceptorManager<T> {\n private _interceptors: Interceptor<T>[] = []\n\n public use(interceptor: Interceptor<T>) {\n this._interceptors.push(interceptor)\n return () => this.remove(interceptor)\n }\n\n public remove(interceptor: Interceptor<T>) {\n this._interceptors = this._interceptors.filter((i) => i !== interceptor)\n }\n\n public async run(value: T, signal: AbortSignal): Promise<T> {\n let error: any | null = null\n let result: T = value\n let done = false\n\n for (const interceptor of this._interceptors) {\n if (done) {\n break\n }\n\n if (signal.aborted) {\n throw signal.reason\n }\n\n await new Promise<void>((resolve) => {\n void interceptor(\n error,\n result,\n (err, val) => {\n error = err\n result 
= val\n resolve()\n },\n (err, val) => {\n error = err\n result = val\n done = true\n resolve()\n }\n )\n })\n }\n\n if (error) {\n throw error\n }\n\n return result\n }\n}\n", "import { ExtendedClient, getExtendedClient } from './bp-client'\nimport { isForbiddenOrUnauthorizedError, isNotFoundError } from './errors'\nimport { Model as RawModel } from './schemas.gen'\nimport { BotpressClientLike } from './types'\n\nexport const DOWNTIME_THRESHOLD_MINUTES = 5\nconst PREFERENCES_FILE_SUFFIX = 'models.config.json'\n\nexport const DEFAULT_INTEGRATIONS = ['google-ai', 'anthropic', 'openai', 'cerebras', 'fireworks-ai', 'groq']\n\n// Biases for vendors and models\nconst VendorPreferences = ['google-ai', 'anthropic', 'openai']\nconst BestModelPreferences = ['4.1', '4o', '3-5-sonnet', 'gemini-1.5-pro']\nconst FastModelPreferences = ['gemini-1.5-flash', '4.1-mini', '4.1-nano', '4o-mini', 'flash', 'haiku']\n\nconst InputPricePenalty = 3 // $3 per 1M tokens\nconst OutputPricePenalty = 10 // $10 per 1M tokens\nconst LowTokensPenalty = 128_000 // 128k tokens\n\nexport type Model = RawModel & {\n ref: ModelRef\n integration: string\n}\n\nexport type ModelRef = `${string}:${string}`\n\nexport type ModelPreferences = {\n best: ModelRef[]\n fast: ModelRef[]\n downtimes: Array<{ ref: ModelRef; startedAt: string; reason: string }>\n}\n\nconst isRecommended = (model: Model) => model.tags.includes('recommended')\nconst isDeprecated = (model: Model) => model.tags.includes('deprecated')\nconst isLowCost = (model: Model) => model.tags.includes('low-cost')\nconst hasVisionSupport = (model: Model) => model.tags.includes('vision')\nconst isGeneralPurpose = (model: Model) => model.tags.includes('general-purpose')\n\nconst scoreModel = (model: Model, type: 'best' | 'fast', boosts: Record<ModelRef, number> = {}) => {\n let score: number = 0\n\n const scores: Array<[string, boolean, number]> = [\n ['input price penalty', model.input.costPer1MTokens > InputPricePenalty, -1],\n ['output price penalty', model.output.costPer1MTokens > OutputPricePenalty, -1],\n ['low tokens penalty', (model.input.maxTokens ?? 0) + (model.output.maxTokens ?? 0) < LowTokensPenalty, -1],\n ['recommended', isRecommended(model), 2],\n ['deprecated', isDeprecated(model), -2],\n ['vision support', hasVisionSupport(model), 1],\n ['general purpose', isGeneralPurpose(model), 1],\n ['vendor preference', VendorPreferences.includes(model.integration), 1],\n ['best model preference', type === 'best' && BestModelPreferences.some((x) => model.id.includes(x)), 1],\n ['fast model preference penalty', type === 'best' && FastModelPreferences.some((x) => model.id.includes(x)), -2],\n ['fast model preference', type === 'fast' && FastModelPreferences.some((x) => model.id.includes(x)), 2],\n ['low cost', type === 'fast' && isLowCost(model), 1],\n ]\n\n for (const rule in boosts) {\n if (model.ref.includes(rule)) {\n scores.push([`boost (${rule})`, true, Number(boosts[rule as ModelRef]) ?? 
0] as const)\n }\n }\n\n for (const [, condition, value] of scores) {\n if (condition) {\n score += value\n }\n }\n\n return score\n}\n\nexport const getBestModels = (models: Model[], boosts: Record<ModelRef, number> = {}) =>\n models.sort((a, b) => scoreModel(b, 'best', boosts) - scoreModel(a, 'best', boosts))\n\nexport const getFastModels = (models: Model[], boosts: Record<ModelRef, number> = {}) =>\n models.sort((a, b) => scoreModel(b, 'fast', boosts) - scoreModel(a, 'fast', boosts))\n\nexport const pickModel = (models: ModelRef[], downtimes: ModelPreferences['downtimes'] = []) => {\n const copy = [...models]\n const elasped = (date: string) => new Date().getTime() - new Date(date).getTime()\n const DOWNTIME_THRESHOLD = 1000 * 60 * DOWNTIME_THRESHOLD_MINUTES\n\n if (!copy.length) {\n throw new Error('At least one model is required')\n }\n\n while (copy.length) {\n const ref = copy.shift() as ModelRef\n const downtime = downtimes.find((o) => o.ref === ref && elasped(o.startedAt) < DOWNTIME_THRESHOLD)\n if (downtime) {\n continue\n } else {\n return ref\n }\n }\n\n throw new Error(`All models are down: ${models.join(', ')}`)\n}\n\nexport abstract class ModelProvider {\n public abstract fetchInstalledModels(): Promise<Model[]>\n public abstract fetchModelPreferences(): Promise<ModelPreferences | null>\n public abstract saveModelPreferences(preferences: ModelPreferences): Promise<void>\n public abstract deleteModelPreferences(): Promise<void>\n}\n\nexport class RemoteModelProvider extends ModelProvider {\n private _client: ExtendedClient\n\n public constructor(client: BotpressClientLike) {\n super()\n this._client = getExtendedClient(client)\n }\n\n private async _fetchInstalledIntegrationNames() {\n try {\n const { bot } = await this._client.getBot({ id: this._client.botId })\n const integrations = Object.values(bot.integrations).filter((x) => x.status === 'registered')\n return integrations.map((x) => x.name)\n } catch (err) {\n if (isForbiddenOrUnauthorizedError(err)) {\n // This happens when the bot (with a BAK token) tries to access the .getBot endpoint\n return DEFAULT_INTEGRATIONS\n }\n\n throw err\n }\n }\n\n public async fetchInstalledModels() {\n const integrationNames = await this._fetchInstalledIntegrationNames()\n const models: Model[] = []\n\n await Promise.allSettled(\n integrationNames.map(async (integration) => {\n const { output } = await this._client.callAction({\n type: `${integration}:listLanguageModels`,\n input: {},\n })\n\n if (!output?.models?.length) {\n return\n }\n\n for (const model of output.models as RawModel[]) {\n if (model.name && model.id && model.input && model.tags) {\n models.push({\n ref: `${integration}:${model.id}`,\n integration,\n id: model.id,\n name: model.name,\n description: model.description,\n input: model.input,\n output: model.output,\n tags: model.tags,\n })\n }\n }\n })\n )\n\n return models\n }\n\n public async fetchModelPreferences(): Promise<ModelPreferences | null> {\n try {\n const { file } = await this._client.getFile({ id: this._preferenceFileKey })\n\n if (globalThis.fetch !== undefined) {\n const response = await fetch(file.url)\n return (await response.json()) as ModelPreferences\n } else {\n const { data } = await this._client.axios.get(file.url, {\n // we piggy-back axios to avoid adding a new dependency\n // unset all headers to avoid S3 pre-signed signature mismatch\n headers: Object.keys(this._client.config.headers).reduce(\n (acc, key) => {\n acc[key] = undefined\n return acc\n },\n {} as Record<string, undefined>\n ),\n 
})\n return data as ModelPreferences\n }\n } catch (err) {\n if (isNotFoundError(err)) {\n return null\n }\n\n throw err\n }\n }\n\n public async saveModelPreferences(preferences: ModelPreferences) {\n await this._client.uploadFile({\n key: this._preferenceFileKey,\n content: JSON.stringify(preferences, null, 2),\n index: false,\n tags: {\n system: 'true',\n purpose: 'config',\n },\n })\n }\n\n public async deleteModelPreferences() {\n await this._client.deleteFile({ id: this._preferenceFileKey }).catch(() => {})\n }\n\n private get _preferenceFileKey() {\n return `bot->${this._client.botId}->${PREFERENCES_FILE_SUFFIX}`\n }\n}\n"],
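Editor's note: the embedded cognitive-v2/index.ts source above defines a CognitiveBeta client whose generateTextStream method POSTs to /v2/cognitive/generate-text-stream and yields one parsed NDJSON chunk at a time, with retry on transient 502/503/504 and network errors. The following is a minimal consumer sketch only, not part of the published dist files; it assumes CognitiveBeta is re-exported from the package root, uses placeholder credentials, and guesses at the CognitiveRequest shape (which is not visible in this diff).

// Illustrative sketch: exercises the streaming API defined in the embedded
// cognitive-v2/index.ts. Import path, credentials, and request payload are
// assumptions for demonstration, not taken from the published files.
import { CognitiveBeta } from '@botpress/cognitive'

async function streamCompletion(): Promise<void> {
  // botId and token are placeholders; the constructor maps them onto the
  // X-Bot-Id and Authorization headers, as shown in the embedded source.
  const client = new CognitiveBeta({ botId: '<bot-id>', token: '<token>' })

  // The CognitiveRequest shape is not shown in this diff, so the payload
  // below is a hypothetical example cast to any.
  const request = { messages: [{ role: 'user', content: 'Hello' }] } as any

  // generateTextStream is an async generator yielding CognitiveStreamChunk
  // objects, one per NDJSON line of the streamed response.
  for await (const chunk of client.generateTextStream(request, { timeout: 60_000 })) {
    console.log(chunk)
  }
}

void streamCompletion()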
| 5 | +
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAcA,QAAM,iBAAkC;MACtC,mBAAmB;MACnB,QAAQ;MACR,UAAU;MACV,eAAe;MACf,OAAO,WAAA;AAAM,eAAA;MAAA;MACb,eAAe;MACf,cAAc;;AAGhB,aAAgB,oBAAoB,SAAuB;AACzD,UAAM,YAAS,SAAA,SAAA,CAAA,GAAyB,cAAc,GAAK,OAAO;AAElE,UAAI,UAAU,gBAAgB,GAAG;AAC/B,kBAAU,gBAAgB;;AAG5B,aAAO;IACT;AARA,YAAA,sBAAA;;;;;;;;;ACxBA,aAAgB,WAAW,OAAa;AACpC,UAAM,gBAAgB,KAAK,OAAM,IAAK;AACtC,aAAO,KAAK,MAAM,aAAa;IACnC;AAHA,YAAA,aAAA;;;;;;;;;ACAA,aAAgB,SAAS,OAAa;AAClC,aAAO;IACX;AAFA,YAAA,WAAA;;;;;;;;;ACCA,QAAA,gBAAA;AACA,QAAA,cAAA;AAIA,aAAgB,cAAc,SAAwB;AACpD,cAAQ,QAAQ,QAAQ;QACtB,KAAK;AACH,iBAAO,cAAA;QAET,KAAK;QACL;AACE,iBAAO,YAAA;;IAEb;AATA,YAAA,gBAAA;;;;;;;;;ACJA,QAAA,mBAAA;AAEA,QAAA;;OAAA,WAAA;AAEE,iBAAAA,OAAoB,SAAwB;AAAxB,eAAA,UAAA;AADV,eAAA,UAAU;QAC2B;AAExC,QAAAA,OAAA,UAAA,QAAP,WAAA;AAAA,cAAA,QAAA;AACE,iBAAO,IAAI,QAAQ,SAAA,SAAO;AAAI,mBAAA,WAAW,SAAS,MAAK,aAAa;UAAtC,CAAuC;QACvE;AAEO,QAAAA,OAAA,UAAA,mBAAP,SAAwB,SAAe;AACrC,eAAK,UAAU;QACjB;AAEA,eAAA,eAAYA,OAAA,WAAA,iBAAa;eAAzB,WAAA;AACE,gBAAM,SAAS,iBAAA,cAAc,KAAK,OAAO;AACzC,mBAAO,OAAO,KAAK,KAAK;UAC1B;;;;AAEA,eAAA,eAAYA,OAAA,WAAA,SAAK;eAAjB,WAAA;AACE,gBAAM,WAAW,KAAK,QAAQ;AAC9B,gBAAM,OAAO,KAAK,QAAQ;AAC1B,gBAAM,QAAQ,KAAK;AACnB,gBAAM,QAAQ,WAAW,KAAK,IAAI,MAAM,KAAK;AAE7C,mBAAO,KAAK,IAAI,OAAO,KAAK,QAAQ,QAAQ;UAC9C;;;;AAEA,eAAA,eAAcA,OAAA,WAAA,wBAAoB;eAAlC,WAAA;AACE,mBAAO,KAAK;UACd;;;;AACF,eAAAA;MAAA,GA7BA;;AAAsB,YAAA,QAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACJtB,QAAA,eAAA;AAEA,QAAA;;OAAA,SAAA,QAAA;AAAoC,kBAAAC,iBAAA,MAAA;AAApC,iBAAAA,kBAAA;;QAYA;AAXiB,QAAAA,gBAAA,UAAA,QAAb,WAAA;;;AACI,qBAAA,CAAA,GAAO,KAAK,iBAAiB,OAAO,OAAA,UAAM,MAAK,KAAA,IAAA,CAAE;;;;AAGrD,eAAA,eAAYA,gBAAA,WAAA,kBAAc;eAA1B,WAAA;AACI,mBAAO,KAAK,YAAY;UAC5B;;;;AAEA,eAAA,eAAcA,gBAAA,WAAA,wBAAoB;eAAlC,WAAA;AACI,mBAAO,KAAK,UAAU;UAC1B;;;;AACJ,eAAAA;MAAA,GAZoC,aAAA,KAAK;;AAA5B,YAAA,iBAAA;;;;;;;;;;;;;;;;;;;;;;;;;;ACFb,QAAA,eAAA;AAEA,QAAA;;OAAA,SAAA,QAAA;AAAiC,kBAAAC,cAAA,MAAA;AAAjC,iBAAAA,eAAA;;QAAwC;AAAA,eAAAA;MAAA,GAAP,aAAA,KAAK;;AAAzB,YAAA,cAAA;;;;;;;;;ACDb,QAAA,qBAAA;AACA,QAAA,iBAAA;AAGA,aAAgB,aAAa,SAA0B,SAAe;AAClE,UAAM,QAAQ,eAAe,OAAO;AACpC,YAAM,iBAAiB,OAAO;AAC9B,aAAO;IACX;AAJA,YAAA,eAAA;AAMA,aAAS,eAAe,SAAwB;AAC5C,UAAI,CAAC,QAAQ,mBAAmB;AAC5B,eAAO,IAAI,mBAAA,eAAe,OAAO;;AAGrC,aAAO,IAAI,eAAA,YAAY,OAAO;IAClC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACjBA,QAAA,YAAA;AAKA,QAAA,kBAAA;AAIA,aAAsBC,SACpB,SACA,SAA4B;AAA5B,UAAA,YAAA,QAAA;AAAA,kBAAA,CAAA;MAA4B;;;;;;AAEtB,iCAAmB,UAAA,oBAAoB,OAAO;AAC9C,cAAAA,WAAU,IAAI,QAAQ,SAAS,gBAAgB;AAE9C,qBAAA,CAAA,GAAMA,SAAQ,QAAO,CAAE;;AAA9B,qBAAA,CAAA,GAAO,GAAA,KAAA,CAAuB;;;;;AAPhC,YAAA,UAAAA;AAUA,QAAA;;OAAA,WAAA;AAGE,iBAAAC,SACU,SACA,SAAwB;AADxB,eAAA,UAAA;AACA,eAAA,UAAA;AAJF,eAAA,gBAAgB;QAKrB;AAEU,QAAAA,SAAA,UAAA,UAAb,WAAA;;;;;;uBACS,CAAC,KAAK,oBAAmB,QAAA,CAAA,GAAA,CAAA;;;;AAE5B,yBAAA,CAAA,GAAM,KAAK,WAAU,CAAE;;AAAvB,qBAAA,KAAA;AACO,yBAAA,CAAA,GAAM,KAAK,QAAO,CAAE;;AAA3B,yBAAA,CAAA,GAAO,GAAA,KAAA,CAAoB;;;AAE3B,uBAAK;AACe,yBAAA,CAAA,GAAM,KAAK,QAAQ,MAAM,KAAG,KAAK,aAAa,CAAC;;AAA7D,gCAAc,GAAA,KAAA;AAEpB,sBAAI,CAAC,eAAe,KAAK,qBAAqB;AAC5C,0BAAM;;;;;;AAKZ,wBAAM,IAAI,MAAM,uBAAuB;;;;;AAGzC,eAAA,eAAYA,SAAA,WAAA,uBAAmB;eAA/B,WAAA;AACE,mBAAO,KAAK,iBAAiB,KAAK,QAAQ;UAC5C;;;;AAEc,QAAAA,SAAA,UAAA,aAAd,WAAA;;;;;;AACQ,0BAAQ,gBAAA,aAAa,KAAK,SAAS,KAAK,aAAa;AAC3D,yBAAA,CAAA,GAAM,MAAM,MAAK,CAAE;;AAAnB,qBAAA,KAAA;;;;;;;;;AAEJ,eAAAA;MAAA,GAlCA;;;;;;ACnBA,IAAAC,8BAAwB;;;ACAjB,IAAI,mBAAmB,OAAO;AAAA,EACnC,KAAK,UAAU,MAAM;AACnB,aACM,YAAY,KAAK,OAAO,KAAK,
KAAK,CAAC,GACrC,IAAI,GACJ,SAAS,UAAU,QACrB,IAAI,QACJ,KACA;AACA,gBAAU,CAAC,EAAE,GAAG,IAAI;AAAA,IACtB;AAAA,EACF;AAAA,EACA,QAAQ,CAAC;AAAA,EACT,GAAG,OAAO,IAAI;AACZ;AAAC,KAAC,KAAK,OAAO,KAAK,MAAM,CAAC,GAAG,KAAK,EAAE;AACpC,WAAO,MAAM;AACX,WAAK,OAAO,KAAK,IAAI,KAAK,OAAO,KAAK,GAAG,OAAO,OAAK,OAAO,CAAC;AAAA,IAC/D;AAAA,EACF;AACF;;;ACAO,IAAM,oBAAoB,CAAC,YAAqC;AACrE,QAAM,SAAS;AAEf,MAAI,CAAC,UAAU,WAAW,QAAQ,OAAO,WAAW,UAAU;AAC5D,UAAM,IAAI,MAAM,yEAAyE;AAAA,EAC3F;AAEA,MAAI,OAAO,OAAO,YAAY,YAAY,CAAC,CAAC,OAAO,SAAS;AAC1D,QAAI;AACF,aAAO,kBAAkB,OAAO,OAAO;AAAA,IACzC,QAAQ;AAAA,IAAC;AAAA,EACX;AAEA,MACE,OAAO,OAAO,gBAAgB,cAC9B,OAAO,OAAO,eAAe,cAC7B,CAAC,OAAO,UACR,OAAO,OAAO,WAAW,YACzB,CAAC,OAAO,OAAO,SACf;AACA,UAAM,IAAI,MAAM,yEAAyE;AAAA,EAC3F;AAEA,QAAM,QAAQ,OAAO,OAAO,QAAQ,UAAU;AAE9C,MAAI,CAAC,OAAO,QAAQ;AAClB,UAAM,IAAI,MAAM,yCAAyC;AAAA,EAC3D;AAEA,QAAM,QAAQ,MAAM;AAClB,UAAM,IAAI;AACV,QAAI,EAAE,SAAS,OAAO,EAAE,UAAU,YAAY;AAC5C,aAAO,kBAAkB,EAAE,MAAM,CAAC;AAAA,IACpC;AACA,WAAO,kBAAkB,IAAI,EAAE,YAAY,EAAE,MAAM,CAAC;AAAA,EACtD;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA,OAAQ,OAAe;AAAA,IACvB;AAAA,IACA,WAAW,CAAC,WAAwB;AAClC,YAAM,YAAY,MAAM;AACxB,YAAM,WAAW,UAAU;AAC3B,eAAS,SAAS,SAAS;AAC3B,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;ACnEA,iCAAwB;AADxB,OAAO,WAA8B;;;ACI9B,IAAM,SAAsC;AAAA,EACjD,2BAA2B;AAAA,IACzB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,iBAAiB;AAAA,IACpD,WAAW;AAAA,EACb;AAAA,EACA,gCAAgC;AAAA,IAC9B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,iBAAiB;AAAA,IACpD,WAAW;AAAA,EACb;AAAA,EACA,gCAAgC;AAAA,IAC9B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,aAAa,iBAAiB;AAAA,IACjD,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,aAAa,UAAU,QAAQ;AAAA,IACtC,WAAW;AAAA,EACb;AAAA,EACA,wBAAwB;AAAA,IACtB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,aAAa,UAAU,QAAQ;AAAA,IACtC,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,UAAU,iBAAiB;AAAA,IACjD,WAAW;AAAA,EACb;AAAA,EACA,kCAAkC;AAAA,IAChC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,UAAU,iBAAiB;AAAA,IACjD,WAAW;AAAA,EACb;AAAA,EACA,kCAAkC;AAAA,IAChC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,IACb,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,UAAU,iBAAiB;AAAA,IAC9C,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,aAAa,mBAAmB,QAAQ;AAAA,IAC/C,WAAW;AAAA,EACb;AAAA,EACA,wBAAwB;AAAA,IACtB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,M
ACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,aAAa,UAAU,iBAAiB;AAAA,IAC/C,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,aAAa,UAAU,iBAAiB;AAAA,IAC/C,WAAW;AAAA,EACb;AAAA,EACA,iCAAiC;AAAA,IAC/B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,UAAU,YAAY,mBAAmB,kBAAkB;AAAA,IACjF,WAAW;AAAA,EACb;AAAA,EACA,4BAA4B;AAAA,IAC1B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,UAAU,mBAAmB,UAAU,UAAU,kBAAkB;AAAA,IACzF,WAAW;AAAA,EACb;AAAA,EACA,4BAA4B;AAAA,IAC1B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,UAAU,mBAAmB,UAAU,UAAU,kBAAkB;AAAA,IACxF,WAAW;AAAA,EACb;AAAA,EACA,4BAA4B;AAAA,IAC1B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,UAAU,mBAAmB,UAAU,UAAU,kBAAkB;AAAA,IACxF,WAAW;AAAA,EACb;AAAA,EACA,iCAAiC;AAAA,IAC/B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,mBAAmB,UAAU,UAAU,kBAAkB;AAAA,IAC9E,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,mBAAmB,UAAU;AAAA,IAClD,WAAW;AAAA,EACb;AAAA,EACA,sCAAsC;AAAA,IACpC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,UAAU,UAAU,mBAAmB,QAAQ;AAAA,IAClF,WAAW;AAAA,EACb;AAAA,EACA,gDAAgD;AAAA,IAC9C,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,UAAU,aAAa,mBAAmB,UAAU,QAAQ;AAAA,IACjF,WAAW;AAAA,EACb;AAAA,EACA,wCAAwC;AAAA,IACtC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,UAAU,UAAU,mBAAmB,QAAQ;AAAA,IAClF,WAAW;AAAA,EACb;AAAA,EACA,kDAAkD;AAAA,IAChD,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,UAAU,aAAa,mBAAmB,UAAU,QAAQ;AAAA,IACjF,WAAW;AAAA,EACb;AAAA,EACA,uCAAuC;AAAA,IACrC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,UAAU;AAAA,IACpC,WAAW;AAAA,EACb;AAAA,EACA,wCAAwC;AAAA,IACtC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,UAAU,mBAAmB,UAAU,UAAU,oBAAoB,cAAc;AAAA,IAC1F,WAAW;AAAA,EACb;AAAA,EACA,wCAAwC;AAAA,IACtC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,i
BAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,UAAU,mBAAmB,UAAU,UAAU,oBAAoB,cAAc;AAAA,IAC1F,WAAW;AAAA,EACb;AAAA,EACA,qCAAqC;AAAA,IACnC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,iBAAiB;AAAA,IACpC,WAAW;AAAA,EACb;AAAA,EACA,8BAA8B;AAAA,IAC5B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,UAAU,mBAAmB,QAAQ;AAAA,IACxE,WAAW;AAAA,EACb;AAAA,EACA,4BAA4B;AAAA,IAC1B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,UAAU,mBAAmB,UAAU,QAAQ;AAAA,IAClF,WAAW;AAAA,EACb;AAAA,EACA,qCAAqC;AAAA,IACnC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,mBAAmB,QAAQ;AAAA,IAC9C,WAAW;AAAA,EACb;AAAA,EACA,yBAAyB;AAAA,IACvB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,WAAW,mBAAmB,WAAW;AAAA,IAChD,WAAW;AAAA,EACb;AAAA,EACA,uBAAuB;AAAA,IACrB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,WAAW;AAAA,IACrC,WAAW;AAAA,EACb;AAAA,EACA,2CAA2C;AAAA,IACzC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,QAAQ;AAAA,IAClC,WAAW;AAAA,EACb;AAAA,EACA,wBAAwB;AAAA,IACtB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,iBAAiB;AAAA,IACpC,WAAW;AAAA,EACb;AAAA,EACA,yBAAyB;AAAA,IACvB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,iBAAiB;AAAA,IACxB,WAAW;AAAA,EACb;AAAA,EACA,2BAA2B;AAAA,IACzB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,WAAW,mBAAmB,aAAa,UAAU;AAAA,IAC5D,WAAW;AAAA,EACb;AAAA,EACA,4BAA4B;AAAA,IAC1B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,WAAW,mBAAmB,WAAW;AAAA,IAChD,WAAW;AAAA,EACb;AAAA,EACA,sCAAsC;AAAA,IACpC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,aAAa,SAAS;AAAA,IAChD,WAAW;AAAA,EACb;AAAA,EACA,gCAAgC;AAAA,IAC9B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,mBAAmB,QAAQ;AAAA,IACjD,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,YAAY;AAAA,IAC/B,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO
;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,mBAAmB,YAAY;AAAA,IAClD,WAAW;AAAA,EACb;AAAA,EACA,qCAAqC;AAAA,IACnC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,UAAU,mBAAmB,YAAY;AAAA,IAC5D,WAAW;AAAA,EACb;AAAA,EACA,qCAAqC;AAAA,IACnC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,UAAU,mBAAmB,YAAY;AAAA,IAChD,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,IACb,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,iBAAiB;AAAA,IACpC,WAAW;AAAA,EACb;AAAA,EACA,uBAAuB;AAAA,IACrB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,mBAAmB,YAAY;AAAA,IAClD,WAAW;AAAA,EACb;AAAA,EACA,wBAAwB;AAAA,IACtB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,YAAY;AAAA,IACtC,WAAW;AAAA,EACb;AAAA,EACA,qBAAqB;AAAA,IACnB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,iBAAiB;AAAA,IACpC,WAAW;AAAA,EACb;AAAA,EACA,wBAAwB;AAAA,IACtB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,IACb,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,UAAU,mBAAmB,UAAU;AAAA,IAC9C,WAAW;AAAA,EACb;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,IACb,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,aAAa,eAAe,iBAAiB;AAAA,IACpD,WAAW;AAAA,EACb;AAAA,EACA,iCAAiC;AAAA,IAC/B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,IACb,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,eAAe,iBAAiB;AAAA,IACnD,WAAW;AAAA,EACb;AAAA,EACA,mBAAmB;AAAA,IACjB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,IACb,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,aAAa,iBAAiB;AAAA,IACrC,WAAW;AAAA,EACb;AAAA,EACA,mBAAmB;AAAA,IACjB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,IACb,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,iBAAiB;AAAA,IACpC,WAAW;AAAA,EACb;AAAA,EACA,cAAc;AAAA,IACZ,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,IACb,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,iBAAiB;AAAA,IACxB,WAAW;AAAA,EACb;AAAA,EACA,2BAA2B;AAAA,IACzB,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,WAAW,mBAAmB,WAAW;AAAA,IAChD,WAAW;AAAA,EACb;AAAA,EACA,4BAA4B;AAAA,IAC1B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,aAAa,UAAU;AAAA,IACjD,WAAW;AAAA,IACX,SAAS,CAAC,uCAAuC;AAAA,EACnD;AAAA,EA
CA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,WAAW;AAAA,IACrC,WAAW;AAAA,IACX,SAAS,CAAC,wCAAwC;AAAA,EACpD;AAAA,EACA,iCAAiC;AAAA,IAC/B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,mBAAmB,QAAQ;AAAA,IAC9D,WAAW;AAAA,IACX,SAAS,CAAC,4CAA4C;AAAA,EACxD;AAAA,EACA,iCAAiC;AAAA,IAC/B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,iBAAiB;AAAA,IACvC,WAAW;AAAA,IACX,SAAS,CAAC,4CAA4C;AAAA,EACxD;AAAA,EACA,+CAA+C;AAAA,IAC7C,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,QAAQ;AAAA,IAClC,WAAW;AAAA,IACX,SAAS,CAAC,0DAA0D;AAAA,EACtE;AAAA,EACA,4CAA4C;AAAA,IAC1C,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,mBAAmB,QAAQ;AAAA,IAClC,WAAW;AAAA,IACX,SAAS,CAAC,uDAAuD;AAAA,EACnE;AAAA,EACA,wCAAwC;AAAA,IACtC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,iBAAiB;AAAA,IACxB,WAAW;AAAA,IACX,SAAS,CAAC,mDAAmD;AAAA,EAC/D;AAAA,EACA,4BAA4B;AAAA,IAC1B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,aAAa,mBAAmB,QAAQ;AAAA,IAC/C,WAAW;AAAA,IACX,SAAS,CAAC,uCAAuC;AAAA,EACnD;AAAA,EACA,kCAAkC;AAAA,IAChC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,eAAe,aAAa,mBAAmB,QAAQ;AAAA,IAC9D,WAAW;AAAA,IACX,SAAS,CAAC,6CAA6C;AAAA,EACzD;AAAA,EACA,4BAA4B;AAAA,IAC1B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,iBAAiB;AAAA,IACtC,WAAW;AAAA,IACX,SAAS,CAAC,uCAAuC;AAAA,EACnD;AAAA,EACA,yCAAyC;AAAA,IACvC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,iBAAiB;AAAA,IACtC,WAAW;AAAA,IACX,SAAS,CAAC,oDAAoD;AAAA,EAChE;AAAA,EACA,wCAAwC;AAAA,IACtC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,iBAAiB;AAAA,IACtC,WAAW;AAAA,IACX,SAAS,CAAC,mDAAmD;AAAA,EAC/D;AAAA,EACA,uCAAuC;AAAA,IACrC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,iBAAiB;AAAA,IACpC,WAAW;AAAA,IACX,SAAS,CAAC,kDAAkD;AAAA,EAC9D;AAAA,EACA,uCAAuC;AAAA,IACrC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,iBAAiB;AAAA,IACxB,WAAW;AAAA,IACX,SAAS,CAAC,kDAAkD;AAAA,EAC9D;AAAA,EACA,sCAAsC;AAAA,IACpC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX
,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,iBAAiB;AAAA,IACpC,WAAW;AAAA,IACX,SAAS,CAAC,iDAAiD;AAAA,EAC7D;AAAA,EACA,gCAAgC;AAAA,IAC9B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,YAAY,gBAAgB,UAAU;AAAA,IAC7C,WAAW;AAAA,IACX,SAAS,CAAC,2CAA2C;AAAA,EACvD;AAAA,EACA,6BAA6B;AAAA,IAC3B,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aACE;AAAA,IACF,OAAO;AAAA,MACL,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,QAAQ;AAAA,MACN,WAAW;AAAA,MACX,iBAAiB;AAAA,IACnB;AAAA,IACA,MAAM,CAAC,cAAc,YAAY,iBAAiB;AAAA,IAClD,WAAW;AAAA,IACX,SAAS,CAAC,wCAAwC;AAAA,EACpD;AACF;AAEO,IAAM,YAAY;AAAA,EACvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEO,IAAM,eAA4B;AAAA,EACvC,IAAI;AAAA,EACJ,MAAM;AAAA,EACN,aAAa;AAAA,EACb,OAAO;AAAA,IACL,iBAAiB;AAAA,IACjB,WAAW;AAAA,EACb;AAAA,EACA,QAAQ;AAAA,IACN,iBAAiB;AAAA,IACjB,WAAW;AAAA,EACb;AAAA,EACA,MAAM,CAAC;AAAA,EACP,WAAW;AACb;;;AD7mCA,IAAM,YAAY,MAAM,OAAO,WAAW,eAAe,OAAO,OAAO,UAAU;AAE1E,IAAM,gBAAN,MAAoB;AAAA,EACjB;AAAA,EACS;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEV,YAAY,OAAoB;AACrC,SAAK,UAAU,MAAM,UAAU;AAC/B,SAAK,WAAW,MAAM,WAAW;AACjC,SAAK,mBAAmB,MAAM,mBAAmB;AACjD,SAAK,WAAW,EAAE,GAAG,MAAM,QAAQ;AAEnC,QAAI,MAAM,OAAO;AACf,WAAK,SAAS,UAAU,IAAI,MAAM;AAAA,IACpC;AAEA,QAAI,MAAM,OAAO;AACf,WAAK,SAAS,eAAe,IAAI,UAAU,MAAM,KAAK;AAAA,IACxD;AAEA,SAAK,eAAe,MAAM,OAAO;AAAA,MAC/B,SAAS,KAAK;AAAA,MACd,iBAAiB,KAAK;AAAA,MACtB,SAAS,KAAK;AAAA,IAChB,CAAC;AAAA,EACH;AAAA,EAEA,MAAa,aAAa,OAAyB,UAA0B,CAAC,GAAG;AAC/E,UAAM,SAAS,QAAQ,UAAU,YAAY,QAAQ,KAAK,QAAQ;AAElE,UAAM,EAAE,KAAK,IAAI,MAAM,KAAK;AAAA,MAAiB,MAC3C,KAAK,aAAa,KAAwB,+BAA+B,OAAO;AAAA,QAC9E;AAAA,QACA,SAAS,QAAQ,WAAW,KAAK;AAAA,MACnC,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,aAAa;AACxB,UAAM,EAAE,KAAK,IAAI,MAAM,KAAK;AAAA,MAAiB,MAC3C,KAAK,aAAa,IAAyB,sBAAsB;AAAA,IACnE;AAEA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,OAAc,mBACZ,SACA,UAA0B,CAAC,GAC0B;AACrD,UAAM,SAAS,QAAQ,UAAU,YAAY,QAAQ,KAAK,QAAQ;AAElE,QAAI,UAAU,GAAG;AACf,YAAMC,OAAM,MAAM,MAAM,GAAG,KAAK,OAAO,sCAAsC;AAAA,QAC3E,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,GAAG,KAAK;AAAA,UACR,gBAAgB;AAAA,QAClB;AAAA,QACA,aAAa,KAAK,mBAAmB,YAAY;AAAA,QACjD,MAAM,KAAK,UAAU,EAAE,GAAG,SAAS,QAAQ,KAAK,CAAC;AAAA,QACjD;AAAA,MACF,CAAC;AAED,UAAI,CAACA,KAAI,IAAI;AACX,cAAM,OAAO,MAAMA,KAAI,KAAK,EAAE,MAAM,MAAM,EAAE;AAC5C,cAAM,MAAM,IAAI,MAAM,QAAQA,KAAI,MAAM,KAAK,QAAQA,KAAI,UAAU,EAAE;AACpE,QAAC,IAAY,WAAW,EAAE,QAAQA,KAAI,QAAQ,MAAM,KAAK;AAC1D,cAAM;AAAA,MACR;AAEA,YAAM,OAAOA,KAAI;AACjB,UAAI,CAAC,MAAM;AACT,cAAM,IAAI,MAAM,iDAAiD;AAAA,MACnE;AAEA,YAAM,SAAS,KAAK,UAAU;AAC9B,YAAM,YAAY,mBAAmB;AACnC,mBAAS;AACP,gBAAM,EAAE,OAAO,KAAK,IAAI,MAAM,OAAO,KAAK;AAC1C,cAAI,MAAM;AACR;AAAA,UACF;AACA,cAAI,OAAO;AACT,kBAAM;AAAA,UACR;AAAA,QACF;AAAA,MACF,GAAG;AAEH,uBAAiB,OAAO,KAAK,QAA8B,QAAQ,GAAG;AACpE,cAAM;AAAA,MACR;AACA;AAAA,IACF;AAEA,UAAM,MAAM,MAAM,KAAK;AAAA,MAAiB,MACtC,KAAK,aAAa;AAAA,QAChB;AAAA,QACA,EAAE,GAAG,SAAS,QAAQ,KAAK;AAAA,QAC3B;AAAA,UACE,cAAc;AAAA,UACd;AAAA,UACA,SAAS,QAAQ,WAAW,KAAK;AAAA,QACnC;AAAA,MACF;AAAA,IACF;AAEA,UAAM,aAAwC,IAAI;AAClD,QAAI,CAAC,YAAY;AACf,YAAM,IAAI,MAAM,iDAAiD;AAAA,IACnE;AAEA,qBAAiB,OAAO,KAAK,QAA8B,UAAU,GAAG;AACtE,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,OAAe,QAAW,QAAqE;AAC7F,UAAM,UAAU,IAAI,YAAY,OAAO;AACvC,QAAI,SAAS;AAEb,qBAAiB,SAAS,QAAQ;AAChC,gBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAEhD,iBAAS;AACP,cAAM,IAAI,OAAO,QAAQ,IAAI;AAC7B,YAAI,IAAI,GAAG;AACT;AAAA,QACF;AAEA,cAAM,OAAO,OAAO,MAAM,GAAG,CAAC,EAAE,QAAQ,OAAO,
EAAE;AACjD,iBAAS,OAAO,MAAM,IAAI,CAAC;AAE3B,YAAI,CAAC,MAAM;AACT;AAAA,QACF;AAEA,cAAM,KAAK,MAAM,IAAI;AAAA,MACvB;AAAA,IACF;AAEA,cAAU,QAAQ,OAAO;AAEzB,UAAM,OAAO,OAAO,KAAK;AACzB,QAAI,MAAM;AACR,YAAM,KAAK,MAAM,IAAI;AAAA,IACvB;AAAA,EACF;AAAA,EAEQ,wBAAwB,OAAqB;AACnD,QAAI,MAAM,aAAa,KAAK,GAAG;AAC7B,UAAI,CAAC,MAAM,UAAU;AACnB,eAAO;AAAA,MACT;AAEA,YAAM,SAAS,MAAM,UAAU;AAC/B,UAAI,UAAU,CAAC,KAAK,KAAK,GAAG,EAAE,SAAS,MAAM,GAAG;AAC9C,eAAO;AAAA,MACT;AAEA,UACE,MAAM,QACN,CAAC,gBAAgB,cAAc,aAAa,aAAa,aAAa,OAAO,EAAE,SAAS,MAAM,IAAI,GAClG;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,iBAAoB,IAAkC;AAClE,eAAO,oCAAQ,IAAI;AAAA,MACjB,eAAe;AAAA,MACf,eAAe;AAAA,MACf,cAAc;AAAA,MACd,QAAQ;AAAA,MACR,OAAO,CAAC,MAAM,KAAK,wBAAwB,CAAC;AAAA,IAC9C,CAAC;AAAA,EACH;AACF;AAEO,IAAM,sBAAsB,CAAC,UAAqC;AACvE,MAAI,OAAO,KAAK,GAAG;AACjB,WAAO,OAAO,KAAK;AAAA,EACrB;AAGA,QAAM,QAAQ,OAAO,OAAO,MAAM,EAAE,KAAK,CAAC,MAAM,EAAE,SAAS,SAAS,KAAK,CAAC;AAC1E,MAAI,OAAO;AACT,WAAO;AAAA,EACT;AAGA,MAAI,UAAU,SAAS,KAAK,GAAG;AAC7B,WAAO,EAAE,GAAG,cAAc,IAAI,OAAO,MAAM,MAAM;AAAA,EACnD;AACA,SAAO;AACT;;;AEhNO,IAAM,qBAAqB,CAAC,UAAuB;AACxD,MAAI,CAAC,gBAAgB,KAAK,GAAG;AAC3B,WAAO;AAAA,EACT;AAEA,MAAI,MAAM,SAAS,qBAAqB;AACtC,QAAI,MAAM,SAAS,SAAS,eAAe,GAAG;AAE5C,aAAO;AAAA,IACT;AAGA,WAAO;AAAA,EACT;AAEA,MACE,MAAM,SAAS,mBACf,MAAM,SAAS,iBACf,MAAM,SAAS,aACf,MAAM,SAAS,iBACf;AAEA,WAAO;AAAA,EACT;AAEA,QAAM,UAAW,MAAM,UAAkB;AACzC,MAAI,YAAY,4BAA4B;AAE1C,WAAO;AAAA,EACT;AAEA,MAAI,MAAM,SAAS,YAAY;AAE7B,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAEO,IAAM,kBAAkB,CAAC,UAAwB,gBAAgB,KAAK,KAAK,MAAM,SAAS;AAE1F,IAAM,iCAAiC,CAAC,UAC7C,gBAAgB,KAAK,MAAM,MAAM,SAAS,eAAe,MAAM,SAAS;AAEnE,IAAM,kBAAkB,CAAC,UAC9B,OAAO,UAAU,YACjB,UAAU,QACV,gBAAgB,SAChB,UAAU,SACV,UAAU,SACV,QAAQ;;;AC/DH,IAAM,qBAAN,MAA4B;AAAA,EACzB,gBAAkC,CAAC;AAAA,EAEpC,IAAI,aAA6B;AACtC,SAAK,cAAc,KAAK,WAAW;AACnC,WAAO,MAAM,KAAK,OAAO,WAAW;AAAA,EACtC;AAAA,EAEO,OAAO,aAA6B;AACzC,SAAK,gBAAgB,KAAK,cAAc,OAAO,CAAC,MAAM,MAAM,WAAW;AAAA,EACzE;AAAA,EAEA,MAAa,IAAI,OAAU,QAAiC;AAC1D,QAAI,QAAoB;AACxB,QAAI,SAAY;AAChB,QAAI,OAAO;AAEX,eAAW,eAAe,KAAK,eAAe;AAC5C,UAAI,MAAM;AACR;AAAA,MACF;AAEA,UAAI,OAAO,SAAS;AAClB,cAAM,OAAO;AAAA,MACf;AAEA,YAAM,IAAI,QAAc,CAAC,YAAY;AACnC,aAAK;AAAA,UACH;AAAA,UACA;AAAA,UACA,CAAC,KAAK,QAAQ;AACZ,oBAAQ;AACR,qBAAS;AACT,oBAAQ;AAAA,UACV;AAAA,UACA,CAAC,KAAK,QAAQ;AACZ,oBAAQ;AACR,qBAAS;AACT,mBAAO;AACP,oBAAQ;AAAA,UACV;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH;AAEA,QAAI,OAAO;AACT,YAAM;AAAA,IACR;AAEA,WAAO;AAAA,EACT;AACF;;;ACjDO,IAAM,6BAA6B;AAC1C,IAAM,0BAA0B;AAEzB,IAAM,uBAAuB,CAAC,aAAa,aAAa,UAAU,YAAY,gBAAgB,MAAM;AAG3G,IAAM,oBAAoB,CAAC,aAAa,aAAa,QAAQ;AAC7D,IAAM,uBAAuB,CAAC,OAAO,MAAM,cAAc,gBAAgB;AACzE,IAAM,uBAAuB,CAAC,oBAAoB,YAAY,YAAY,WAAW,SAAS,OAAO;AAErG,IAAM,oBAAoB;AAC1B,IAAM,qBAAqB;AAC3B,IAAM,mBAAmB;AAezB,IAAM,gBAAgB,CAAC,UAAiB,MAAM,KAAK,SAAS,aAAa;AACzE,IAAM,eAAe,CAAC,UAAiB,MAAM,KAAK,SAAS,YAAY;AACvE,IAAM,YAAY,CAAC,UAAiB,MAAM,KAAK,SAAS,UAAU;AAClE,IAAM,mBAAmB,CAAC,UAAiB,MAAM,KAAK,SAAS,QAAQ;AACvE,IAAM,mBAAmB,CAAC,UAAiB,MAAM,KAAK,SAAS,iBAAiB;AAEhF,IAAM,aAAa,CAAC,OAAc,MAAuB,SAAmC,CAAC,MAAM;AACjG,MAAI,QAAgB;AAEpB,QAAM,SAA2C;AAAA,IAC/C,CAAC,uBAAuB,MAAM,MAAM,kBAAkB,mBAAmB,EAAE;AAAA,IAC3E,CAAC,wBAAwB,MAAM,OAAO,kBAAkB,oBAAoB,EAAE;AAAA,IAC9E,CAAC,uBAAuB,MAAM,MAAM,aAAa,MAAM,MAAM,OAAO,aAAa,KAAK,kBAAkB,EAAE;AAAA,IAC1G,CAAC,eAAe,cAAc,KAAK,GAAG,CAAC;AAAA,IACvC,CAAC,cAAc,aAAa,KAAK,GAAG,EAAE;AAAA,IACtC,CAAC,kBAAkB,iBAAiB,KAAK,GAAG,CAAC;AAAA,IAC7C,CAAC,mBAAmB,iBAAiB,KAAK,GAAG,CAAC;AAAA,IAC9C,CAAC,qBAAqB,kBAAkB,SAAS,MAAM,WAAW,GAAG,CAAC;AAAA,IACtE,CAAC,yBAAyB,SAAS,UAAU,qBAAqB,KAAK,CAAC,MAAM,MAAM,GAAG,SAAS,CAAC,CAAC,GAAG,CAAC;AAAA,IACtG,CAAC,iCAAiC,SAAS,UAAU,qBAAqB,KAAK,CAAC,MAAM,MAAM,GAA
G,SAAS,CAAC,CAAC,GAAG,EAAE;AAAA,IAC/G,CAAC,yBAAyB,SAAS,UAAU,qBAAqB,KAAK,CAAC,MAAM,MAAM,GAAG,SAAS,CAAC,CAAC,GAAG,CAAC;AAAA,IACtG,CAAC,YAAY,SAAS,UAAU,UAAU,KAAK,GAAG,CAAC;AAAA,EACrD;AAEA,aAAW,QAAQ,QAAQ;AACzB,QAAI,MAAM,IAAI,SAAS,IAAI,GAAG;AAC5B,aAAO,KAAK,CAAC,UAAU,IAAI,KAAK,MAAM,OAAO,OAAO,IAAgB,CAAC,KAAK,CAAC,CAAU;AAAA,IACvF;AAAA,EACF;AAEA,aAAW,CAAC,EAAE,WAAW,KAAK,KAAK,QAAQ;AACzC,QAAI,WAAW;AACb,eAAS;AAAA,IACX;AAAA,EACF;AAEA,SAAO;AACT;AAEO,IAAM,gBAAgB,CAACC,SAAiB,SAAmC,CAAC,MACjFA,QAAO,KAAK,CAAC,GAAG,MAAM,WAAW,GAAG,QAAQ,MAAM,IAAI,WAAW,GAAG,QAAQ,MAAM,CAAC;AAE9E,IAAM,gBAAgB,CAACA,SAAiB,SAAmC,CAAC,MACjFA,QAAO,KAAK,CAAC,GAAG,MAAM,WAAW,GAAG,QAAQ,MAAM,IAAI,WAAW,GAAG,QAAQ,MAAM,CAAC;AAE9E,IAAM,YAAY,CAACA,SAAoB,YAA2C,CAAC,MAAM;AAC9F,QAAM,OAAO,CAAC,GAAGA,OAAM;AACvB,QAAM,UAAU,CAAC,UAAiB,oBAAI,KAAK,GAAE,QAAQ,IAAI,IAAI,KAAK,IAAI,EAAE,QAAQ;AAChF,QAAM,qBAAqB,MAAO,KAAK;AAEvC,MAAI,CAAC,KAAK,QAAQ;AAChB,UAAM,IAAI,MAAM,gCAAgC;AAAA,EAClD;AAEA,SAAO,KAAK,QAAQ;AAClB,UAAM,MAAM,KAAK,MAAM;AACvB,UAAM,WAAW,UAAU,KAAK,CAAC,MAAM,EAAE,QAAQ,OAAO,QAAQ,EAAE,SAAS,IAAI,kBAAkB;AACjG,QAAI,UAAU;AACZ;AAAA,IACF,OAAO;AACL,aAAO;AAAA,IACT;AAAA,EACF;AAEA,QAAM,IAAI,MAAM,wBAAwBA,QAAO,KAAK,IAAI,CAAC,EAAE;AAC7D;AAEO,IAAe,gBAAf,MAA6B;AAKpC;AAEO,IAAM,sBAAN,cAAkC,cAAc;AAAA,EAC7C;AAAA,EAED,YAAY,QAA4B;AAC7C,UAAM;AACN,SAAK,UAAU,kBAAkB,MAAM;AAAA,EACzC;AAAA,EAEA,MAAc,kCAAkC;AAC9C,QAAI;AACF,YAAM,EAAE,IAAI,IAAI,MAAM,KAAK,QAAQ,OAAO,EAAE,IAAI,KAAK,QAAQ,MAAM,CAAC;AACpE,YAAM,eAAe,OAAO,OAAO,IAAI,YAAY,EAAE,OAAO,CAAC,MAAM,EAAE,WAAW,YAAY;AAC5F,aAAO,aAAa,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,IACvC,SAAS,KAAK;AACZ,UAAI,+BAA+B,GAAG,GAAG;AAEvC,eAAO;AAAA,MACT;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAa,uBAAuB;AAClC,UAAM,mBAAmB,MAAM,KAAK,gCAAgC;AACpE,UAAMA,UAAkB,CAAC;AAEzB,UAAM,QAAQ;AAAA,MACZ,iBAAiB,IAAI,OAAO,gBAAgB;AAC1C,cAAM,EAAE,OAAO,IAAI,MAAM,KAAK,QAAQ,WAAW;AAAA,UAC/C,MAAM,GAAG,WAAW;AAAA,UACpB,OAAO,CAAC;AAAA,QACV,CAAC;AAED,YAAI,CAAC,QAAQ,QAAQ,QAAQ;AAC3B;AAAA,QACF;AAEA,mBAAW,SAAS,OAAO,QAAsB;AAC/C,cAAI,MAAM,QAAQ,MAAM,MAAM,MAAM,SAAS,MAAM,MAAM;AACvD,YAAAA,QAAO,KAAK;AAAA,cACV,KAAK,GAAG,WAAW,IAAI,MAAM,EAAE;AAAA,cAC/B;AAAA,cACA,IAAI,MAAM;AAAA,cACV,MAAM,MAAM;AAAA,cACZ,aAAa,MAAM;AAAA,cACnB,OAAO,MAAM;AAAA,cACb,QAAQ,MAAM;AAAA,cACd,MAAM,MAAM;AAAA,YACd,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAOA;AAAA,EACT;AAAA,EAEA,MAAa,wBAA0D;AACrE,QAAI;AACF,YAAM,EAAE,KAAK,IAAI,MAAM,KAAK,QAAQ,QAAQ,EAAE,IAAI,KAAK,mBAAmB,CAAC;AAE3E,UAAI,WAAW,UAAU,QAAW;AAClC,cAAM,WAAW,MAAM,MAAM,KAAK,GAAG;AACrC,eAAQ,MAAM,SAAS,KAAK;AAAA,MAC9B,OAAO;AACL,cAAM,EAAE,KAAK,IAAI,MAAM,KAAK,QAAQ,MAAM,IAAI,KAAK,KAAK;AAAA;AAAA;AAAA,UAGtD,SAAS,OAAO,KAAK,KAAK,QAAQ,OAAO,OAAO,EAAE;AAAA,YAChD,CAAC,KAAK,QAAQ;AACZ,kBAAI,GAAG,IAAI;AACX,qBAAO;AAAA,YACT;AAAA,YACA,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AACD,eAAO;AAAA,MACT;AAAA,IACF,SAAS,KAAK;AACZ,UAAI,gBAAgB,GAAG,GAAG;AACxB,eAAO;AAAA,MACT;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAa,qBAAqB,aAA+B;AAC/D,UAAM,KAAK,QAAQ,WAAW;AAAA,MAC5B,KAAK,KAAK;AAAA,MACV,SAAS,KAAK,UAAU,aAAa,MAAM,CAAC;AAAA,MAC5C,OAAO;AAAA,MACP,MAAM;AAAA,QACJ,QAAQ;AAAA,QACR,SAAS;AAAA,MACX;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAa,yBAAyB;AACpC,UAAM,KAAK,QAAQ,WAAW,EAAE,IAAI,KAAK,mBAAmB,CAAC,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC;AAAA,EAC/E;AAAA,EAEA,IAAY,qBAAqB;AAC/B,WAAO,QAAQ,KAAK,QAAQ,KAAK,KAAK,uBAAuB;AAAA,EAC/D;AACF;;;AP/LO,IAAM,YAAN,MAAM,WAAU;AAAA,EACrB,CAAQ,gBAAgB,IAAI;AAAA,EAE5B,OAAc,kBAAkB,KAA4B;AAC1D,WAAO,KAAK,mBAAmB;AAAA,EACjC;AAAA,EAEO,eAAe;AAAA,IACpB,SAAS,IAAI,mBAA4B;AAAA,IACzC,UAAU,IAAI,mBAA6B;AAAA,EAC7C;AAAA,EAEU,UAAmB,CAAC;AAAA,EACpB,aAAqB,IAAI,KAAK;AAAA;AAAA,EAC9B,cAAsB;AAAA;AAAA,EACtB;AAAA,EACA,eAAwC;AAAA,EACxC;AAAA,EACA,
aAA4C,CAAC;AAAA,EAC7C,WAAoB;AAAA,EAEtB,UAAU,iBAAyB;AAAA,EAEpC,YAAY,OAAuB;AACxC,SAAK,UAAU,kBAAkB,MAAM,MAAM;AAC7C,SAAK,YAAY,MAAM,YAAY,IAAI,oBAAoB,MAAM,MAAM;AACvE,SAAK,aAAa,MAAM,WAAW,KAAK;AACxC,SAAK,cAAc,MAAM,cAAc,KAAK;AAC5C,SAAK,WAAW,MAAM,uBAAuB;AAAA,EAC/C;AAAA,EAEA,IAAW,SAAyB;AAClC,WAAO,KAAK;AAAA,EACd;AAAA,EAEO,QAAmB;AACxB,UAAM,OAAO,IAAI,WAAU;AAAA,MACzB,QAAQ,KAAK,QAAQ,MAAM;AAAA,MAC3B,UAAU,KAAK;AAAA,MACf,SAAS,KAAK;AAAA,MACd,YAAY,KAAK;AAAA,IACnB,CAAC;AAED,SAAK,UAAU,CAAC,GAAG,KAAK,OAAO;AAC/B,SAAK,eAAe,KAAK,eAAe,EAAE,GAAG,KAAK,aAAa,IAAI;AACnE,SAAK,aAAa,CAAC,GAAG,KAAK,UAAU;AACrC,SAAK,aAAa,UAAU,KAAK,aAAa;AAC9C,SAAK,aAAa,WAAW,KAAK,aAAa;AAE/C,WAAO;AAAA,EACT;AAAA,EAEO,GAAuC,OAAU,IAA4B;AAClF,WAAO,KAAK,QAAQ,GAAG,OAAO,EAAE;AAAA,EAClC;AAAA,EAEA,MAAa,uBAAyC;AACpD,QAAI,CAAC,KAAK,QAAQ,QAAQ;AACxB,WAAK,UAAU,MAAM,KAAK,UAAU,qBAAqB;AAAA,IAC3D;AAEA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAa,mBAA8C;AACzD,QAAI,KAAK,cAAc;AACrB,aAAO,KAAK;AAAA,IACd;AAEA,SAAK,eAAe,MAAM,KAAK,UAAU,sBAAsB;AAE/D,QAAI,KAAK,cAAc;AACrB,aAAO,KAAK;AAAA,IACd;AAEA,UAAMC,UAAS,MAAM,KAAK,qBAAqB;AAE/C,SAAK,eAAe;AAAA,MAClB,MAAM,cAAcA,OAAM,EAAE,IAAI,CAAC,MAAM,EAAE,GAAG;AAAA,MAC5C,MAAM,cAAcA,OAAM,EAAE,IAAI,CAAC,MAAM,EAAE,GAAG;AAAA,MAC5C,WAAW,CAAC;AAAA,IACd;AAEA,UAAM,KAAK,UAAU,qBAAqB,KAAK,YAAY;AAE3D,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAa,eAAe,aAA+B,OAAgB,OAAsB;AAC/F,SAAK,eAAe;AAEpB,QAAI,MAAM;AACR,YAAM,KAAK,UAAU,qBAAqB,WAAW;AAAA,IACvD;AAAA,EACF;AAAA,EAEQ,uBAA6B;AACnC,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,cAAc,MAAO,KAAK;AAEhC,SAAK,aAAc,YAAY,KAAK,aAAc,UAAU,OAAO,CAAC,aAAa;AAC/E,YAAM,gBAAgB,IAAI,KAAK,SAAS,SAAS,EAAE,QAAQ;AAC3D,aAAO,MAAM,iBAAiB;AAAA,IAChC,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,aAAa,KAA8D;AACvF,UAAM,WAAW,CAACC,SAAgB;AAChC,YAAM,QAAQA,KAAI,MAAM,GAAG;AAC3B,aAAO,EAAE,aAAa,MAAM,CAAC,GAAI,OAAO,MAAM,MAAM,CAAC,EAAE,KAAK,GAAG,EAAE;AAAA,IACnE;AAEA,UAAM,cAAc,MAAM,KAAK,iBAAiB;AAEhD,gBAAY,SAAS,CAAC;AACtB,gBAAY,SAAS,CAAC;AACtB,gBAAY,cAAc,CAAC;AAE3B,UAAM,YAAY,CAAC,GAAG,YAAY,WAAW,GAAI,KAAK,cAAc,CAAC,CAAE;AAEvE,QAAI,QAAQ,QAAQ;AAClB,aAAO,SAAS,UAAU,YAAY,MAAM,SAAS,CAAC;AAAA,IACxD;AAEA,QAAI,QAAQ,QAAQ;AAClB,aAAO,SAAS,UAAU,YAAY,MAAM,SAAS,CAAC;AAAA,IACxD;AAEA,WAAO,SAAS,UAAU,CAAC,KAAiB,GAAG,YAAY,MAAM,GAAG,YAAY,IAAI,GAAG,SAAS,CAAC;AAAA,EACnG;AAAA,EAEA,MAAa,gBAAgB,OAA+B;AAC1D,QAAI,KAAK,UAAU;AACjB,YAAM,gBAAgB,oBAAoB,KAAK;AAC/C,UAAI,eAAe;AACjB,eAAO,EAAE,GAAG,eAAe,KAAK,cAAc,IAAgB,aAAa,eAAe;AAAA,MAC5F;AAAA,IACF;AAEA,UAAM,KAAK,qBAAqB;AAChC,UAAM,EAAE,aAAa,OAAO,UAAU,IAAI,MAAM,KAAK,aAAa,KAAK;AACvE,UAAM,MAAM,KAAK,QAAQ,KAAK,CAAC,MAAM,EAAE,gBAAgB,gBAAgB,EAAE,SAAS,aAAa,EAAE,OAAO,UAAU;AAClH,QAAI,CAAC,KAAK;AACR,YAAM,IAAI,MAAM,SAAS,SAAS,YAAY;AAAA,IAChD;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,gBAAgB,OAAsC;AACjE,QAAI,CAAC,KAAK,YAAY,CAAC,oBAAoB,MAAM,KAAM,GAAG;AACxD,aAAO,KAAK,iBAAiB,KAAK;AAAA,IACpC;AAEA,UAAM,aAAa,IAAI,cAAc,KAAK,QAAQ,MAAa;AAC/D,UAAM,WAAW,MAAM,WAAW,aAAa,KAAY;AAE3D,WAAO;AAAA,MACL,QAAQ;AAAA,QACN,IAAI;AAAA,QACJ,UAAU,SAAS,SAAS;AAAA,QAC5B,OAAO,SAAS,SAAS;AAAA,QACzB,SAAS;AAAA,UACP;AAAA,YACE,MAAM;AAAA,YACN,SAAS,SAAS;AAAA,YAClB,MAAM;AAAA,YACN,OAAO;AAAA,YACP,YAAY,SAAS,SAAS;AAAA,UAChC;AAAA,QACF;AAAA,QACA,OAAO;AAAA,UACL,aAAa,SAAS,SAAS,MAAM;AAAA,UACrC,WAAW;AAAA,UACX,cAAc,SAAS,SAAS,MAAM;AAAA,UACtC,YAAY,SAAS,SAAS,QAAQ;AAAA,QACxC;AAAA,QACA,UAAU;AAAA,UACR,MAAM,SAAS,SAAS,QAAQ;AAAA,QAClC;AAAA,MACF;AAAA,MACA,MAAM;AAAA,QACJ,QAAQ,SAAS,SAAS;AAAA,QAC1B,OAAO,EAAE,aAAa,SAAS,SAAS,UAAU,OAAO,SAAS,SAAS,MAAO;AAAA,QAClF,SAAS,SAAS,SAAS;AAAA,QAC3B,MAAM;AAAA,UACJ,OAAO;AAAA,UACP,QAAQ,SAAS,SAAS,QAAQ;AAAA,QACpC;AAAA,QACA,QAAQ;AAAA,UACN,OAAO,SAAS,SAAS,MAAM;AAAA,UAC/B,QAAQ,SAAS,SAAS,MAAM;AAAA,QAClC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,iBAA
iB,OAAsC;AACnE,UAAM,QAAQ,KAAK,IAAI;AAEvB,UAAM,SAAS,MAAM,UAAU,YAAY,QAAQ,KAAK,UAAU;AAElE,UAAM,SAAS,KAAK,QAAQ,UAAU,MAAM;AAE5C,QAAI,QAAiB,EAAE,MAAM;AAC7B,QAAI;AACJ,QAAI;AAEJ,SAAK,QAAQ,KAAK,WAAW,KAAK;AAElC,UAAM,EAAE,QAAQ,KAAK,IAAI,UAAM;AAAA,MAI7B,YAAY;AACV,cAAM,YAAY,MAAM,KAAK,aAAa,MAAM,SAAS,MAAM;AAE/D,sBAAc,UAAU;AACxB,gBAAQ,UAAU;AAElB,gBAAQ,MAAM,KAAK,aAAa,QAAQ,IAAI,EAAE,MAAM,GAAG,MAAM;AAE7D,eAAO,OAAO,WAAW;AAAA,UACvB,MAAM,GAAG,WAAW;AAAA,UACpB,OAAO;AAAA,YACL,GAAG,MAAM;AAAA,YACT,OAAO,EAAE,IAAI,MAAM;AAAA,UACrB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA;AAAA,QACE,OAAO,OAAO,KAAK,aAAa;AAC9B,cAAI,QAAQ,SAAS;AAEnB,iBAAK,QAAQ,KAAK,WAAW,OAAO,GAAG;AACvC,mBAAO,eAAe;AACtB,mBAAO;AAAA,UACT;AAEA,cAAI,WAAW,KAAK,aAAa;AAC/B,iBAAK,QAAQ,KAAK,SAAS,OAAO,GAAG;AACrC,mBAAO;AAAA,UACT;AAEA,gBAAM,SAAS,mBAAmB,GAAG;AAErC,cAAI,WAAW,SAAS;AACtB,iBAAK,QAAQ,KAAK,SAAS,OAAO,GAAG;AACrC,mBAAO;AAAA,UACT;AAEA,cAAI,WAAW,YAAY;AAEzB,iBAAK,WAAW,KAAK;AAAA,cACnB,KAAK,GAAG,WAAY,IAAI,KAAM;AAAA,cAC9B,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,cAClC,QAAQ;AAAA,YACV,CAAC;AAED,iBAAK,qBAAqB;AAE1B,kBAAM,KAAK,UAAU,qBAAqB;AAAA,cACxC,GAAI,KAAK,gBAAgB,EAAE,MAAM,CAAC,GAAG,WAAW,CAAC,GAAG,MAAM,CAAC,EAAE;AAAA,cAC7D,WAAW,CAAC,GAAI,KAAK,aAAc,aAAa,CAAC,GAAI,GAAI,KAAK,cAAc,CAAC,CAAE;AAAA,YACjF,CAAC;AAED,iBAAK,QAAQ,KAAK,YAAY,OAAO,GAAG;AACxC,mBAAO;AAAA,UACT;AAEA,eAAK,QAAQ,KAAK,SAAS,OAAO,GAAG;AACrC,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAEA,UAAM,WAAW;AAAA,MACf;AAAA,MACA,MAAM;AAAA,QACJ,QAAQ,KAAK,UAAU;AAAA,QACvB,OAAO,EAAE,aAA2B,MAAc;AAAA,QAClD,SAAS,KAAK,IAAI,IAAI;AAAA,QACtB,MAAM,EAAE,OAAO,OAAO,MAAM,WAAW,QAAQ,OAAO,MAAM,WAAW;AAAA,QACvE,QAAQ,EAAE,OAAO,OAAO,MAAM,aAAa,QAAQ,OAAO,MAAM,aAAa;AAAA,MAC/E;AAAA,IACF;AAEA,SAAK,QAAQ,KAAK,YAAY,OAAO,QAAQ;AAE7C,WAAO,KAAK,aAAa,SAAS,IAAI,UAAU,MAAM;AAAA,EACxD;AACF;",
"names": ["Delay", "SkipFirstDelay", "AlwaysDelay", "backOff", "BackOff", "import_exponential_backoff", "res", "models", "models", "ref"]
}