@page-agent/llms 0.0.24 → 0.2.0

@@ -61,9 +61,9 @@ export declare interface LLMClient {
  * LLM configuration
  */
 export declare interface LLMConfig {
-    baseURL?: string;
-    apiKey?: string;
-    model?: string;
+    baseURL: string;
+    apiKey: string;
+    model: string;
     temperature?: number;
     maxRetries?: number;
     /**
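For callers, this is the breaking change of 0.2.0: the three connection fields are no longer optional. A minimal sketch of the new required shape, using placeholder endpoint, key, and model values; the `LLM` entry point and the remaining optional fields are taken from the package source embedded in the source map below:

```ts
import { LLM } from '@page-agent/llms'

// In 0.0.24 these three fields were optional and fell back to a bundled
// testing proxy; in 0.2.0 they must be provided explicitly.
const llm = new LLM({
  baseURL: 'https://api.openai.com/v1', // placeholder: any OpenAI-compatible endpoint
  apiKey: 'sk-...',                     // placeholder: your own API key
  model: 'gpt-4o-mini',                 // placeholder model id
  temperature: 0.7,                     // still optional, defaults to 0.7
  maxRetries: 2,                        // still optional, defaults to 2
})
```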
@@ -286,16 +286,18 @@ const _OpenAIClient = class _OpenAIClient {
 };
 __name(_OpenAIClient, "OpenAIClient");
 let OpenAIClient = _OpenAIClient;
-const DEFAULT_MODEL_NAME = "PAGE-AGENT-FREE-TESTING-RANDOM";
-const DEFAULT_API_KEY = "PAGE-AGENT-FREE-TESTING-RANDOM";
-const DEFAULT_BASE_URL = "https://hwcxiuzfylggtcktqgij.supabase.co/functions/v1/llm-testing-proxy";
 const LLM_MAX_RETRIES = 2;
 const DEFAULT_TEMPERATURE = 0.7;
 function parseLLMConfig(config) {
+  if (!config.baseURL || !config.apiKey || !config.model) {
+    throw new Error(
+      "[PageAgent] LLM configuration required. Please provide: baseURL, apiKey, model. See: https://alibaba.github.io/page-agent/#/docs/features/models"
+    );
+  }
   return {
-    baseURL: config.baseURL ?? DEFAULT_BASE_URL,
-    apiKey: config.apiKey ?? DEFAULT_API_KEY,
-    model: config.model ?? DEFAULT_MODEL_NAME,
+    baseURL: config.baseURL,
+    apiKey: config.apiKey,
+    model: config.model,
     temperature: config.temperature ?? DEFAULT_TEMPERATURE,
     maxRetries: config.maxRetries ?? LLM_MAX_RETRIES,
     customFetch: (config.customFetch ?? fetch).bind(globalThis)
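The built output enforces the same contract at runtime: `parseLLMConfig` now throws instead of silently routing requests to the removed testing proxy. A hedged usage sketch with placeholder values, assuming `parseLLMConfig` is exported from the package root as in the sources below:

```ts
import { parseLLMConfig } from '@page-agent/llms'

// An empty config no longer resolves to defaults; it throws the error added above.
try {
  parseLLMConfig({} as any)
} catch (e) {
  console.error((e as Error).message)
  // "[PageAgent] LLM configuration required. Please provide: baseURL, apiKey, model. ..."
}

// A complete config passes through; only temperature, maxRetries and
// customFetch are still defaulted (0.7, 2, and fetch bound to globalThis).
const resolved = parseLLMConfig({
  baseURL: 'https://api.openai.com/v1', // placeholder
  apiKey: 'sk-...',                     // placeholder
  model: 'gpt-4o-mini',                 // placeholder
})
```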
@@ -1 +1 @@
- {"version":3,"file":"page-agent-llms.js","sources":["../../src/errors.ts","../../src/utils.ts","../../src/OpenAIClient.ts","../../src/constants.ts","../../src/index.ts"],"sourcesContent":["/**\n * Error types and error handling for LLM invocations\n */\n\nexport const InvokeErrorType = {\n\t// Retryable\n\tNETWORK_ERROR: 'network_error', // Network error, retry\n\tRATE_LIMIT: 'rate_limit', // Rate limit, retry\n\tSERVER_ERROR: 'server_error', // 5xx, retry\n\tNO_TOOL_CALL: 'no_tool_call', // Model did not call tool\n\tINVALID_TOOL_ARGS: 'invalid_tool_args', // Tool args don't match schema\n\tTOOL_EXECUTION_ERROR: 'tool_execution_error', // Tool execution error\n\n\tUNKNOWN: 'unknown',\n\n\t// Non-retryable\n\tAUTH_ERROR: 'auth_error', // Authentication failed\n\tCONTEXT_LENGTH: 'context_length', // Prompt too long\n\tCONTENT_FILTER: 'content_filter', // Content filtered\n} as const\n\nexport type InvokeErrorType = (typeof InvokeErrorType)[keyof typeof InvokeErrorType]\n\nexport class InvokeError extends Error {\n\ttype: InvokeErrorType\n\tretryable: boolean\n\tstatusCode?: number\n\trawError?: unknown\n\n\tconstructor(type: InvokeErrorType, message: string, rawError?: unknown) {\n\t\tsuper(message)\n\t\tthis.name = 'InvokeError'\n\t\tthis.type = type\n\t\tthis.retryable = this.isRetryable(type)\n\t\tthis.rawError = rawError\n\t}\n\n\tprivate isRetryable(type: InvokeErrorType): boolean {\n\t\tconst retryableTypes: InvokeErrorType[] = [\n\t\t\tInvokeErrorType.NETWORK_ERROR,\n\t\t\tInvokeErrorType.RATE_LIMIT,\n\t\t\tInvokeErrorType.SERVER_ERROR,\n\t\t\tInvokeErrorType.NO_TOOL_CALL,\n\t\t\tInvokeErrorType.INVALID_TOOL_ARGS,\n\t\t\tInvokeErrorType.TOOL_EXECUTION_ERROR,\n\t\t\tInvokeErrorType.UNKNOWN,\n\t\t]\n\t\treturn retryableTypes.includes(type)\n\t}\n}\n","/**\n * Utility functions for LLM integration\n */\nimport chalk from 'chalk'\nimport { z } from 'zod'\n\nimport type { Tool } from './types'\n\nfunction debug(message: string) {\n\tconsole.debug(chalk.gray('[LLM]'), message)\n}\n\n/**\n * Convert Zod schema to OpenAI tool format\n * Uses Zod 4 native z.toJSONSchema()\n */\nexport function zodToOpenAITool(name: string, tool: Tool) {\n\treturn {\n\t\ttype: 'function' as const,\n\t\tfunction: {\n\t\t\tname,\n\t\t\tdescription: tool.description,\n\t\t\tparameters: z.toJSONSchema(tool.inputSchema, { target: 'openapi-3.0' }),\n\t\t},\n\t}\n}\n\n/**\n * Patch model specific parameters\n */\nexport function modelPatch(body: Record<string, any>) {\n\tconst model: string = body.model || ''\n\tif (!model) return body\n\n\tconst modelName = normalizeModelName(model)\n\n\tif (modelName.startsWith('qwen')) {\n\t\tdebug('Applying Qwen patch: use higher temperature for auto fixing')\n\t\tbody.temperature = Math.max(body.temperature || 0, 1.0)\n\t}\n\n\tif (modelName.startsWith('claude')) {\n\t\tdebug('Applying Claude patch: disable thinking')\n\t\tbody.thinking = { type: 'disabled' }\n\n\t\t// Convert tool_choice to Claude format\n\t\tif (body.tool_choice === 'required') {\n\t\t\t// 'required' -> { type: 'any' } (must call some tool)\n\t\t\tdebug('Applying Claude patch: convert tool_choice \"required\" to { type: \"any\" }')\n\t\t\tbody.tool_choice = { type: 'any' }\n\t\t} else if (body.tool_choice?.function?.name) {\n\t\t\t// { type: 'function', function: { name: '...' } } -> { type: 'tool', name: '...' 
}\n\t\t\tdebug('Applying Claude patch: convert tool_choice format')\n\t\t\tbody.tool_choice = { type: 'tool', name: body.tool_choice.function.name }\n\t\t}\n\t}\n\n\tif (modelName.startsWith('grok')) {\n\t\tdebug('Applying Grok patch: removing tool_choice')\n\t\tdelete body.tool_choice\n\t\tdebug('Applying Grok patch: disable reasoning and thinking')\n\t\tbody.thinking = { type: 'disabled', effort: 'minimal' }\n\t\tbody.reasoning = { enabled: false, effort: 'low' }\n\t}\n\n\tif (modelName.startsWith('gpt')) {\n\t\tdebug('Applying GPT patch: set verbosity to low')\n\t\tbody.verbosity = 'low'\n\n\t\tif (modelName.startsWith('gpt-52')) {\n\t\t\tdebug('Applying GPT-52 patch: disable reasoning')\n\t\t\tbody.reasoning_effort = 'none'\n\t\t} else if (modelName.startsWith('gpt-51')) {\n\t\t\tdebug('Applying GPT-51 patch: disable reasoning')\n\t\t\tbody.reasoning_effort = 'none'\n\t\t} else if (modelName.startsWith('gpt-5')) {\n\t\t\tdebug('Applying GPT-5 patch: set reasoning effort to low')\n\t\t\tbody.reasoning_effort = 'low'\n\t\t}\n\t}\n\n\tif (modelName.startsWith('gemini')) {\n\t\tdebug('Applying Gemini patch: set reasoning effort to minimal')\n\t\tbody.reasoning_effort = 'minimal'\n\t}\n\n\treturn body\n}\n\n/**\n * check if a given model ID fits a specific model name\n *\n * @note\n * Different model providers may use different model IDs for the same model.\n * For example, openai's `gpt-5.2` may called:\n *\n * - `gpt-5.2-version`\n * - `gpt-5_2-date`\n * - `GPT-52-version-date`\n * - `openai/gpt-5.2-chat`\n *\n * They should be treated as the same model.\n * Normalize them to `gpt-52`\n */\nfunction normalizeModelName(modelName: string): string {\n\tlet normalizedName = modelName.toLowerCase()\n\n\t// remove prefix before '/'\n\tif (normalizedName.includes('/')) {\n\t\tnormalizedName = normalizedName.split('/')[1]\n\t}\n\n\t// remove '_'\n\tnormalizedName = normalizedName.replace(/_/g, '')\n\n\t// remove '.'\n\tnormalizedName = normalizedName.replace(/\\./g, '')\n\n\treturn normalizedName\n}\n","/**\n * OpenAI Client implementation\n */\nimport { InvokeError, InvokeErrorType } from './errors'\nimport type { InvokeOptions, InvokeResult, LLMClient, LLMConfig, Message, Tool } from './types'\nimport { modelPatch, zodToOpenAITool } from './utils'\n\n/**\n * Client for OpenAI compatible APIs\n */\nexport class OpenAIClient implements LLMClient {\n\tconfig: Required<LLMConfig>\n\tprivate fetch: typeof globalThis.fetch\n\n\tconstructor(config: Required<LLMConfig>) {\n\t\tthis.config = config\n\t\tthis.fetch = config.customFetch\n\t}\n\n\tasync invoke(\n\t\tmessages: Message[],\n\t\ttools: Record<string, Tool>,\n\t\tabortSignal?: AbortSignal,\n\t\toptions?: InvokeOptions\n\t): Promise<InvokeResult> {\n\t\t// 1. Convert tools to OpenAI format\n\t\tconst openaiTools = Object.entries(tools).map(([name, t]) => zodToOpenAITool(name, t))\n\n\t\t// Build request body\n\t\tconst requestBody: Record<string, unknown> = {\n\t\t\tmodel: this.config.model,\n\t\t\ttemperature: this.config.temperature,\n\t\t\tmessages,\n\t\t\ttools: openaiTools,\n\t\t\tparallel_tool_calls: false,\n\t\t\t// Require tool call: specific tool if provided, otherwise any tool\n\t\t\ttool_choice: options?.toolChoiceName\n\t\t\t\t? { type: 'function', function: { name: options.toolChoiceName } }\n\t\t\t\t: 'required',\n\t\t}\n\n\t\t// 2. 
Call API\n\t\tlet response: Response\n\t\ttry {\n\t\t\tresponse = await this.fetch(`${this.config.baseURL}/chat/completions`, {\n\t\t\t\tmethod: 'POST',\n\t\t\t\theaders: {\n\t\t\t\t\t'Content-Type': 'application/json',\n\t\t\t\t\tAuthorization: `Bearer ${this.config.apiKey}`,\n\t\t\t\t},\n\t\t\t\tbody: JSON.stringify(modelPatch(requestBody)),\n\t\t\t\tsignal: abortSignal,\n\t\t\t})\n\t\t} catch (error: unknown) {\n\t\t\tconsole.error(error)\n\t\t\tthrow new InvokeError(InvokeErrorType.NETWORK_ERROR, 'Network request failed', error)\n\t\t}\n\n\t\t// 3. Handle HTTP errors\n\t\tif (!response.ok) {\n\t\t\tconst errorData = await response.json().catch()\n\t\t\tconst errorMessage =\n\t\t\t\t(errorData as { error?: { message?: string } }).error?.message || response.statusText\n\n\t\t\tif (response.status === 401 || response.status === 403) {\n\t\t\t\tthrow new InvokeError(\n\t\t\t\t\tInvokeErrorType.AUTH_ERROR,\n\t\t\t\t\t`Authentication failed: ${errorMessage}`,\n\t\t\t\t\terrorData\n\t\t\t\t)\n\t\t\t}\n\t\t\tif (response.status === 429) {\n\t\t\t\tthrow new InvokeError(\n\t\t\t\t\tInvokeErrorType.RATE_LIMIT,\n\t\t\t\t\t`Rate limit exceeded: ${errorMessage}`,\n\t\t\t\t\terrorData\n\t\t\t\t)\n\t\t\t}\n\t\t\tif (response.status >= 500) {\n\t\t\t\tthrow new InvokeError(\n\t\t\t\t\tInvokeErrorType.SERVER_ERROR,\n\t\t\t\t\t`Server error: ${errorMessage}`,\n\t\t\t\t\terrorData\n\t\t\t\t)\n\t\t\t}\n\t\t\tthrow new InvokeError(\n\t\t\t\tInvokeErrorType.UNKNOWN,\n\t\t\t\t`HTTP ${response.status}: ${errorMessage}`,\n\t\t\t\terrorData\n\t\t\t)\n\t\t}\n\n\t\t// 4. Parse and validate response\n\t\tconst data = await response.json()\n\n\t\tconst choice = data.choices?.[0]\n\t\tif (!choice) {\n\t\t\tthrow new InvokeError(InvokeErrorType.UNKNOWN, 'No choices in response', data)\n\t\t}\n\n\t\t// Check finish_reason\n\t\tswitch (choice.finish_reason) {\n\t\t\tcase 'tool_calls':\n\t\t\tcase 'function_call': // gemini\n\t\t\tcase 'stop': // some models use this even with tool calls\n\t\t\t\tbreak\n\t\t\tcase 'length':\n\t\t\t\tthrow new InvokeError(\n\t\t\t\t\tInvokeErrorType.CONTEXT_LENGTH,\n\t\t\t\t\t'Response truncated: max tokens reached'\n\t\t\t\t)\n\t\t\tcase 'content_filter':\n\t\t\t\tthrow new InvokeError(InvokeErrorType.CONTENT_FILTER, 'Content filtered by safety system')\n\t\t\tdefault:\n\t\t\t\tthrow new InvokeError(\n\t\t\t\t\tInvokeErrorType.UNKNOWN,\n\t\t\t\t\t`Unexpected finish_reason: ${choice.finish_reason}`\n\t\t\t\t)\n\t\t}\n\n\t\t// Apply normalizeResponse if provided (for fixing format issues automatically)\n\t\tconst normalizedData = options?.normalizeResponse ? 
options.normalizeResponse(data) : data\n\t\tconst normalizedChoice = (normalizedData as any).choices?.[0]\n\n\t\t// Get tool name from response\n\t\tconst toolCallName = normalizedChoice?.message?.tool_calls?.[0]?.function?.name\n\t\tif (!toolCallName) {\n\t\t\tthrow new InvokeError(\n\t\t\t\tInvokeErrorType.NO_TOOL_CALL,\n\t\t\t\t'No tool call found in response',\n\t\t\t\tnormalizedData\n\t\t\t)\n\t\t}\n\n\t\tconst tool = tools[toolCallName]\n\t\tif (!tool) {\n\t\t\tthrow new InvokeError(\n\t\t\t\tInvokeErrorType.UNKNOWN,\n\t\t\t\t`Tool \"${toolCallName}\" not found in tools`,\n\t\t\t\tnormalizedData\n\t\t\t)\n\t\t}\n\n\t\t// Extract and parse tool arguments\n\t\tconst argString = normalizedChoice.message?.tool_calls?.[0]?.function?.arguments\n\t\tif (!argString) {\n\t\t\tthrow new InvokeError(\n\t\t\t\tInvokeErrorType.INVALID_TOOL_ARGS,\n\t\t\t\t'No tool call arguments found',\n\t\t\t\tnormalizedData\n\t\t\t)\n\t\t}\n\n\t\tlet parsedArgs: unknown\n\t\ttry {\n\t\t\tparsedArgs = JSON.parse(argString)\n\t\t} catch (error) {\n\t\t\tthrow new InvokeError(\n\t\t\t\tInvokeErrorType.INVALID_TOOL_ARGS,\n\t\t\t\t'Failed to parse tool arguments as JSON',\n\t\t\t\terror\n\t\t\t)\n\t\t}\n\n\t\t// Validate with schema\n\t\tconst validation = tool.inputSchema.safeParse(parsedArgs)\n\t\tif (!validation.success) {\n\t\t\tconsole.error(validation.error)\n\t\t\tthrow new InvokeError(\n\t\t\t\tInvokeErrorType.INVALID_TOOL_ARGS,\n\t\t\t\t'Tool arguments validation failed',\n\t\t\t\tvalidation.error\n\t\t\t)\n\t\t}\n\t\tconst toolInput = validation.data\n\n\t\t// 5. Execute tool\n\t\tlet toolResult: unknown\n\t\ttry {\n\t\t\ttoolResult = await tool.execute(toolInput)\n\t\t} catch (e) {\n\t\t\tthrow new InvokeError(\n\t\t\t\tInvokeErrorType.TOOL_EXECUTION_ERROR,\n\t\t\t\t`Tool execution failed: ${(e as Error).message}`,\n\t\t\t\te\n\t\t\t)\n\t\t}\n\n\t\t// Return result\n\t\treturn {\n\t\t\ttoolCall: {\n\t\t\t\tname: toolCallName,\n\t\t\t\targs: toolInput,\n\t\t\t},\n\t\t\ttoolResult,\n\t\t\tusage: {\n\t\t\t\tpromptTokens: data.usage?.prompt_tokens ?? 0,\n\t\t\t\tcompletionTokens: data.usage?.completion_tokens ?? 0,\n\t\t\t\ttotalTokens: data.usage?.total_tokens ?? 0,\n\t\t\t\tcachedTokens: data.usage?.prompt_tokens_details?.cached_tokens,\n\t\t\t\treasoningTokens: data.usage?.completion_tokens_details?.reasoning_tokens,\n\t\t\t},\n\t\t\trawResponse: data,\n\t\t}\n\t}\n}\n","// Dev environment: use .env config if available, otherwise fallback to testing api\nexport const DEFAULT_MODEL_NAME: string =\n\timport.meta.env.DEV && import.meta.env.LLM_MODEL_NAME\n\t\t? import.meta.env.LLM_MODEL_NAME\n\t\t: 'PAGE-AGENT-FREE-TESTING-RANDOM'\n\nexport const DEFAULT_API_KEY: string =\n\timport.meta.env.DEV && import.meta.env.LLM_API_KEY\n\t\t? import.meta.env.LLM_API_KEY\n\t\t: 'PAGE-AGENT-FREE-TESTING-RANDOM'\n\nexport const DEFAULT_BASE_URL: string =\n\timport.meta.env.DEV && import.meta.env.LLM_BASE_URL\n\t\t? 
import.meta.env.LLM_BASE_URL\n\t\t: 'https://hwcxiuzfylggtcktqgij.supabase.co/functions/v1/llm-testing-proxy'\n\n// internal\n\nexport const LLM_MAX_RETRIES = 2\nexport const DEFAULT_TEMPERATURE = 0.7 // higher randomness helps auto-recovery\n","import { OpenAIClient } from './OpenAIClient'\nimport {\n\tDEFAULT_API_KEY,\n\tDEFAULT_BASE_URL,\n\tDEFAULT_MODEL_NAME,\n\tDEFAULT_TEMPERATURE,\n\tLLM_MAX_RETRIES,\n} from './constants'\nimport { InvokeError } from './errors'\nimport type { InvokeOptions, InvokeResult, LLMClient, LLMConfig, Message, Tool } from './types'\n\nexport type { InvokeOptions, InvokeResult, LLMClient, LLMConfig, Message, Tool }\n\nexport function parseLLMConfig(config: LLMConfig): Required<LLMConfig> {\n\treturn {\n\t\tbaseURL: config.baseURL ?? DEFAULT_BASE_URL,\n\t\tapiKey: config.apiKey ?? DEFAULT_API_KEY,\n\t\tmodel: config.model ?? DEFAULT_MODEL_NAME,\n\t\ttemperature: config.temperature ?? DEFAULT_TEMPERATURE,\n\t\tmaxRetries: config.maxRetries ?? LLM_MAX_RETRIES,\n\t\tcustomFetch: (config.customFetch ?? fetch).bind(globalThis), // fetch will be illegal unless bound\n\t}\n}\n\nexport class LLM extends EventTarget {\n\tconfig: Required<LLMConfig>\n\tclient: LLMClient\n\n\tconstructor(config: LLMConfig) {\n\t\tsuper()\n\t\tthis.config = parseLLMConfig(config)\n\n\t\t// Default to OpenAI client\n\t\tthis.client = new OpenAIClient(this.config)\n\t}\n\n\t/**\n\t * - call llm api *once*\n\t * - invoke tool call *once*\n\t * - return the result of the tool\n\t */\n\tasync invoke(\n\t\tmessages: Message[],\n\t\ttools: Record<string, Tool>,\n\t\tabortSignal: AbortSignal,\n\t\toptions?: InvokeOptions\n\t): Promise<InvokeResult> {\n\t\treturn await withRetry(\n\t\t\tasync () => {\n\t\t\t\tconst result = await this.client.invoke(messages, tools, abortSignal, options)\n\n\t\t\t\treturn result\n\t\t\t},\n\t\t\t// retry settings\n\t\t\t{\n\t\t\t\tmaxRetries: this.config.maxRetries,\n\t\t\t\tonRetry: (current: number) => {\n\t\t\t\t\tthis.dispatchEvent(\n\t\t\t\t\t\tnew CustomEvent('retry', { detail: { current, max: this.config.maxRetries } })\n\t\t\t\t\t)\n\t\t\t\t},\n\t\t\t\tonError: (error: Error) => {\n\t\t\t\t\tthis.dispatchEvent(new CustomEvent('error', { detail: { error } }))\n\t\t\t\t},\n\t\t\t}\n\t\t)\n\t}\n}\n\nasync function withRetry<T>(\n\tfn: () => Promise<T>,\n\tsettings: {\n\t\tmaxRetries: number\n\t\tonRetry: (retries: number) => void\n\t\tonError: (error: Error) => void\n\t}\n): Promise<T> {\n\tlet retries = 0\n\tlet lastError: Error | null = null\n\twhile (retries <= settings.maxRetries) {\n\t\tif (retries > 0) {\n\t\t\tsettings.onRetry(retries)\n\t\t\tawait new Promise((resolve) => setTimeout(resolve, 100))\n\t\t}\n\n\t\ttry {\n\t\t\treturn await fn()\n\t\t} catch (error: unknown) {\n\t\t\tconsole.error(error)\n\t\t\tsettings.onError(error as Error)\n\n\t\t\t// do not retry if aborted by user\n\t\t\tif ((error as { name?: string })?.name === 'AbortError') throw error\n\n\t\t\t// do not retry if error is not retryable (InvokeError)\n\t\t\tif (error instanceof InvokeError && !error.retryable) throw error\n\n\t\t\tlastError = error as Error\n\t\t\tretries++\n\n\t\t\tawait new Promise((resolve) => setTimeout(resolve, 100))\n\t\t}\n\t}\n\n\tthrow 
lastError!\n}\n"],"names":[],"mappings":";;;;AAIO,MAAM,kBAAkB;AAAA;AAAA,EAE9B,eAAe;AAAA;AAAA,EACf,YAAY;AAAA;AAAA,EACZ,cAAc;AAAA;AAAA,EACd,cAAc;AAAA;AAAA,EACd,mBAAmB;AAAA;AAAA,EACnB,sBAAsB;AAAA;AAAA,EAEtB,SAAS;AAAA;AAAA,EAGT,YAAY;AAAA;AAAA,EACZ,gBAAgB;AAAA;AAAA,EAChB,gBAAgB;AAAA;AACjB;AAIO,MAAM,eAAN,MAAM,qBAAoB,MAAM;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,MAAuB,SAAiB,UAAoB;AACvE,UAAM,OAAO;AACb,SAAK,OAAO;AACZ,SAAK,OAAO;AACZ,SAAK,YAAY,KAAK,YAAY,IAAI;AACtC,SAAK,WAAW;AAAA,EACjB;AAAA,EAEQ,YAAY,MAAgC;AACnD,UAAM,iBAAoC;AAAA,MACzC,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,IAAA;AAEjB,WAAO,eAAe,SAAS,IAAI;AAAA,EACpC;AACD;AA1BuC;AAAhC,IAAM,cAAN;ACfP,SAAS,MAAM,SAAiB;AAC/B,UAAQ,MAAM,MAAM,KAAK,OAAO,GAAG,OAAO;AAC3C;AAFS;AAQF,SAAS,gBAAgB,MAAc,MAAY;AACzD,SAAO;AAAA,IACN,MAAM;AAAA,IACN,UAAU;AAAA,MACT;AAAA,MACA,aAAa,KAAK;AAAA,MAClB,YAAY,EAAE,aAAa,KAAK,aAAa,EAAE,QAAQ,eAAe;AAAA,IAAA;AAAA,EACvE;AAEF;AATgB;AAcT,SAAS,WAAW,MAA2B;AACrD,QAAM,QAAgB,KAAK,SAAS;AACpC,MAAI,CAAC,MAAO,QAAO;AAEnB,QAAM,YAAY,mBAAmB,KAAK;AAE1C,MAAI,UAAU,WAAW,MAAM,GAAG;AACjC,UAAM,6DAA6D;AACnE,SAAK,cAAc,KAAK,IAAI,KAAK,eAAe,GAAG,CAAG;AAAA,EACvD;AAEA,MAAI,UAAU,WAAW,QAAQ,GAAG;AACnC,UAAM,yCAAyC;AAC/C,SAAK,WAAW,EAAE,MAAM,WAAA;AAGxB,QAAI,KAAK,gBAAgB,YAAY;AAEpC,YAAM,0EAA0E;AAChF,WAAK,cAAc,EAAE,MAAM,MAAA;AAAA,IAC5B,WAAW,KAAK,aAAa,UAAU,MAAM;AAE5C,YAAM,mDAAmD;AACzD,WAAK,cAAc,EAAE,MAAM,QAAQ,MAAM,KAAK,YAAY,SAAS,KAAA;AAAA,IACpE;AAAA,EACD;AAEA,MAAI,UAAU,WAAW,MAAM,GAAG;AACjC,UAAM,2CAA2C;AACjD,WAAO,KAAK;AACZ,UAAM,qDAAqD;AAC3D,SAAK,WAAW,EAAE,MAAM,YAAY,QAAQ,UAAA;AAC5C,SAAK,YAAY,EAAE,SAAS,OAAO,QAAQ,MAAA;AAAA,EAC5C;AAEA,MAAI,UAAU,WAAW,KAAK,GAAG;AAChC,UAAM,0CAA0C;AAChD,SAAK,YAAY;AAEjB,QAAI,UAAU,WAAW,QAAQ,GAAG;AACnC,YAAM,0CAA0C;AAChD,WAAK,mBAAmB;AAAA,IACzB,WAAW,UAAU,WAAW,QAAQ,GAAG;AAC1C,YAAM,0CAA0C;AAChD,WAAK,mBAAmB;AAAA,IACzB,WAAW,UAAU,WAAW,OAAO,GAAG;AACzC,YAAM,mDAAmD;AACzD,WAAK,mBAAmB;AAAA,IACzB;AAAA,EACD;AAEA,MAAI,UAAU,WAAW,QAAQ,GAAG;AACnC,UAAM,wDAAwD;AAC9D,SAAK,mBAAmB;AAAA,EACzB;AAEA,SAAO;AACR;AAzDgB;AA0EhB,SAAS,mBAAmB,WAA2B;AACtD,MAAI,iBAAiB,UAAU,YAAA;AAG/B,MAAI,eAAe,SAAS,GAAG,GAAG;AACjC,qBAAiB,eAAe,MAAM,GAAG,EAAE,CAAC;AAAA,EAC7C;AAGA,mBAAiB,eAAe,QAAQ,MAAM,EAAE;AAGhD,mBAAiB,eAAe,QAAQ,OAAO,EAAE;AAEjD,SAAO;AACR;AAfS;AC9FF,MAAM,gBAAN,MAAM,cAAkC;AAAA,EAC9C;AAAA,EACQ;AAAA,EAER,YAAY,QAA6B;AACxC,SAAK,SAAS;AACd,SAAK,QAAQ,OAAO;AAAA,EACrB;AAAA,EAEA,MAAM,OACL,UACA,OACA,aACA,SACwB;AAExB,UAAM,cAAc,OAAO,QAAQ,KAAK,EAAE,IAAI,CAAC,CAAC,MAAM,CAAC,MAAM,gBAAgB,MAAM,CAAC,CAAC;AAGrF,UAAM,cAAuC;AAAA,MAC5C,OAAO,KAAK,OAAO;AAAA,MACnB,aAAa,KAAK,OAAO;AAAA,MACzB;AAAA,MACA,OAAO;AAAA,MACP,qBAAqB;AAAA;AAAA,MAErB,aAAa,SAAS,iBACnB,EAAE,MAAM,YAAY,UAAU,EAAE,MAAM,QAAQ,eAAA,EAAe,IAC7D;AAAA,IAAA;AAIJ,QAAI;AACJ,QAAI;AACH,iBAAW,MAAM,KAAK,MAAM,GAAG,KAAK,OAAO,OAAO,qBAAqB;AAAA,QACtE,QAAQ;AAAA,QACR,SAAS;AAAA,UACR,gBAAgB;AAAA,UAChB,eAAe,UAAU,KAAK,OAAO,MAAM;AAAA,QAAA;AAAA,QAE5C,MAAM,KAAK,UAAU,WAAW,WAAW,CAAC;AAAA,QAC5C,QAAQ;AAAA,MAAA,CACR;AAAA,IACF,SAAS,OAAgB;AACxB,cAAQ,MAAM,KAAK;AACnB,YAAM,IAAI,YAAY,gBAAgB,eAAe,0BAA0B,KAAK;AAAA,IACrF;AAGA,QAAI,CAAC,SAAS,IAAI;AACjB,YAAM,YAAY,MAAM,SAAS,KAAA,EAAO,MAAA;AACxC,YAAM,eACJ,UAA+C,OAAO,WAAW,SAAS;AAE5E,UAAI,SAAS,WAAW,OAAO,SAAS,WAAW,KAAK;AACvD,cAAM,IAAI;AAAA,UACT,gBAAgB;AAAA,UAChB,0BAA0B,YAAY;AAAA,UACtC;AAAA,QAAA;AAAA,MAEF;AACA,UAAI,SAAS,WAAW,KAAK;AAC5B,cAAM,IAAI;AAAA,UACT,gBAAgB;AAAA,UAChB,wBAAwB,YAAY;AAAA,UACpC;AAAA,QAAA;AAAA,MAEF;AACA,UAAI,SAAS,UAAU,KAAK;AAC3B,cAAM,IAAI;AAAA,UACT,gBAAgB;AAAA,UAChB,iBAAiB,YAAY;AAAA,UAC7B;AAAA,QAAA;AAAA,MAEF;AACA,YAAM,IAAI;AAAA,QACT,gBAAgB;AAAA,QAC
hB,QAAQ,SAAS,MAAM,KAAK,YAAY;AAAA,QACxC;AAAA,MAAA;AAAA,IAEF;AAGA,UAAM,OAAO,MAAM,SAAS,KAAA;AAE5B,UAAM,SAAS,KAAK,UAAU,CAAC;AAC/B,QAAI,CAAC,QAAQ;AACZ,YAAM,IAAI,YAAY,gBAAgB,SAAS,0BAA0B,IAAI;AAAA,IAC9E;AAGA,YAAQ,OAAO,eAAA;AAAA,MACd,KAAK;AAAA,MACL,KAAK;AAAA;AAAA,MACL,KAAK;AACJ;AAAA,MACD,KAAK;AACJ,cAAM,IAAI;AAAA,UACT,gBAAgB;AAAA,UAChB;AAAA,QAAA;AAAA,MAEF,KAAK;AACJ,cAAM,IAAI,YAAY,gBAAgB,gBAAgB,mCAAmC;AAAA,MAC1F;AACC,cAAM,IAAI;AAAA,UACT,gBAAgB;AAAA,UAChB,6BAA6B,OAAO,aAAa;AAAA,QAAA;AAAA,IAClD;AAIF,UAAM,iBAAiB,SAAS,oBAAoB,QAAQ,kBAAkB,IAAI,IAAI;AACtF,UAAM,mBAAoB,eAAuB,UAAU,CAAC;AAG5D,UAAM,eAAe,kBAAkB,SAAS,aAAa,CAAC,GAAG,UAAU;AAC3E,QAAI,CAAC,cAAc;AAClB,YAAM,IAAI;AAAA,QACT,gBAAgB;AAAA,QAChB;AAAA,QACA;AAAA,MAAA;AAAA,IAEF;AAEA,UAAM,OAAO,MAAM,YAAY;AAC/B,QAAI,CAAC,MAAM;AACV,YAAM,IAAI;AAAA,QACT,gBAAgB;AAAA,QAChB,SAAS,YAAY;AAAA,QACrB;AAAA,MAAA;AAAA,IAEF;AAGA,UAAM,YAAY,iBAAiB,SAAS,aAAa,CAAC,GAAG,UAAU;AACvE,QAAI,CAAC,WAAW;AACf,YAAM,IAAI;AAAA,QACT,gBAAgB;AAAA,QAChB;AAAA,QACA;AAAA,MAAA;AAAA,IAEF;AAEA,QAAI;AACJ,QAAI;AACH,mBAAa,KAAK,MAAM,SAAS;AAAA,IAClC,SAAS,OAAO;AACf,YAAM,IAAI;AAAA,QACT,gBAAgB;AAAA,QAChB;AAAA,QACA;AAAA,MAAA;AAAA,IAEF;AAGA,UAAM,aAAa,KAAK,YAAY,UAAU,UAAU;AACxD,QAAI,CAAC,WAAW,SAAS;AACxB,cAAQ,MAAM,WAAW,KAAK;AAC9B,YAAM,IAAI;AAAA,QACT,gBAAgB;AAAA,QAChB;AAAA,QACA,WAAW;AAAA,MAAA;AAAA,IAEb;AACA,UAAM,YAAY,WAAW;AAG7B,QAAI;AACJ,QAAI;AACH,mBAAa,MAAM,KAAK,QAAQ,SAAS;AAAA,IAC1C,SAAS,GAAG;AACX,YAAM,IAAI;AAAA,QACT,gBAAgB;AAAA,QAChB,0BAA2B,EAAY,OAAO;AAAA,QAC9C;AAAA,MAAA;AAAA,IAEF;AAGA,WAAO;AAAA,MACN,UAAU;AAAA,QACT,MAAM;AAAA,QACN,MAAM;AAAA,MAAA;AAAA,MAEP;AAAA,MACA,OAAO;AAAA,QACN,cAAc,KAAK,OAAO,iBAAiB;AAAA,QAC3C,kBAAkB,KAAK,OAAO,qBAAqB;AAAA,QACnD,aAAa,KAAK,OAAO,gBAAgB;AAAA,QACzC,cAAc,KAAK,OAAO,uBAAuB;AAAA,QACjD,iBAAiB,KAAK,OAAO,2BAA2B;AAAA,MAAA;AAAA,MAEzD,aAAa;AAAA,IAAA;AAAA,EAEf;AACD;AAnM+C;AAAxC,IAAM,eAAN;ACTA,MAAM,qBAGT;AAEG,MAAM,kBAGT;AAEG,MAAM,mBAGT;AAIG,MAAM,kBAAkB;AACxB,MAAM,sBAAsB;ACN5B,SAAS,eAAe,QAAwC;AACtE,SAAO;AAAA,IACN,SAAS,OAAO,WAAW;AAAA,IAC3B,QAAQ,OAAO,UAAU;AAAA,IACzB,OAAO,OAAO,SAAS;AAAA,IACvB,aAAa,OAAO,eAAe;AAAA,IACnC,YAAY,OAAO,cAAc;AAAA,IACjC,cAAc,OAAO,eAAe,OAAO,KAAK,UAAU;AAAA;AAAA,EAAA;AAE5D;AATgB;AAWT,MAAM,OAAN,MAAM,aAAY,YAAY;AAAA,EACpC;AAAA,EACA;AAAA,EAEA,YAAY,QAAmB;AAC9B,UAAA;AACA,SAAK,SAAS,eAAe,MAAM;AAGnC,SAAK,SAAS,IAAI,aAAa,KAAK,MAAM;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,OACL,UACA,OACA,aACA,SACwB;AACxB,WAAO,MAAM;AAAA,MACZ,YAAY;AACX,cAAM,SAAS,MAAM,KAAK,OAAO,OAAO,UAAU,OAAO,aAAa,OAAO;AAE7E,eAAO;AAAA,MACR;AAAA;AAAA,MAEA;AAAA,QACC,YAAY,KAAK,OAAO;AAAA,QACxB,SAAS,wBAAC,YAAoB;AAC7B,eAAK;AAAA,YACJ,IAAI,YAAY,SAAS,EAAE,QAAQ,EAAE,SAAS,KAAK,KAAK,OAAO,aAAW,CAAG;AAAA,UAAA;AAAA,QAE/E,GAJS;AAAA,QAKT,SAAS,wBAAC,UAAiB;AAC1B,eAAK,cAAc,IAAI,YAAY,SAAS,EAAE,QAAQ,EAAE,MAAA,EAAM,CAAG,CAAC;AAAA,QACnE,GAFS;AAAA,MAET;AAAA,IACD;AAAA,EAEF;AACD;AA3CqC;AAA9B,IAAM,MAAN;AA6CP,eAAe,UACd,IACA,UAKa;AACb,MAAI,UAAU;AACd,MAAI,YAA0B;AAC9B,SAAO,WAAW,SAAS,YAAY;AACtC,QAAI,UAAU,GAAG;AAChB,eAAS,QAAQ,OAAO;AACxB,YAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,GAAG,CAAC;AAAA,IACxD;AAEA,QAAI;AACH,aAAO,MAAM,GAAA;AAAA,IACd,SAAS,OAAgB;AACxB,cAAQ,MAAM,KAAK;AACnB,eAAS,QAAQ,KAAc;AAG/B,UAAK,OAA6B,SAAS,aAAc,OAAM;AAG/D,UAAI,iBAAiB,eAAe,CAAC,MAAM,UAAW,OAAM;AAE5D,kBAAY;AACZ;AAEA,YAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,GAAG,CAAC;AAAA,IACxD;AAAA,EACD;AAEA,QAAM;AACP;AApCe;"}
+ {"version":3,"file":"page-agent-llms.js","sources":["../../src/errors.ts","../../src/utils.ts","../../src/OpenAIClient.ts","../../src/constants.ts","../../src/index.ts"],"sourcesContent":["/**\n * Error types and error handling for LLM invocations\n */\n\nexport const InvokeErrorType = {\n\t// Retryable\n\tNETWORK_ERROR: 'network_error', // Network error, retry\n\tRATE_LIMIT: 'rate_limit', // Rate limit, retry\n\tSERVER_ERROR: 'server_error', // 5xx, retry\n\tNO_TOOL_CALL: 'no_tool_call', // Model did not call tool\n\tINVALID_TOOL_ARGS: 'invalid_tool_args', // Tool args don't match schema\n\tTOOL_EXECUTION_ERROR: 'tool_execution_error', // Tool execution error\n\n\tUNKNOWN: 'unknown',\n\n\t// Non-retryable\n\tAUTH_ERROR: 'auth_error', // Authentication failed\n\tCONTEXT_LENGTH: 'context_length', // Prompt too long\n\tCONTENT_FILTER: 'content_filter', // Content filtered\n} as const\n\nexport type InvokeErrorType = (typeof InvokeErrorType)[keyof typeof InvokeErrorType]\n\nexport class InvokeError extends Error {\n\ttype: InvokeErrorType\n\tretryable: boolean\n\tstatusCode?: number\n\trawError?: unknown\n\n\tconstructor(type: InvokeErrorType, message: string, rawError?: unknown) {\n\t\tsuper(message)\n\t\tthis.name = 'InvokeError'\n\t\tthis.type = type\n\t\tthis.retryable = this.isRetryable(type)\n\t\tthis.rawError = rawError\n\t}\n\n\tprivate isRetryable(type: InvokeErrorType): boolean {\n\t\tconst retryableTypes: InvokeErrorType[] = [\n\t\t\tInvokeErrorType.NETWORK_ERROR,\n\t\t\tInvokeErrorType.RATE_LIMIT,\n\t\t\tInvokeErrorType.SERVER_ERROR,\n\t\t\tInvokeErrorType.NO_TOOL_CALL,\n\t\t\tInvokeErrorType.INVALID_TOOL_ARGS,\n\t\t\tInvokeErrorType.TOOL_EXECUTION_ERROR,\n\t\t\tInvokeErrorType.UNKNOWN,\n\t\t]\n\t\treturn retryableTypes.includes(type)\n\t}\n}\n","/**\n * Utility functions for LLM integration\n */\nimport chalk from 'chalk'\nimport { z } from 'zod'\n\nimport type { Tool } from './types'\n\nfunction debug(message: string) {\n\tconsole.debug(chalk.gray('[LLM]'), message)\n}\n\n/**\n * Convert Zod schema to OpenAI tool format\n * Uses Zod 4 native z.toJSONSchema()\n */\nexport function zodToOpenAITool(name: string, tool: Tool) {\n\treturn {\n\t\ttype: 'function' as const,\n\t\tfunction: {\n\t\t\tname,\n\t\t\tdescription: tool.description,\n\t\t\tparameters: z.toJSONSchema(tool.inputSchema, { target: 'openapi-3.0' }),\n\t\t},\n\t}\n}\n\n/**\n * Patch model specific parameters\n */\nexport function modelPatch(body: Record<string, any>) {\n\tconst model: string = body.model || ''\n\tif (!model) return body\n\n\tconst modelName = normalizeModelName(model)\n\n\tif (modelName.startsWith('qwen')) {\n\t\tdebug('Applying Qwen patch: use higher temperature for auto fixing')\n\t\tbody.temperature = Math.max(body.temperature || 0, 1.0)\n\t}\n\n\tif (modelName.startsWith('claude')) {\n\t\tdebug('Applying Claude patch: disable thinking')\n\t\tbody.thinking = { type: 'disabled' }\n\n\t\t// Convert tool_choice to Claude format\n\t\tif (body.tool_choice === 'required') {\n\t\t\t// 'required' -> { type: 'any' } (must call some tool)\n\t\t\tdebug('Applying Claude patch: convert tool_choice \"required\" to { type: \"any\" }')\n\t\t\tbody.tool_choice = { type: 'any' }\n\t\t} else if (body.tool_choice?.function?.name) {\n\t\t\t// { type: 'function', function: { name: '...' } } -> { type: 'tool', name: '...' 
}\n\t\t\tdebug('Applying Claude patch: convert tool_choice format')\n\t\t\tbody.tool_choice = { type: 'tool', name: body.tool_choice.function.name }\n\t\t}\n\t}\n\n\tif (modelName.startsWith('grok')) {\n\t\tdebug('Applying Grok patch: removing tool_choice')\n\t\tdelete body.tool_choice\n\t\tdebug('Applying Grok patch: disable reasoning and thinking')\n\t\tbody.thinking = { type: 'disabled', effort: 'minimal' }\n\t\tbody.reasoning = { enabled: false, effort: 'low' }\n\t}\n\n\tif (modelName.startsWith('gpt')) {\n\t\tdebug('Applying GPT patch: set verbosity to low')\n\t\tbody.verbosity = 'low'\n\n\t\tif (modelName.startsWith('gpt-52')) {\n\t\t\tdebug('Applying GPT-52 patch: disable reasoning')\n\t\t\tbody.reasoning_effort = 'none'\n\t\t} else if (modelName.startsWith('gpt-51')) {\n\t\t\tdebug('Applying GPT-51 patch: disable reasoning')\n\t\t\tbody.reasoning_effort = 'none'\n\t\t} else if (modelName.startsWith('gpt-5')) {\n\t\t\tdebug('Applying GPT-5 patch: set reasoning effort to low')\n\t\t\tbody.reasoning_effort = 'low'\n\t\t}\n\t}\n\n\tif (modelName.startsWith('gemini')) {\n\t\tdebug('Applying Gemini patch: set reasoning effort to minimal')\n\t\tbody.reasoning_effort = 'minimal'\n\t}\n\n\treturn body\n}\n\n/**\n * check if a given model ID fits a specific model name\n *\n * @note\n * Different model providers may use different model IDs for the same model.\n * For example, openai's `gpt-5.2` may called:\n *\n * - `gpt-5.2-version`\n * - `gpt-5_2-date`\n * - `GPT-52-version-date`\n * - `openai/gpt-5.2-chat`\n *\n * They should be treated as the same model.\n * Normalize them to `gpt-52`\n */\nfunction normalizeModelName(modelName: string): string {\n\tlet normalizedName = modelName.toLowerCase()\n\n\t// remove prefix before '/'\n\tif (normalizedName.includes('/')) {\n\t\tnormalizedName = normalizedName.split('/')[1]\n\t}\n\n\t// remove '_'\n\tnormalizedName = normalizedName.replace(/_/g, '')\n\n\t// remove '.'\n\tnormalizedName = normalizedName.replace(/\\./g, '')\n\n\treturn normalizedName\n}\n","/**\n * OpenAI Client implementation\n */\nimport { InvokeError, InvokeErrorType } from './errors'\nimport type { InvokeOptions, InvokeResult, LLMClient, LLMConfig, Message, Tool } from './types'\nimport { modelPatch, zodToOpenAITool } from './utils'\n\n/**\n * Client for OpenAI compatible APIs\n */\nexport class OpenAIClient implements LLMClient {\n\tconfig: Required<LLMConfig>\n\tprivate fetch: typeof globalThis.fetch\n\n\tconstructor(config: Required<LLMConfig>) {\n\t\tthis.config = config\n\t\tthis.fetch = config.customFetch\n\t}\n\n\tasync invoke(\n\t\tmessages: Message[],\n\t\ttools: Record<string, Tool>,\n\t\tabortSignal?: AbortSignal,\n\t\toptions?: InvokeOptions\n\t): Promise<InvokeResult> {\n\t\t// 1. Convert tools to OpenAI format\n\t\tconst openaiTools = Object.entries(tools).map(([name, t]) => zodToOpenAITool(name, t))\n\n\t\t// Build request body\n\t\tconst requestBody: Record<string, unknown> = {\n\t\t\tmodel: this.config.model,\n\t\t\ttemperature: this.config.temperature,\n\t\t\tmessages,\n\t\t\ttools: openaiTools,\n\t\t\tparallel_tool_calls: false,\n\t\t\t// Require tool call: specific tool if provided, otherwise any tool\n\t\t\ttool_choice: options?.toolChoiceName\n\t\t\t\t? { type: 'function', function: { name: options.toolChoiceName } }\n\t\t\t\t: 'required',\n\t\t}\n\n\t\t// 2. 
Call API\n\t\tlet response: Response\n\t\ttry {\n\t\t\tresponse = await this.fetch(`${this.config.baseURL}/chat/completions`, {\n\t\t\t\tmethod: 'POST',\n\t\t\t\theaders: {\n\t\t\t\t\t'Content-Type': 'application/json',\n\t\t\t\t\tAuthorization: `Bearer ${this.config.apiKey}`,\n\t\t\t\t},\n\t\t\t\tbody: JSON.stringify(modelPatch(requestBody)),\n\t\t\t\tsignal: abortSignal,\n\t\t\t})\n\t\t} catch (error: unknown) {\n\t\t\tconsole.error(error)\n\t\t\tthrow new InvokeError(InvokeErrorType.NETWORK_ERROR, 'Network request failed', error)\n\t\t}\n\n\t\t// 3. Handle HTTP errors\n\t\tif (!response.ok) {\n\t\t\tconst errorData = await response.json().catch()\n\t\t\tconst errorMessage =\n\t\t\t\t(errorData as { error?: { message?: string } }).error?.message || response.statusText\n\n\t\t\tif (response.status === 401 || response.status === 403) {\n\t\t\t\tthrow new InvokeError(\n\t\t\t\t\tInvokeErrorType.AUTH_ERROR,\n\t\t\t\t\t`Authentication failed: ${errorMessage}`,\n\t\t\t\t\terrorData\n\t\t\t\t)\n\t\t\t}\n\t\t\tif (response.status === 429) {\n\t\t\t\tthrow new InvokeError(\n\t\t\t\t\tInvokeErrorType.RATE_LIMIT,\n\t\t\t\t\t`Rate limit exceeded: ${errorMessage}`,\n\t\t\t\t\terrorData\n\t\t\t\t)\n\t\t\t}\n\t\t\tif (response.status >= 500) {\n\t\t\t\tthrow new InvokeError(\n\t\t\t\t\tInvokeErrorType.SERVER_ERROR,\n\t\t\t\t\t`Server error: ${errorMessage}`,\n\t\t\t\t\terrorData\n\t\t\t\t)\n\t\t\t}\n\t\t\tthrow new InvokeError(\n\t\t\t\tInvokeErrorType.UNKNOWN,\n\t\t\t\t`HTTP ${response.status}: ${errorMessage}`,\n\t\t\t\terrorData\n\t\t\t)\n\t\t}\n\n\t\t// 4. Parse and validate response\n\t\tconst data = await response.json()\n\n\t\tconst choice = data.choices?.[0]\n\t\tif (!choice) {\n\t\t\tthrow new InvokeError(InvokeErrorType.UNKNOWN, 'No choices in response', data)\n\t\t}\n\n\t\t// Check finish_reason\n\t\tswitch (choice.finish_reason) {\n\t\t\tcase 'tool_calls':\n\t\t\tcase 'function_call': // gemini\n\t\t\tcase 'stop': // some models use this even with tool calls\n\t\t\t\tbreak\n\t\t\tcase 'length':\n\t\t\t\tthrow new InvokeError(\n\t\t\t\t\tInvokeErrorType.CONTEXT_LENGTH,\n\t\t\t\t\t'Response truncated: max tokens reached'\n\t\t\t\t)\n\t\t\tcase 'content_filter':\n\t\t\t\tthrow new InvokeError(InvokeErrorType.CONTENT_FILTER, 'Content filtered by safety system')\n\t\t\tdefault:\n\t\t\t\tthrow new InvokeError(\n\t\t\t\t\tInvokeErrorType.UNKNOWN,\n\t\t\t\t\t`Unexpected finish_reason: ${choice.finish_reason}`\n\t\t\t\t)\n\t\t}\n\n\t\t// Apply normalizeResponse if provided (for fixing format issues automatically)\n\t\tconst normalizedData = options?.normalizeResponse ? 
options.normalizeResponse(data) : data\n\t\tconst normalizedChoice = (normalizedData as any).choices?.[0]\n\n\t\t// Get tool name from response\n\t\tconst toolCallName = normalizedChoice?.message?.tool_calls?.[0]?.function?.name\n\t\tif (!toolCallName) {\n\t\t\tthrow new InvokeError(\n\t\t\t\tInvokeErrorType.NO_TOOL_CALL,\n\t\t\t\t'No tool call found in response',\n\t\t\t\tnormalizedData\n\t\t\t)\n\t\t}\n\n\t\tconst tool = tools[toolCallName]\n\t\tif (!tool) {\n\t\t\tthrow new InvokeError(\n\t\t\t\tInvokeErrorType.UNKNOWN,\n\t\t\t\t`Tool \"${toolCallName}\" not found in tools`,\n\t\t\t\tnormalizedData\n\t\t\t)\n\t\t}\n\n\t\t// Extract and parse tool arguments\n\t\tconst argString = normalizedChoice.message?.tool_calls?.[0]?.function?.arguments\n\t\tif (!argString) {\n\t\t\tthrow new InvokeError(\n\t\t\t\tInvokeErrorType.INVALID_TOOL_ARGS,\n\t\t\t\t'No tool call arguments found',\n\t\t\t\tnormalizedData\n\t\t\t)\n\t\t}\n\n\t\tlet parsedArgs: unknown\n\t\ttry {\n\t\t\tparsedArgs = JSON.parse(argString)\n\t\t} catch (error) {\n\t\t\tthrow new InvokeError(\n\t\t\t\tInvokeErrorType.INVALID_TOOL_ARGS,\n\t\t\t\t'Failed to parse tool arguments as JSON',\n\t\t\t\terror\n\t\t\t)\n\t\t}\n\n\t\t// Validate with schema\n\t\tconst validation = tool.inputSchema.safeParse(parsedArgs)\n\t\tif (!validation.success) {\n\t\t\tconsole.error(validation.error)\n\t\t\tthrow new InvokeError(\n\t\t\t\tInvokeErrorType.INVALID_TOOL_ARGS,\n\t\t\t\t'Tool arguments validation failed',\n\t\t\t\tvalidation.error\n\t\t\t)\n\t\t}\n\t\tconst toolInput = validation.data\n\n\t\t// 5. Execute tool\n\t\tlet toolResult: unknown\n\t\ttry {\n\t\t\ttoolResult = await tool.execute(toolInput)\n\t\t} catch (e) {\n\t\t\tthrow new InvokeError(\n\t\t\t\tInvokeErrorType.TOOL_EXECUTION_ERROR,\n\t\t\t\t`Tool execution failed: ${(e as Error).message}`,\n\t\t\t\te\n\t\t\t)\n\t\t}\n\n\t\t// Return result\n\t\treturn {\n\t\t\ttoolCall: {\n\t\t\t\tname: toolCallName,\n\t\t\t\targs: toolInput,\n\t\t\t},\n\t\t\ttoolResult,\n\t\t\tusage: {\n\t\t\t\tpromptTokens: data.usage?.prompt_tokens ?? 0,\n\t\t\t\tcompletionTokens: data.usage?.completion_tokens ?? 0,\n\t\t\t\ttotalTokens: data.usage?.total_tokens ?? 0,\n\t\t\t\tcachedTokens: data.usage?.prompt_tokens_details?.cached_tokens,\n\t\t\t\treasoningTokens: data.usage?.completion_tokens_details?.reasoning_tokens,\n\t\t\t},\n\t\t\trawResponse: data,\n\t\t}\n\t}\n}\n","// Internal constants\nexport const LLM_MAX_RETRIES = 2\nexport const DEFAULT_TEMPERATURE = 0.7 // higher randomness helps auto-recovery\n","import { OpenAIClient } from './OpenAIClient'\nimport { DEFAULT_TEMPERATURE, LLM_MAX_RETRIES } from './constants'\nimport { InvokeError } from './errors'\nimport type { InvokeOptions, InvokeResult, LLMClient, LLMConfig, Message, Tool } from './types'\n\nexport type { InvokeOptions, InvokeResult, LLMClient, LLMConfig, Message, Tool }\n\nexport function parseLLMConfig(config: LLMConfig): Required<LLMConfig> {\n\t// Runtime validation as defensive programming (types already guarantee these)\n\tif (!config.baseURL || !config.apiKey || !config.model) {\n\t\tthrow new Error(\n\t\t\t'[PageAgent] LLM configuration required. Please provide: baseURL, apiKey, model. ' +\n\t\t\t\t'See: https://alibaba.github.io/page-agent/#/docs/features/models'\n\t\t)\n\t}\n\n\treturn {\n\t\tbaseURL: config.baseURL,\n\t\tapiKey: config.apiKey,\n\t\tmodel: config.model,\n\t\ttemperature: config.temperature ?? DEFAULT_TEMPERATURE,\n\t\tmaxRetries: config.maxRetries ?? LLM_MAX_RETRIES,\n\t\tcustomFetch: (config.customFetch ?? 
fetch).bind(globalThis), // fetch will be illegal unless bound\n\t}\n}\n\nexport class LLM extends EventTarget {\n\tconfig: Required<LLMConfig>\n\tclient: LLMClient\n\n\tconstructor(config: LLMConfig) {\n\t\tsuper()\n\t\tthis.config = parseLLMConfig(config)\n\n\t\t// Default to OpenAI client\n\t\tthis.client = new OpenAIClient(this.config)\n\t}\n\n\t/**\n\t * - call llm api *once*\n\t * - invoke tool call *once*\n\t * - return the result of the tool\n\t */\n\tasync invoke(\n\t\tmessages: Message[],\n\t\ttools: Record<string, Tool>,\n\t\tabortSignal: AbortSignal,\n\t\toptions?: InvokeOptions\n\t): Promise<InvokeResult> {\n\t\treturn await withRetry(\n\t\t\tasync () => {\n\t\t\t\tconst result = await this.client.invoke(messages, tools, abortSignal, options)\n\n\t\t\t\treturn result\n\t\t\t},\n\t\t\t// retry settings\n\t\t\t{\n\t\t\t\tmaxRetries: this.config.maxRetries,\n\t\t\t\tonRetry: (current: number) => {\n\t\t\t\t\tthis.dispatchEvent(\n\t\t\t\t\t\tnew CustomEvent('retry', { detail: { current, max: this.config.maxRetries } })\n\t\t\t\t\t)\n\t\t\t\t},\n\t\t\t\tonError: (error: Error) => {\n\t\t\t\t\tthis.dispatchEvent(new CustomEvent('error', { detail: { error } }))\n\t\t\t\t},\n\t\t\t}\n\t\t)\n\t}\n}\n\nasync function withRetry<T>(\n\tfn: () => Promise<T>,\n\tsettings: {\n\t\tmaxRetries: number\n\t\tonRetry: (retries: number) => void\n\t\tonError: (error: Error) => void\n\t}\n): Promise<T> {\n\tlet retries = 0\n\tlet lastError: Error | null = null\n\twhile (retries <= settings.maxRetries) {\n\t\tif (retries > 0) {\n\t\t\tsettings.onRetry(retries)\n\t\t\tawait new Promise((resolve) => setTimeout(resolve, 100))\n\t\t}\n\n\t\ttry {\n\t\t\treturn await fn()\n\t\t} catch (error: unknown) {\n\t\t\tconsole.error(error)\n\t\t\tsettings.onError(error as Error)\n\n\t\t\t// do not retry if aborted by user\n\t\t\tif ((error as { name?: string })?.name === 'AbortError') throw error\n\n\t\t\t// do not retry if error is not retryable (InvokeError)\n\t\t\tif (error instanceof InvokeError && !error.retryable) throw error\n\n\t\t\tlastError = error as Error\n\t\t\tretries++\n\n\t\t\tawait new Promise((resolve) => setTimeout(resolve, 100))\n\t\t}\n\t}\n\n\tthrow 
lastError!\n}\n"],"names":[],"mappings":";;;;AAIO,MAAM,kBAAkB;AAAA;AAAA,EAE9B,eAAe;AAAA;AAAA,EACf,YAAY;AAAA;AAAA,EACZ,cAAc;AAAA;AAAA,EACd,cAAc;AAAA;AAAA,EACd,mBAAmB;AAAA;AAAA,EACnB,sBAAsB;AAAA;AAAA,EAEtB,SAAS;AAAA;AAAA,EAGT,YAAY;AAAA;AAAA,EACZ,gBAAgB;AAAA;AAAA,EAChB,gBAAgB;AAAA;AACjB;AAIO,MAAM,eAAN,MAAM,qBAAoB,MAAM;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,MAAuB,SAAiB,UAAoB;AACvE,UAAM,OAAO;AACb,SAAK,OAAO;AACZ,SAAK,OAAO;AACZ,SAAK,YAAY,KAAK,YAAY,IAAI;AACtC,SAAK,WAAW;AAAA,EACjB;AAAA,EAEQ,YAAY,MAAgC;AACnD,UAAM,iBAAoC;AAAA,MACzC,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,IAAA;AAEjB,WAAO,eAAe,SAAS,IAAI;AAAA,EACpC;AACD;AA1BuC;AAAhC,IAAM,cAAN;ACfP,SAAS,MAAM,SAAiB;AAC/B,UAAQ,MAAM,MAAM,KAAK,OAAO,GAAG,OAAO;AAC3C;AAFS;AAQF,SAAS,gBAAgB,MAAc,MAAY;AACzD,SAAO;AAAA,IACN,MAAM;AAAA,IACN,UAAU;AAAA,MACT;AAAA,MACA,aAAa,KAAK;AAAA,MAClB,YAAY,EAAE,aAAa,KAAK,aAAa,EAAE,QAAQ,eAAe;AAAA,IAAA;AAAA,EACvE;AAEF;AATgB;AAcT,SAAS,WAAW,MAA2B;AACrD,QAAM,QAAgB,KAAK,SAAS;AACpC,MAAI,CAAC,MAAO,QAAO;AAEnB,QAAM,YAAY,mBAAmB,KAAK;AAE1C,MAAI,UAAU,WAAW,MAAM,GAAG;AACjC,UAAM,6DAA6D;AACnE,SAAK,cAAc,KAAK,IAAI,KAAK,eAAe,GAAG,CAAG;AAAA,EACvD;AAEA,MAAI,UAAU,WAAW,QAAQ,GAAG;AACnC,UAAM,yCAAyC;AAC/C,SAAK,WAAW,EAAE,MAAM,WAAA;AAGxB,QAAI,KAAK,gBAAgB,YAAY;AAEpC,YAAM,0EAA0E;AAChF,WAAK,cAAc,EAAE,MAAM,MAAA;AAAA,IAC5B,WAAW,KAAK,aAAa,UAAU,MAAM;AAE5C,YAAM,mDAAmD;AACzD,WAAK,cAAc,EAAE,MAAM,QAAQ,MAAM,KAAK,YAAY,SAAS,KAAA;AAAA,IACpE;AAAA,EACD;AAEA,MAAI,UAAU,WAAW,MAAM,GAAG;AACjC,UAAM,2CAA2C;AACjD,WAAO,KAAK;AACZ,UAAM,qDAAqD;AAC3D,SAAK,WAAW,EAAE,MAAM,YAAY,QAAQ,UAAA;AAC5C,SAAK,YAAY,EAAE,SAAS,OAAO,QAAQ,MAAA;AAAA,EAC5C;AAEA,MAAI,UAAU,WAAW,KAAK,GAAG;AAChC,UAAM,0CAA0C;AAChD,SAAK,YAAY;AAEjB,QAAI,UAAU,WAAW,QAAQ,GAAG;AACnC,YAAM,0CAA0C;AAChD,WAAK,mBAAmB;AAAA,IACzB,WAAW,UAAU,WAAW,QAAQ,GAAG;AAC1C,YAAM,0CAA0C;AAChD,WAAK,mBAAmB;AAAA,IACzB,WAAW,UAAU,WAAW,OAAO,GAAG;AACzC,YAAM,mDAAmD;AACzD,WAAK,mBAAmB;AAAA,IACzB;AAAA,EACD;AAEA,MAAI,UAAU,WAAW,QAAQ,GAAG;AACnC,UAAM,wDAAwD;AAC9D,SAAK,mBAAmB;AAAA,EACzB;AAEA,SAAO;AACR;AAzDgB;AA0EhB,SAAS,mBAAmB,WAA2B;AACtD,MAAI,iBAAiB,UAAU,YAAA;AAG/B,MAAI,eAAe,SAAS,GAAG,GAAG;AACjC,qBAAiB,eAAe,MAAM,GAAG,EAAE,CAAC;AAAA,EAC7C;AAGA,mBAAiB,eAAe,QAAQ,MAAM,EAAE;AAGhD,mBAAiB,eAAe,QAAQ,OAAO,EAAE;AAEjD,SAAO;AACR;AAfS;AC9FF,MAAM,gBAAN,MAAM,cAAkC;AAAA,EAC9C;AAAA,EACQ;AAAA,EAER,YAAY,QAA6B;AACxC,SAAK,SAAS;AACd,SAAK,QAAQ,OAAO;AAAA,EACrB;AAAA,EAEA,MAAM,OACL,UACA,OACA,aACA,SACwB;AAExB,UAAM,cAAc,OAAO,QAAQ,KAAK,EAAE,IAAI,CAAC,CAAC,MAAM,CAAC,MAAM,gBAAgB,MAAM,CAAC,CAAC;AAGrF,UAAM,cAAuC;AAAA,MAC5C,OAAO,KAAK,OAAO;AAAA,MACnB,aAAa,KAAK,OAAO;AAAA,MACzB;AAAA,MACA,OAAO;AAAA,MACP,qBAAqB;AAAA;AAAA,MAErB,aAAa,SAAS,iBACnB,EAAE,MAAM,YAAY,UAAU,EAAE,MAAM,QAAQ,eAAA,EAAe,IAC7D;AAAA,IAAA;AAIJ,QAAI;AACJ,QAAI;AACH,iBAAW,MAAM,KAAK,MAAM,GAAG,KAAK,OAAO,OAAO,qBAAqB;AAAA,QACtE,QAAQ;AAAA,QACR,SAAS;AAAA,UACR,gBAAgB;AAAA,UAChB,eAAe,UAAU,KAAK,OAAO,MAAM;AAAA,QAAA;AAAA,QAE5C,MAAM,KAAK,UAAU,WAAW,WAAW,CAAC;AAAA,QAC5C,QAAQ;AAAA,MAAA,CACR;AAAA,IACF,SAAS,OAAgB;AACxB,cAAQ,MAAM,KAAK;AACnB,YAAM,IAAI,YAAY,gBAAgB,eAAe,0BAA0B,KAAK;AAAA,IACrF;AAGA,QAAI,CAAC,SAAS,IAAI;AACjB,YAAM,YAAY,MAAM,SAAS,KAAA,EAAO,MAAA;AACxC,YAAM,eACJ,UAA+C,OAAO,WAAW,SAAS;AAE5E,UAAI,SAAS,WAAW,OAAO,SAAS,WAAW,KAAK;AACvD,cAAM,IAAI;AAAA,UACT,gBAAgB;AAAA,UAChB,0BAA0B,YAAY;AAAA,UACtC;AAAA,QAAA;AAAA,MAEF;AACA,UAAI,SAAS,WAAW,KAAK;AAC5B,cAAM,IAAI;AAAA,UACT,gBAAgB;AAAA,UAChB,wBAAwB,YAAY;AAAA,UACpC;AAAA,QAAA;AAAA,MAEF;AACA,UAAI,SAAS,UAAU,KAAK;AAC3B,cAAM,IAAI;AAAA,UACT,gBAAgB;AAAA,UAChB,iBAAiB,YAAY;AAAA,UAC7B;AAAA,QAAA;AAAA,MAEF;AACA,YAAM,IAAI;AAAA,QACT,gBAAgB;AAAA,QAC
hB,QAAQ,SAAS,MAAM,KAAK,YAAY;AAAA,QACxC;AAAA,MAAA;AAAA,IAEF;AAGA,UAAM,OAAO,MAAM,SAAS,KAAA;AAE5B,UAAM,SAAS,KAAK,UAAU,CAAC;AAC/B,QAAI,CAAC,QAAQ;AACZ,YAAM,IAAI,YAAY,gBAAgB,SAAS,0BAA0B,IAAI;AAAA,IAC9E;AAGA,YAAQ,OAAO,eAAA;AAAA,MACd,KAAK;AAAA,MACL,KAAK;AAAA;AAAA,MACL,KAAK;AACJ;AAAA,MACD,KAAK;AACJ,cAAM,IAAI;AAAA,UACT,gBAAgB;AAAA,UAChB;AAAA,QAAA;AAAA,MAEF,KAAK;AACJ,cAAM,IAAI,YAAY,gBAAgB,gBAAgB,mCAAmC;AAAA,MAC1F;AACC,cAAM,IAAI;AAAA,UACT,gBAAgB;AAAA,UAChB,6BAA6B,OAAO,aAAa;AAAA,QAAA;AAAA,IAClD;AAIF,UAAM,iBAAiB,SAAS,oBAAoB,QAAQ,kBAAkB,IAAI,IAAI;AACtF,UAAM,mBAAoB,eAAuB,UAAU,CAAC;AAG5D,UAAM,eAAe,kBAAkB,SAAS,aAAa,CAAC,GAAG,UAAU;AAC3E,QAAI,CAAC,cAAc;AAClB,YAAM,IAAI;AAAA,QACT,gBAAgB;AAAA,QAChB;AAAA,QACA;AAAA,MAAA;AAAA,IAEF;AAEA,UAAM,OAAO,MAAM,YAAY;AAC/B,QAAI,CAAC,MAAM;AACV,YAAM,IAAI;AAAA,QACT,gBAAgB;AAAA,QAChB,SAAS,YAAY;AAAA,QACrB;AAAA,MAAA;AAAA,IAEF;AAGA,UAAM,YAAY,iBAAiB,SAAS,aAAa,CAAC,GAAG,UAAU;AACvE,QAAI,CAAC,WAAW;AACf,YAAM,IAAI;AAAA,QACT,gBAAgB;AAAA,QAChB;AAAA,QACA;AAAA,MAAA;AAAA,IAEF;AAEA,QAAI;AACJ,QAAI;AACH,mBAAa,KAAK,MAAM,SAAS;AAAA,IAClC,SAAS,OAAO;AACf,YAAM,IAAI;AAAA,QACT,gBAAgB;AAAA,QAChB;AAAA,QACA;AAAA,MAAA;AAAA,IAEF;AAGA,UAAM,aAAa,KAAK,YAAY,UAAU,UAAU;AACxD,QAAI,CAAC,WAAW,SAAS;AACxB,cAAQ,MAAM,WAAW,KAAK;AAC9B,YAAM,IAAI;AAAA,QACT,gBAAgB;AAAA,QAChB;AAAA,QACA,WAAW;AAAA,MAAA;AAAA,IAEb;AACA,UAAM,YAAY,WAAW;AAG7B,QAAI;AACJ,QAAI;AACH,mBAAa,MAAM,KAAK,QAAQ,SAAS;AAAA,IAC1C,SAAS,GAAG;AACX,YAAM,IAAI;AAAA,QACT,gBAAgB;AAAA,QAChB,0BAA2B,EAAY,OAAO;AAAA,QAC9C;AAAA,MAAA;AAAA,IAEF;AAGA,WAAO;AAAA,MACN,UAAU;AAAA,QACT,MAAM;AAAA,QACN,MAAM;AAAA,MAAA;AAAA,MAEP;AAAA,MACA,OAAO;AAAA,QACN,cAAc,KAAK,OAAO,iBAAiB;AAAA,QAC3C,kBAAkB,KAAK,OAAO,qBAAqB;AAAA,QACnD,aAAa,KAAK,OAAO,gBAAgB;AAAA,QACzC,cAAc,KAAK,OAAO,uBAAuB;AAAA,QACjD,iBAAiB,KAAK,OAAO,2BAA2B;AAAA,MAAA;AAAA,MAEzD,aAAa;AAAA,IAAA;AAAA,EAEf;AACD;AAnM+C;AAAxC,IAAM,eAAN;ACTA,MAAM,kBAAkB;AACxB,MAAM,sBAAsB;ACK5B,SAAS,eAAe,QAAwC;AAEtE,MAAI,CAAC,OAAO,WAAW,CAAC,OAAO,UAAU,CAAC,OAAO,OAAO;AACvD,UAAM,IAAI;AAAA,MACT;AAAA,IAAA;AAAA,EAGF;AAEA,SAAO;AAAA,IACN,SAAS,OAAO;AAAA,IAChB,QAAQ,OAAO;AAAA,IACf,OAAO,OAAO;AAAA,IACd,aAAa,OAAO,eAAe;AAAA,IACnC,YAAY,OAAO,cAAc;AAAA,IACjC,cAAc,OAAO,eAAe,OAAO,KAAK,UAAU;AAAA;AAAA,EAAA;AAE5D;AAjBgB;AAmBT,MAAM,OAAN,MAAM,aAAY,YAAY;AAAA,EACpC;AAAA,EACA;AAAA,EAEA,YAAY,QAAmB;AAC9B,UAAA;AACA,SAAK,SAAS,eAAe,MAAM;AAGnC,SAAK,SAAS,IAAI,aAAa,KAAK,MAAM;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,OACL,UACA,OACA,aACA,SACwB;AACxB,WAAO,MAAM;AAAA,MACZ,YAAY;AACX,cAAM,SAAS,MAAM,KAAK,OAAO,OAAO,UAAU,OAAO,aAAa,OAAO;AAE7E,eAAO;AAAA,MACR;AAAA;AAAA,MAEA;AAAA,QACC,YAAY,KAAK,OAAO;AAAA,QACxB,SAAS,wBAAC,YAAoB;AAC7B,eAAK;AAAA,YACJ,IAAI,YAAY,SAAS,EAAE,QAAQ,EAAE,SAAS,KAAK,KAAK,OAAO,aAAW,CAAG;AAAA,UAAA;AAAA,QAE/E,GAJS;AAAA,QAKT,SAAS,wBAAC,UAAiB;AAC1B,eAAK,cAAc,IAAI,YAAY,SAAS,EAAE,QAAQ,EAAE,MAAA,EAAM,CAAG,CAAC;AAAA,QACnE,GAFS;AAAA,MAET;AAAA,IACD;AAAA,EAEF;AACD;AA3CqC;AAA9B,IAAM,MAAN;AA6CP,eAAe,UACd,IACA,UAKa;AACb,MAAI,UAAU;AACd,MAAI,YAA0B;AAC9B,SAAO,WAAW,SAAS,YAAY;AACtC,QAAI,UAAU,GAAG;AAChB,eAAS,QAAQ,OAAO;AACxB,YAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,GAAG,CAAC;AAAA,IACxD;AAEA,QAAI;AACH,aAAO,MAAM,GAAA;AAAA,IACd,SAAS,OAAgB;AACxB,cAAQ,MAAM,KAAK;AACnB,eAAS,QAAQ,KAAc;AAG/B,UAAK,OAA6B,SAAS,aAAc,OAAM;AAG/D,UAAI,iBAAiB,eAAe,CAAC,MAAM,UAAW,OAAM;AAE5D,kBAAY;AACZ;AAEA,YAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,GAAG,CAAC;AAAA,IACxD;AAAA,EACD;AAEA,QAAM;AACP;AApCe;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@page-agent/llms",
-  "version": "0.0.24",
+  "version": "0.2.0",
   "type": "module",
   "main": "./dist/lib/page-agent-llms.js",
   "module": "./dist/lib/page-agent-llms.js",