phi-code-ai 0.56.3 → 0.74.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (187)
  1. package/README.md +258 -73
  2. package/dist/api-registry.d.ts.map +1 -1
  3. package/dist/api-registry.js.map +1 -1
  4. package/dist/bedrock-provider.d.ts.map +1 -1
  5. package/dist/cli.d.ts.map +1 -1
  6. package/dist/cli.js +1 -1
  7. package/dist/cli.js.map +1 -1
  8. package/dist/env-api-keys.d.ts +9 -0
  9. package/dist/env-api-keys.d.ts.map +1 -1
  10. package/dist/env-api-keys.js +96 -30
  11. package/dist/env-api-keys.js.map +1 -1
  12. package/dist/image-models.d.ts +10 -0
  13. package/dist/image-models.d.ts.map +1 -0
  14. package/dist/image-models.generated.d.ts +305 -0
  15. package/dist/image-models.generated.d.ts.map +1 -0
  16. package/dist/image-models.generated.js +307 -0
  17. package/dist/image-models.generated.js.map +1 -0
  18. package/dist/image-models.js +23 -0
  19. package/dist/image-models.js.map +1 -0
  20. package/dist/images-api-registry.d.ts +14 -0
  21. package/dist/images-api-registry.d.ts.map +1 -0
  22. package/dist/images-api-registry.js +22 -0
  23. package/dist/images-api-registry.js.map +1 -0
  24. package/dist/images.d.ts +4 -0
  25. package/dist/images.d.ts.map +1 -0
  26. package/dist/images.js +14 -0
  27. package/dist/images.js.map +1 -0
  28. package/dist/index.d.ts +20 -11
  29. package/dist/index.d.ts.map +1 -1
  30. package/dist/index.js +8 -9
  31. package/dist/index.js.map +1 -1
  32. package/dist/models.d.ts +3 -9
  33. package/dist/models.d.ts.map +1 -1
  34. package/dist/models.generated.d.ts +6525 -2231
  35. package/dist/models.generated.d.ts.map +1 -1
  36. package/dist/models.generated.js +8992 -5524
  37. package/dist/models.generated.js.map +1 -1
  38. package/dist/models.js +28 -12
  39. package/dist/models.js.map +1 -1
  40. package/dist/oauth.d.ts.map +1 -1
  41. package/dist/providers/amazon-bedrock.d.ts +23 -0
  42. package/dist/providers/amazon-bedrock.d.ts.map +1 -1
  43. package/dist/providers/amazon-bedrock.js +206 -44
  44. package/dist/providers/amazon-bedrock.js.map +1 -1
  45. package/dist/providers/anthropic.d.ts +23 -2
  46. package/dist/providers/anthropic.d.ts.map +1 -1
  47. package/dist/providers/anthropic.js +294 -63
  48. package/dist/providers/anthropic.js.map +1 -1
  49. package/dist/providers/azure-openai-responses.d.ts.map +1 -1
  50. package/dist/providers/azure-openai-responses.js +47 -23
  51. package/dist/providers/azure-openai-responses.js.map +1 -1
  52. package/dist/providers/cloudflare.d.ts +13 -0
  53. package/dist/providers/cloudflare.d.ts.map +1 -0
  54. package/dist/providers/cloudflare.js +26 -0
  55. package/dist/providers/cloudflare.js.map +1 -0
  56. package/dist/providers/faux.d.ts +56 -0
  57. package/dist/providers/faux.d.ts.map +1 -0
  58. package/dist/providers/faux.js +368 -0
  59. package/dist/providers/faux.js.map +1 -0
  60. package/dist/providers/github-copilot-headers.d.ts.map +1 -1
  61. package/dist/providers/github-copilot-headers.js.map +1 -1
  62. package/dist/providers/google-shared.d.ts +7 -2
  63. package/dist/providers/google-shared.d.ts.map +1 -1
  64. package/dist/providers/google-shared.js +53 -24
  65. package/dist/providers/google-shared.js.map +1 -1
  66. package/dist/providers/google-vertex.d.ts +1 -1
  67. package/dist/providers/google-vertex.d.ts.map +1 -1
  68. package/dist/providers/google-vertex.js +87 -16
  69. package/dist/providers/google-vertex.js.map +1 -1
  70. package/dist/providers/google.d.ts +1 -1
  71. package/dist/providers/google.d.ts.map +1 -1
  72. package/dist/providers/google.js +57 -9
  73. package/dist/providers/google.js.map +1 -1
  74. package/dist/providers/images/openrouter.d.ts +3 -0
  75. package/dist/providers/images/openrouter.d.ts.map +1 -0
  76. package/dist/providers/images/openrouter.js +129 -0
  77. package/dist/providers/images/openrouter.js.map +1 -0
  78. package/dist/providers/images/register-builtins.d.ts +4 -0
  79. package/dist/providers/images/register-builtins.d.ts.map +1 -0
  80. package/dist/providers/images/register-builtins.js +34 -0
  81. package/dist/providers/images/register-builtins.js.map +1 -0
  82. package/dist/providers/mistral.d.ts +3 -0
  83. package/dist/providers/mistral.d.ts.map +1 -1
  84. package/dist/providers/mistral.js +49 -9
  85. package/dist/providers/mistral.js.map +1 -1
  86. package/dist/providers/openai-codex-responses.d.ts +21 -0
  87. package/dist/providers/openai-codex-responses.d.ts.map +1 -1
  88. package/dist/providers/openai-codex-responses.js +443 -86
  89. package/dist/providers/openai-codex-responses.js.map +1 -1
  90. package/dist/providers/openai-completions.d.ts +5 -1
  91. package/dist/providers/openai-completions.d.ts.map +1 -1
  92. package/dist/providers/openai-completions.js +460 -225
  93. package/dist/providers/openai-completions.js.map +1 -1
  94. package/dist/providers/openai-responses-shared.d.ts +1 -0
  95. package/dist/providers/openai-responses-shared.d.ts.map +1 -1
  96. package/dist/providers/openai-responses-shared.js +95 -45
  97. package/dist/providers/openai-responses-shared.js.map +1 -1
  98. package/dist/providers/openai-responses.d.ts.map +1 -1
  99. package/dist/providers/openai-responses.js +66 -44
  100. package/dist/providers/openai-responses.js.map +1 -1
  101. package/dist/providers/register-builtins.d.ts +27 -2
  102. package/dist/providers/register-builtins.d.ts.map +1 -1
  103. package/dist/providers/register-builtins.js +157 -52
  104. package/dist/providers/register-builtins.js.map +1 -1
  105. package/dist/providers/simple-options.d.ts.map +1 -1
  106. package/dist/providers/simple-options.js +5 -1
  107. package/dist/providers/simple-options.js.map +1 -1
  108. package/dist/providers/transform-messages.d.ts.map +1 -1
  109. package/dist/providers/transform-messages.js +63 -34
  110. package/dist/providers/transform-messages.js.map +1 -1
  111. package/dist/session-resources.d.ts +4 -0
  112. package/dist/session-resources.d.ts.map +1 -0
  113. package/dist/session-resources.js +22 -0
  114. package/dist/session-resources.js.map +1 -0
  115. package/dist/stream.d.ts.map +1 -1
  116. package/dist/stream.js.map +1 -1
  117. package/dist/types.d.ts +219 -15
  118. package/dist/types.d.ts.map +1 -1
  119. package/dist/types.js.map +1 -1
  120. package/dist/utils/diagnostics.d.ts +19 -0
  121. package/dist/utils/diagnostics.d.ts.map +1 -0
  122. package/dist/utils/diagnostics.js +25 -0
  123. package/dist/utils/diagnostics.js.map +1 -0
  124. package/dist/utils/event-stream.d.ts.map +1 -1
  125. package/dist/utils/event-stream.js +7 -3
  126. package/dist/utils/event-stream.js.map +1 -1
  127. package/dist/utils/hash.d.ts.map +1 -1
  128. package/dist/utils/hash.js.map +1 -1
  129. package/dist/utils/headers.d.ts +2 -0
  130. package/dist/utils/headers.d.ts.map +1 -0
  131. package/dist/utils/headers.js +8 -0
  132. package/dist/utils/headers.js.map +1 -0
  133. package/dist/utils/json-parse.d.ts +8 -1
  134. package/dist/utils/json-parse.d.ts.map +1 -1
  135. package/dist/utils/json-parse.js +89 -5
  136. package/dist/utils/json-parse.js.map +1 -1
  137. package/dist/utils/oauth/anthropic.d.ts +14 -6
  138. package/dist/utils/oauth/anthropic.d.ts.map +1 -1
  139. package/dist/utils/oauth/anthropic.js +288 -57
  140. package/dist/utils/oauth/anthropic.js.map +1 -1
  141. package/dist/utils/oauth/github-copilot.d.ts.map +1 -1
  142. package/dist/utils/oauth/github-copilot.js +23 -12
  143. package/dist/utils/oauth/github-copilot.js.map +1 -1
  144. package/dist/utils/oauth/index.d.ts +0 -4
  145. package/dist/utils/oauth/index.d.ts.map +1 -1
  146. package/dist/utils/oauth/index.js +0 -10
  147. package/dist/utils/oauth/index.js.map +1 -1
  148. package/dist/utils/oauth/oauth-page.d.ts +3 -0
  149. package/dist/utils/oauth/oauth-page.d.ts.map +1 -0
  150. package/dist/utils/oauth/oauth-page.js +105 -0
  151. package/dist/utils/oauth/oauth-page.js.map +1 -0
  152. package/dist/utils/oauth/openai-codex.d.ts.map +1 -1
  153. package/dist/utils/oauth/openai-codex.js +51 -46
  154. package/dist/utils/oauth/openai-codex.js.map +1 -1
  155. package/dist/utils/oauth/pkce.d.ts.map +1 -1
  156. package/dist/utils/oauth/pkce.js.map +1 -1
  157. package/dist/utils/oauth/types.d.ts +10 -0
  158. package/dist/utils/oauth/types.d.ts.map +1 -1
  159. package/dist/utils/oauth/types.js.map +1 -1
  160. package/dist/utils/overflow.d.ts +7 -3
  161. package/dist/utils/overflow.d.ts.map +1 -1
  162. package/dist/utils/overflow.js +46 -13
  163. package/dist/utils/overflow.js.map +1 -1
  164. package/dist/utils/sanitize-unicode.d.ts.map +1 -1
  165. package/dist/utils/sanitize-unicode.js.map +1 -1
  166. package/dist/utils/typebox-helpers.d.ts +1 -1
  167. package/dist/utils/typebox-helpers.d.ts.map +1 -1
  168. package/dist/utils/typebox-helpers.js +1 -1
  169. package/dist/utils/typebox-helpers.js.map +1 -1
  170. package/dist/utils/validation.d.ts.map +1 -1
  171. package/dist/utils/validation.js +247 -38
  172. package/dist/utils/validation.js.map +1 -1
  173. package/package.json +44 -14
  174. package/bedrock-provider.d.ts +0 -1
  175. package/bedrock-provider.js +0 -1
  176. package/dist/providers/google-gemini-cli.d.ts +0 -74
  177. package/dist/providers/google-gemini-cli.d.ts.map +0 -1
  178. package/dist/providers/google-gemini-cli.js +0 -754
  179. package/dist/providers/google-gemini-cli.js.map +0 -1
  180. package/dist/utils/oauth/google-antigravity.d.ts +0 -26
  181. package/dist/utils/oauth/google-antigravity.d.ts.map +0 -1
  182. package/dist/utils/oauth/google-antigravity.js +0 -373
  183. package/dist/utils/oauth/google-antigravity.js.map +0 -1
  184. package/dist/utils/oauth/google-gemini-cli.d.ts +0 -26
  185. package/dist/utils/oauth/google-gemini-cli.d.ts.map +0 -1
  186. package/dist/utils/oauth/google-gemini-cli.js +0 -478
  187. package/dist/utils/oauth/google-gemini-cli.js.map +0 -1
@@ -0,0 +1,26 @@
1
+ /** Workers AI direct endpoint. */
2
+ export const CLOUDFLARE_WORKERS_AI_BASE_URL = "https://api.cloudflare.com/client/v4/accounts/{CLOUDFLARE_ACCOUNT_ID}/ai/v1";
3
+ /** AI Gateway Unified API. https://developers.cloudflare.com/ai-gateway/usage/unified-api/ */
4
+ export const CLOUDFLARE_AI_GATEWAY_COMPAT_BASE_URL = "https://gateway.ai.cloudflare.com/v1/{CLOUDFLARE_ACCOUNT_ID}/{CLOUDFLARE_GATEWAY_ID}/compat";
5
+ /** AI Gateway → OpenAI passthrough. Used until /compat supports /v1/responses. */
6
+ export const CLOUDFLARE_AI_GATEWAY_OPENAI_BASE_URL = "https://gateway.ai.cloudflare.com/v1/{CLOUDFLARE_ACCOUNT_ID}/{CLOUDFLARE_GATEWAY_ID}/openai";
7
+ /** AI Gateway → Anthropic passthrough. */
8
+ export const CLOUDFLARE_AI_GATEWAY_ANTHROPIC_BASE_URL = "https://gateway.ai.cloudflare.com/v1/{CLOUDFLARE_ACCOUNT_ID}/{CLOUDFLARE_GATEWAY_ID}/anthropic";
/** True when the provider id is one of the two Cloudflare-backed providers. */
export function isCloudflareProvider(provider) {
    switch (provider) {
        case "cloudflare-workers-ai":
        case "cloudflare-ai-gateway":
            return true;
        default:
            return false;
    }
}
12
+ /** Substitute `{VAR}` placeholders in a Cloudflare baseUrl from process.env. */
13
+ export function resolveCloudflareBaseUrl(model) {
14
+ const url = model.baseUrl;
15
+ if (!url.includes("{"))
16
+ return url;
17
+ const baseUrl = url.replace(/\{([A-Z_][A-Z0-9_]*)\}/g, (_match, name) => {
18
+ const value = process.env[name];
19
+ if (!value) {
20
+ throw new Error(`${name} is required for provider ${model.provider} but is not set.`);
21
+ }
22
+ return value;
23
+ });
24
+ return baseUrl;
25
+ }
26
+ //# sourceMappingURL=cloudflare.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"cloudflare.js","sourceRoot":"","sources":["../../src/providers/cloudflare.ts"],"names":[],"mappings":"AAEA,kCAAkC;AAClC,MAAM,CAAC,MAAM,8BAA8B,GAC1C,6EAA6E,CAAC;AAE/E,8FAA8F;AAC9F,MAAM,CAAC,MAAM,qCAAqC,GACjD,6FAA6F,CAAC;AAE/F,oFAAkF;AAClF,MAAM,CAAC,MAAM,qCAAqC,GACjD,6FAA6F,CAAC;AAE/F,4CAA0C;AAC1C,MAAM,CAAC,MAAM,wCAAwC,GACpD,gGAAgG,CAAC;AAElG,MAAM,UAAU,oBAAoB,CAAC,QAAgB,EAAW;IAC/D,OAAO,QAAQ,KAAK,uBAAuB,IAAI,QAAQ,KAAK,uBAAuB,CAAC;AAAA,CACpF;AAED,gFAAgF;AAChF,MAAM,UAAU,wBAAwB,CAAC,KAAiB,EAAU;IACnE,MAAM,GAAG,GAAG,KAAK,CAAC,OAAO,CAAC;IAC1B,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC;QAAE,OAAO,GAAG,CAAC;IACnC,MAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,yBAAyB,EAAE,CAAC,MAAM,EAAE,IAAY,EAAE,EAAE,CAAC;QAChF,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QAChC,IAAI,CAAC,KAAK,EAAE,CAAC;YACZ,MAAM,IAAI,KAAK,CAAC,GAAG,IAAI,6BAA6B,KAAK,CAAC,QAAQ,kBAAkB,CAAC,CAAC;QACvF,CAAC;QACD,OAAO,KAAK,CAAC;IAAA,CACb,CAAC,CAAC;IACH,OAAO,OAAO,CAAC;AAAA,CACf","sourcesContent":["import type { Api, Model } from \"../types.js\";\n\n/** Workers AI direct endpoint. */\nexport const CLOUDFLARE_WORKERS_AI_BASE_URL =\n\t\"https://api.cloudflare.com/client/v4/accounts/{CLOUDFLARE_ACCOUNT_ID}/ai/v1\";\n\n/** AI Gateway Unified API. https://developers.cloudflare.com/ai-gateway/usage/unified-api/ */\nexport const CLOUDFLARE_AI_GATEWAY_COMPAT_BASE_URL =\n\t\"https://gateway.ai.cloudflare.com/v1/{CLOUDFLARE_ACCOUNT_ID}/{CLOUDFLARE_GATEWAY_ID}/compat\";\n\n/** AI Gateway → OpenAI passthrough. Used until /compat supports /v1/responses. */\nexport const CLOUDFLARE_AI_GATEWAY_OPENAI_BASE_URL =\n\t\"https://gateway.ai.cloudflare.com/v1/{CLOUDFLARE_ACCOUNT_ID}/{CLOUDFLARE_GATEWAY_ID}/openai\";\n\n/** AI Gateway → Anthropic passthrough. 
*/\nexport const CLOUDFLARE_AI_GATEWAY_ANTHROPIC_BASE_URL =\n\t\"https://gateway.ai.cloudflare.com/v1/{CLOUDFLARE_ACCOUNT_ID}/{CLOUDFLARE_GATEWAY_ID}/anthropic\";\n\nexport function isCloudflareProvider(provider: string): boolean {\n\treturn provider === \"cloudflare-workers-ai\" || provider === \"cloudflare-ai-gateway\";\n}\n\n/** Substitute `{VAR}` placeholders in a Cloudflare baseUrl from process.env. */\nexport function resolveCloudflareBaseUrl(model: Model<Api>): string {\n\tconst url = model.baseUrl;\n\tif (!url.includes(\"{\")) return url;\n\tconst baseUrl = url.replace(/\\{([A-Z_][A-Z0-9_]*)\\}/g, (_match, name: string) => {\n\t\tconst value = process.env[name];\n\t\tif (!value) {\n\t\t\tthrow new Error(`${name} is required for provider ${model.provider} but is not set.`);\n\t\t}\n\t\treturn value;\n\t});\n\treturn baseUrl;\n}\n"]}
@@ -0,0 +1,56 @@
import type { AssistantMessage, Context, Model, StreamOptions, TextContent, ThinkingContent, ToolCall } from "../types.js";
/** Declarative description of one fake model exposed by the faux provider. All fields except `id` fall back to defaults. */
export interface FauxModelDefinition {
    id: string;
    name?: string;
    reasoning?: boolean;
    input?: ("text" | "image")[];
    cost?: {
        input: number;
        output: number;
        cacheRead: number;
        cacheWrite: number;
    };
    contextWindow?: number;
    maxTokens?: number;
}
/** The content-block variants a faux assistant message can carry. */
export type FauxContentBlock = TextContent | ThinkingContent | ToolCall;
/** Build a `text` content block. */
export declare function fauxText(text: string): TextContent;
/** Build a `thinking` content block. */
export declare function fauxThinking(thinking: string): ThinkingContent;
/** Build a `toolCall` content block; an id is generated when `options.id` is omitted. */
export declare function fauxToolCall(name: string, arguments_: ToolCall["arguments"], options?: {
    id?: string;
}): ToolCall;
/** Build a complete assistant message from a string or content block(s), with optional stop/error/id/timestamp overrides. */
export declare function fauxAssistantMessage(content: string | FauxContentBlock | FauxContentBlock[], options?: {
    stopReason?: AssistantMessage["stopReason"];
    errorMessage?: string;
    responseId?: string;
    timestamp?: number;
}): AssistantMessage;
/** Callback that produces the next queued response; receives the call state so it can vary by call count. */
export type FauxResponseFactory = (context: Context, options: StreamOptions | undefined, state: {
    callCount: number;
}, model: Model<string>) => AssistantMessage | Promise<AssistantMessage>;
/** A queued response: either a fixed message or a factory invoked per request. */
export type FauxResponseStep = AssistantMessage | FauxResponseFactory;
/** Options for registerFauxProvider; all optional — defaults give a single zero-cost model. */
export interface RegisterFauxProviderOptions {
    api?: string;
    provider?: string;
    models?: FauxModelDefinition[];
    tokensPerSecond?: number;
    tokenSize?: {
        min?: number;
        max?: number;
    };
}
/** Handle returned by registerFauxProvider for queuing responses and tearing the provider down. */
export interface FauxProviderRegistration {
    api: string;
    models: [Model<string>, ...Model<string>[]];
    getModel(): Model<string>;
    getModel(modelId: string): Model<string> | undefined;
    state: {
        callCount: number;
    };
    setResponses: (responses: FauxResponseStep[]) => void;
    appendResponses: (responses: FauxResponseStep[]) => void;
    getPendingResponseCount: () => number;
    unregister: () => void;
}
/** Register an in-memory fake provider (for tests); returns a handle to queue responses and unregister. */
export declare function registerFauxProvider(options?: RegisterFauxProviderOptions): FauxProviderRegistration;
//# sourceMappingURL=faux.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"faux.d.ts","sourceRoot":"","sources":["../../src/providers/faux.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EACX,gBAAgB,EAEhB,OAAO,EAGP,KAAK,EAGL,aAAa,EACb,WAAW,EACX,eAAe,EACf,QAAQ,EAGR,MAAM,aAAa,CAAC;AAoBrB,MAAM,WAAW,mBAAmB;IACnC,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,KAAK,CAAC,EAAE,CAAC,MAAM,GAAG,OAAO,CAAC,EAAE,CAAC;IAC7B,IAAI,CAAC,EAAE;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAC;QAAC,SAAS,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAA;KAAE,CAAC;IAChF,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,SAAS,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,MAAM,gBAAgB,GAAG,WAAW,GAAG,eAAe,GAAG,QAAQ,CAAC;AAExE,wBAAgB,QAAQ,CAAC,IAAI,EAAE,MAAM,GAAG,WAAW,CAElD;AAED,wBAAgB,YAAY,CAAC,QAAQ,EAAE,MAAM,GAAG,eAAe,CAE9D;AAED,wBAAgB,YAAY,CAAC,IAAI,EAAE,MAAM,EAAE,UAAU,EAAE,QAAQ,CAAC,WAAW,CAAC,EAAE,OAAO,GAAE;IAAE,EAAE,CAAC,EAAE,MAAM,CAAA;CAAO,GAAG,QAAQ,CAOrH;AASD,wBAAgB,oBAAoB,CACnC,OAAO,EAAE,MAAM,GAAG,gBAAgB,GAAG,gBAAgB,EAAE,EACvD,OAAO,GAAE;IACR,UAAU,CAAC,EAAE,gBAAgB,CAAC,YAAY,CAAC,CAAC;IAC5C,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;CACd,GACJ,gBAAgB,CAalB;AAED,MAAM,MAAM,mBAAmB,GAAG,CACjC,OAAO,EAAE,OAAO,EAChB,OAAO,EAAE,aAAa,GAAG,SAAS,EAClC,KAAK,EAAE;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,EAC5B,KAAK,EAAE,KAAK,CAAC,MAAM,CAAC,KAChB,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC,CAAC;AAElD,MAAM,MAAM,gBAAgB,GAAG,gBAAgB,GAAG,mBAAmB,CAAC;AAEtE,MAAM,WAAW,2BAA2B;IAC3C,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,MAAM,CAAC,EAAE,mBAAmB,EAAE,CAAC;IAC/B,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,SAAS,CAAC,EAAE;QACX,GAAG,CAAC,EAAE,MAAM,CAAC;QACb,GAAG,CAAC,EAAE,MAAM,CAAC;KACb,CAAC;CACF;AAED,MAAM,WAAW,wBAAwB;IACxC,GAAG,EAAE,MAAM,CAAC;IACZ,MAAM,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,GAAG,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC;IAC5C,QAAQ,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC;IAC1B,QAAQ,CAAC,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,GAAG,SAAS,CAAC;IACrD,KAAK,EAAE;QAAE,SAAS,EAAE,MAAM,CAAA;KAAE,CAAC;IAC7B,YAAY,EAAE,CAAC,SAAS,EAAE,gBAAgB,EAAE,KAAK,IAAI,CAAC;I
ACtD,eAAe,EAAE,CAAC,SAAS,EAAE,gBAAgB,EAAE,KAAK,IAAI,CAAC;IACzD,uBAAuB,EAAE,MAAM,MAAM,CAAC;IACtC,UAAU,EAAE,MAAM,IAAI,CAAC;CACvB;AAyQD,wBAAgB,oBAAoB,CAAC,OAAO,GAAE,2BAAgC,GAAG,wBAAwB,CA4GxG","sourcesContent":["import { registerApiProvider, unregisterApiProviders } from \"../api-registry.js\";\nimport type {\n\tAssistantMessage,\n\tAssistantMessageEventStream,\n\tContext,\n\tImageContent,\n\tMessage,\n\tModel,\n\tSimpleStreamOptions,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tToolCall,\n\tToolResultMessage,\n\tUsage,\n} from \"../types.js\";\nimport { createAssistantMessageEventStream } from \"../utils/event-stream.js\";\n\nconst DEFAULT_API = \"faux\";\nconst DEFAULT_PROVIDER = \"faux\";\nconst DEFAULT_MODEL_ID = \"faux-1\";\nconst DEFAULT_MODEL_NAME = \"Faux Model\";\nconst DEFAULT_BASE_URL = \"http://localhost:0\";\nconst DEFAULT_MIN_TOKEN_SIZE = 3;\nconst DEFAULT_MAX_TOKEN_SIZE = 5;\n\nconst DEFAULT_USAGE: Usage = {\n\tinput: 0,\n\toutput: 0,\n\tcacheRead: 0,\n\tcacheWrite: 0,\n\ttotalTokens: 0,\n\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n};\n\nexport interface FauxModelDefinition {\n\tid: string;\n\tname?: string;\n\treasoning?: boolean;\n\tinput?: (\"text\" | \"image\")[];\n\tcost?: { input: number; output: number; cacheRead: number; cacheWrite: number };\n\tcontextWindow?: number;\n\tmaxTokens?: number;\n}\n\nexport type FauxContentBlock = TextContent | ThinkingContent | ToolCall;\n\nexport function fauxText(text: string): TextContent {\n\treturn { type: \"text\", text };\n}\n\nexport function fauxThinking(thinking: string): ThinkingContent {\n\treturn { type: \"thinking\", thinking };\n}\n\nexport function fauxToolCall(name: string, arguments_: ToolCall[\"arguments\"], options: { id?: string } = {}): ToolCall {\n\treturn {\n\t\ttype: \"toolCall\",\n\t\tid: options.id ?? 
randomId(\"tool\"),\n\t\tname,\n\t\targuments: arguments_,\n\t};\n}\n\nfunction normalizeFauxAssistantContent(content: string | FauxContentBlock | FauxContentBlock[]): FauxContentBlock[] {\n\tif (typeof content === \"string\") {\n\t\treturn [fauxText(content)];\n\t}\n\treturn Array.isArray(content) ? content : [content];\n}\n\nexport function fauxAssistantMessage(\n\tcontent: string | FauxContentBlock | FauxContentBlock[],\n\toptions: {\n\t\tstopReason?: AssistantMessage[\"stopReason\"];\n\t\terrorMessage?: string;\n\t\tresponseId?: string;\n\t\ttimestamp?: number;\n\t} = {},\n): AssistantMessage {\n\treturn {\n\t\trole: \"assistant\",\n\t\tcontent: normalizeFauxAssistantContent(content),\n\t\tapi: DEFAULT_API,\n\t\tprovider: DEFAULT_PROVIDER,\n\t\tmodel: DEFAULT_MODEL_ID,\n\t\tusage: DEFAULT_USAGE,\n\t\tstopReason: options.stopReason ?? \"stop\",\n\t\terrorMessage: options.errorMessage,\n\t\tresponseId: options.responseId,\n\t\ttimestamp: options.timestamp ?? Date.now(),\n\t};\n}\n\nexport type FauxResponseFactory = (\n\tcontext: Context,\n\toptions: StreamOptions | undefined,\n\tstate: { callCount: number },\n\tmodel: Model<string>,\n) => AssistantMessage | Promise<AssistantMessage>;\n\nexport type FauxResponseStep = AssistantMessage | FauxResponseFactory;\n\nexport interface RegisterFauxProviderOptions {\n\tapi?: string;\n\tprovider?: string;\n\tmodels?: FauxModelDefinition[];\n\ttokensPerSecond?: number;\n\ttokenSize?: {\n\t\tmin?: number;\n\t\tmax?: number;\n\t};\n}\n\nexport interface FauxProviderRegistration {\n\tapi: string;\n\tmodels: [Model<string>, ...Model<string>[]];\n\tgetModel(): Model<string>;\n\tgetModel(modelId: string): Model<string> | undefined;\n\tstate: { callCount: number };\n\tsetResponses: (responses: FauxResponseStep[]) => void;\n\tappendResponses: (responses: FauxResponseStep[]) => void;\n\tgetPendingResponseCount: () => number;\n\tunregister: () => void;\n}\n\nfunction estimateTokens(text: string): number {\n\treturn 
Math.ceil(text.length / 4);\n}\n\nfunction randomId(prefix: string): string {\n\treturn `${prefix}:${Date.now()}:${Math.random().toString(36).slice(2)}`;\n}\n\nfunction contentToText(content: string | Array<TextContent | ImageContent>): string {\n\tif (typeof content === \"string\") {\n\t\treturn content;\n\t}\n\treturn content\n\t\t.map((block) => {\n\t\t\tif (block.type === \"text\") {\n\t\t\t\treturn block.text;\n\t\t\t}\n\t\t\treturn `[image:${block.mimeType}:${block.data.length}]`;\n\t\t})\n\t\t.join(\"\\n\");\n}\n\nfunction assistantContentToText(content: Array<TextContent | ThinkingContent | ToolCall>): string {\n\treturn content\n\t\t.map((block) => {\n\t\t\tif (block.type === \"text\") {\n\t\t\t\treturn block.text;\n\t\t\t}\n\t\t\tif (block.type === \"thinking\") {\n\t\t\t\treturn block.thinking;\n\t\t\t}\n\t\t\treturn `${block.name}:${JSON.stringify(block.arguments)}`;\n\t\t})\n\t\t.join(\"\\n\");\n}\n\nfunction toolResultToText(message: ToolResultMessage): string {\n\treturn [message.toolName, ...message.content.map((block) => contentToText([block]))].join(\"\\n\");\n}\n\nfunction messageToText(message: Message): string {\n\tif (message.role === \"user\") {\n\t\treturn contentToText(message.content);\n\t}\n\tif (message.role === \"assistant\") {\n\t\treturn assistantContentToText(message.content);\n\t}\n\treturn toolResultToText(message);\n}\n\nfunction serializeContext(context: Context): string {\n\tconst parts: string[] = [];\n\tif (context.systemPrompt) {\n\t\tparts.push(`system:${context.systemPrompt}`);\n\t}\n\tfor (const message of context.messages) {\n\t\tparts.push(`${message.role}:${messageToText(message)}`);\n\t}\n\tif (context.tools?.length) {\n\t\tparts.push(`tools:${JSON.stringify(context.tools)}`);\n\t}\n\treturn parts.join(\"\\n\\n\");\n}\n\nfunction commonPrefixLength(a: string, b: string): number {\n\tconst length = Math.min(a.length, b.length);\n\tlet index = 0;\n\twhile (index < length && a[index] === b[index]) 
{\n\t\tindex++;\n\t}\n\treturn index;\n}\n\nfunction withUsageEstimate(\n\tmessage: AssistantMessage,\n\tcontext: Context,\n\toptions: StreamOptions | undefined,\n\tpromptCache: Map<string, string>,\n): AssistantMessage {\n\tconst promptText = serializeContext(context);\n\tconst promptTokens = estimateTokens(promptText);\n\tconst outputTokens = estimateTokens(assistantContentToText(message.content));\n\tlet input = promptTokens;\n\tlet cacheRead = 0;\n\tlet cacheWrite = 0;\n\tconst sessionId = options?.sessionId;\n\n\tif (sessionId && options?.cacheRetention !== \"none\") {\n\t\tconst previousPrompt = promptCache.get(sessionId);\n\t\tif (previousPrompt) {\n\t\t\tconst cachedChars = commonPrefixLength(previousPrompt, promptText);\n\t\t\tcacheRead = estimateTokens(previousPrompt.slice(0, cachedChars));\n\t\t\tcacheWrite = estimateTokens(promptText.slice(cachedChars));\n\t\t\tinput = Math.max(0, promptTokens - cacheRead);\n\t\t} else {\n\t\t\tcacheWrite = promptTokens;\n\t\t}\n\t\tpromptCache.set(sessionId, promptText);\n\t}\n\n\treturn {\n\t\t...message,\n\t\tusage: {\n\t\t\tinput,\n\t\t\toutput: outputTokens,\n\t\t\tcacheRead,\n\t\t\tcacheWrite,\n\t\t\ttotalTokens: input + outputTokens + cacheRead + cacheWrite,\n\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t},\n\t};\n}\n\nfunction splitStringByTokenSize(text: string, minTokenSize: number, maxTokenSize: number): string[] {\n\tconst chunks: string[] = [];\n\tlet index = 0;\n\twhile (index < text.length) {\n\t\tconst tokenSize = minTokenSize + Math.floor(Math.random() * (maxTokenSize - minTokenSize + 1));\n\t\tconst charSize = Math.max(1, tokenSize * 4);\n\t\tchunks.push(text.slice(index, index + charSize));\n\t\tindex += charSize;\n\t}\n\treturn chunks.length > 0 ? 
chunks : [\"\"];\n}\n\nfunction cloneMessage(message: AssistantMessage, api: string, provider: string, modelId: string): AssistantMessage {\n\tconst cloned = structuredClone(message);\n\treturn {\n\t\t...cloned,\n\t\tapi,\n\t\tprovider,\n\t\tmodel: modelId,\n\t\ttimestamp: cloned.timestamp ?? Date.now(),\n\t\tusage: cloned.usage ?? DEFAULT_USAGE,\n\t};\n}\n\nfunction createErrorMessage(error: unknown, api: string, provider: string, modelId: string): AssistantMessage {\n\treturn {\n\t\trole: \"assistant\",\n\t\tcontent: [],\n\t\tapi,\n\t\tprovider,\n\t\tmodel: modelId,\n\t\tusage: DEFAULT_USAGE,\n\t\tstopReason: \"error\",\n\t\terrorMessage: error instanceof Error ? error.message : String(error),\n\t\ttimestamp: Date.now(),\n\t};\n}\n\nfunction createAbortedMessage(partial: AssistantMessage): AssistantMessage {\n\treturn {\n\t\t...partial,\n\t\tstopReason: \"aborted\",\n\t\terrorMessage: \"Request was aborted\",\n\t\ttimestamp: Date.now(),\n\t};\n}\n\nfunction scheduleChunk(chunk: string, tokensPerSecond: number | undefined): Promise<void> {\n\tif (!tokensPerSecond || tokensPerSecond <= 0) {\n\t\treturn new Promise((resolve) => queueMicrotask(resolve));\n\t}\n\tconst delayMs = (estimateTokens(chunk) / tokensPerSecond) * 1000;\n\treturn new Promise((resolve) => setTimeout(resolve, delayMs));\n}\n\nasync function streamWithDeltas(\n\tstream: AssistantMessageEventStream,\n\tmessage: AssistantMessage,\n\tminTokenSize: number,\n\tmaxTokenSize: number,\n\ttokensPerSecond: number | undefined,\n\tsignal: AbortSignal | undefined,\n): Promise<void> {\n\tconst partial: AssistantMessage = { ...message, content: [] };\n\tif (signal?.aborted) {\n\t\tconst aborted = createAbortedMessage(partial);\n\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\tstream.end(aborted);\n\t\treturn;\n\t}\n\n\tstream.push({ type: \"start\", partial: { ...partial } });\n\n\tfor (let index = 0; index < message.content.length; index++) {\n\t\tif (signal?.aborted) 
{\n\t\t\tconst aborted = createAbortedMessage(partial);\n\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\t\tstream.end(aborted);\n\t\t\treturn;\n\t\t}\n\n\t\tconst block = message.content[index];\n\n\t\tif (block.type === \"thinking\") {\n\t\t\tpartial.content = [...partial.content, { type: \"thinking\", thinking: \"\" }];\n\t\t\tstream.push({ type: \"thinking_start\", contentIndex: index, partial: { ...partial } });\n\t\t\tfor (const chunk of splitStringByTokenSize(block.thinking, minTokenSize, maxTokenSize)) {\n\t\t\t\tawait scheduleChunk(chunk, tokensPerSecond);\n\t\t\t\tif (signal?.aborted) {\n\t\t\t\t\tconst aborted = createAbortedMessage(partial);\n\t\t\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\t\t\t\tstream.end(aborted);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t\t(partial.content[index] as ThinkingContent).thinking += chunk;\n\t\t\t\tstream.push({ type: \"thinking_delta\", contentIndex: index, delta: chunk, partial: { ...partial } });\n\t\t\t}\n\t\t\tstream.push({\n\t\t\t\ttype: \"thinking_end\",\n\t\t\t\tcontentIndex: index,\n\t\t\t\tcontent: block.thinking,\n\t\t\t\tpartial: { ...partial },\n\t\t\t});\n\t\t\tcontinue;\n\t\t}\n\n\t\tif (block.type === \"text\") {\n\t\t\tpartial.content = [...partial.content, { type: \"text\", text: \"\" }];\n\t\t\tstream.push({ type: \"text_start\", contentIndex: index, partial: { ...partial } });\n\t\t\tfor (const chunk of splitStringByTokenSize(block.text, minTokenSize, maxTokenSize)) {\n\t\t\t\tawait scheduleChunk(chunk, tokensPerSecond);\n\t\t\t\tif (signal?.aborted) {\n\t\t\t\t\tconst aborted = createAbortedMessage(partial);\n\t\t\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\t\t\t\tstream.end(aborted);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t\t(partial.content[index] as TextContent).text += chunk;\n\t\t\t\tstream.push({ type: \"text_delta\", contentIndex: index, delta: chunk, partial: { ...partial } 
});\n\t\t\t}\n\t\t\tstream.push({ type: \"text_end\", contentIndex: index, content: block.text, partial: { ...partial } });\n\t\t\tcontinue;\n\t\t}\n\n\t\tpartial.content = [...partial.content, { type: \"toolCall\", id: block.id, name: block.name, arguments: {} }];\n\t\tstream.push({ type: \"toolcall_start\", contentIndex: index, partial: { ...partial } });\n\t\tfor (const chunk of splitStringByTokenSize(JSON.stringify(block.arguments), minTokenSize, maxTokenSize)) {\n\t\t\tawait scheduleChunk(chunk, tokensPerSecond);\n\t\t\tif (signal?.aborted) {\n\t\t\t\tconst aborted = createAbortedMessage(partial);\n\t\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\t\t\tstream.end(aborted);\n\t\t\t\treturn;\n\t\t\t}\n\t\t\tstream.push({ type: \"toolcall_delta\", contentIndex: index, delta: chunk, partial: { ...partial } });\n\t\t}\n\t\t(partial.content[index] as ToolCall).arguments = block.arguments;\n\t\tstream.push({ type: \"toolcall_end\", contentIndex: index, toolCall: block, partial: { ...partial } });\n\t}\n\n\tif (message.stopReason === \"error\" || message.stopReason === \"aborted\") {\n\t\tstream.push({ type: \"error\", reason: message.stopReason, error: message });\n\t\tstream.end(message);\n\t\treturn;\n\t}\n\n\tstream.push({ type: \"done\", reason: message.stopReason, message });\n\tstream.end(message);\n}\n\nexport function registerFauxProvider(options: RegisterFauxProviderOptions = {}): FauxProviderRegistration {\n\tconst api = options.api ?? randomId(DEFAULT_API);\n\tconst provider = options.provider ?? DEFAULT_PROVIDER;\n\tconst sourceId = randomId(\"faux-provider\");\n\tconst minTokenSize = Math.max(\n\t\t1,\n\t\tMath.min(options.tokenSize?.min ?? DEFAULT_MIN_TOKEN_SIZE, options.tokenSize?.max ?? DEFAULT_MAX_TOKEN_SIZE),\n\t);\n\tconst maxTokenSize = Math.max(minTokenSize, options.tokenSize?.max ?? 
DEFAULT_MAX_TOKEN_SIZE);\n\tlet pendingResponses: FauxResponseStep[] = [];\n\tconst tokensPerSecond = options.tokensPerSecond;\n\tconst state = { callCount: 0 };\n\tconst promptCache = new Map<string, string>();\n\n\tconst modelDefinitions = options.models?.length\n\t\t? options.models\n\t\t: [\n\t\t\t\t{\n\t\t\t\t\tid: DEFAULT_MODEL_ID,\n\t\t\t\t\tname: DEFAULT_MODEL_NAME,\n\t\t\t\t\treasoning: false,\n\t\t\t\t\tinput: [\"text\", \"image\"] as (\"text\" | \"image\")[],\n\t\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },\n\t\t\t\t\tcontextWindow: 128000,\n\t\t\t\t\tmaxTokens: 16384,\n\t\t\t\t},\n\t\t\t];\n\tconst models = modelDefinitions.map((definition) => ({\n\t\tid: definition.id,\n\t\tname: definition.name ?? definition.id,\n\t\tapi,\n\t\tprovider,\n\t\tbaseUrl: DEFAULT_BASE_URL,\n\t\treasoning: definition.reasoning ?? false,\n\t\tinput: definition.input ?? [\"text\", \"image\"],\n\t\tcost: definition.cost ?? { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },\n\t\tcontextWindow: definition.contextWindow ?? 128000,\n\t\tmaxTokens: definition.maxTokens ?? 
16384,\n\t})) as [Model<string>, ...Model<string>[]];\n\n\tconst stream: StreamFunction<string, StreamOptions> = (requestModel, context, streamOptions) => {\n\t\tconst outer = createAssistantMessageEventStream();\n\t\tconst step = pendingResponses.shift();\n\t\tstate.callCount++;\n\n\t\tqueueMicrotask(async () => {\n\t\t\ttry {\n\t\t\t\tawait streamOptions?.onResponse?.({ status: 200, headers: {} }, requestModel);\n\t\t\t\tif (!step) {\n\t\t\t\t\tlet message = createErrorMessage(\n\t\t\t\t\t\tnew Error(\"No more faux responses queued\"),\n\t\t\t\t\t\tapi,\n\t\t\t\t\t\tprovider,\n\t\t\t\t\t\trequestModel.id,\n\t\t\t\t\t);\n\t\t\t\t\tmessage = withUsageEstimate(message, context, streamOptions, promptCache);\n\t\t\t\t\touter.push({ type: \"error\", reason: \"error\", error: message });\n\t\t\t\t\touter.end(message);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\n\t\t\t\tconst resolved =\n\t\t\t\t\ttypeof step === \"function\" ? await step(context, streamOptions, state, requestModel) : step;\n\t\t\t\tlet message = cloneMessage(resolved, api, provider, requestModel.id);\n\t\t\t\tmessage = withUsageEstimate(message, context, streamOptions, promptCache);\n\t\t\t\tawait streamWithDeltas(outer, message, minTokenSize, maxTokenSize, tokensPerSecond, streamOptions?.signal);\n\t\t\t} catch (error) {\n\t\t\t\tconst message = createErrorMessage(error, api, provider, requestModel.id);\n\t\t\t\touter.push({ type: \"error\", reason: \"error\", error: message });\n\t\t\t\touter.end(message);\n\t\t\t}\n\t\t});\n\n\t\treturn outer;\n\t};\n\n\tconst streamSimple: StreamFunction<string, SimpleStreamOptions> = (streamModel, context, streamOptions) =>\n\t\tstream(streamModel, context, streamOptions);\n\n\tregisterApiProvider({ api, stream, streamSimple }, sourceId);\n\n\tfunction getModel(): Model<string>;\n\tfunction getModel(requestedModelId: string): Model<string> | undefined;\n\tfunction getModel(requestedModelId?: string): Model<string> | undefined {\n\t\tif (!requestedModelId) {\n\t\t\treturn 
models[0];\n\t\t}\n\t\treturn models.find((candidate) => candidate.id === requestedModelId);\n\t}\n\n\treturn {\n\t\tapi,\n\t\tmodels,\n\t\tgetModel,\n\t\tstate,\n\t\tsetResponses(responses) {\n\t\t\tpendingResponses = [...responses];\n\t\t},\n\t\tappendResponses(responses) {\n\t\t\tpendingResponses.push(...responses);\n\t\t},\n\t\tgetPendingResponseCount() {\n\t\t\treturn pendingResponses.length;\n\t\t},\n\t\tunregister() {\n\t\t\tunregisterApiProviders(sourceId);\n\t\t},\n\t};\n}\n"]}
@@ -0,0 +1,368 @@
1
+ import { registerApiProvider, unregisterApiProviders } from "../api-registry.js";
2
+ import { createAssistantMessageEventStream } from "../utils/event-stream.js";
3
+ const DEFAULT_API = "faux";
4
+ const DEFAULT_PROVIDER = "faux";
5
+ const DEFAULT_MODEL_ID = "faux-1";
6
+ const DEFAULT_MODEL_NAME = "Faux Model";
7
+ const DEFAULT_BASE_URL = "http://localhost:0";
8
+ const DEFAULT_MIN_TOKEN_SIZE = 3;
9
+ const DEFAULT_MAX_TOKEN_SIZE = 5;
10
+ const DEFAULT_USAGE = {
11
+ input: 0,
12
+ output: 0,
13
+ cacheRead: 0,
14
+ cacheWrite: 0,
15
+ totalTokens: 0,
16
+ cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
17
+ };
18
+ export function fauxText(text) {
19
+ return { type: "text", text };
20
+ }
21
+ export function fauxThinking(thinking) {
22
+ return { type: "thinking", thinking };
23
+ }
24
+ export function fauxToolCall(name, arguments_, options = {}) {
25
+ return {
26
+ type: "toolCall",
27
+ id: options.id ?? randomId("tool"),
28
+ name,
29
+ arguments: arguments_,
30
+ };
31
+ }
32
+ function normalizeFauxAssistantContent(content) {
33
+ if (typeof content === "string") {
34
+ return [fauxText(content)];
35
+ }
36
+ return Array.isArray(content) ? content : [content];
37
+ }
38
+ export function fauxAssistantMessage(content, options = {}) {
39
+ return {
40
+ role: "assistant",
41
+ content: normalizeFauxAssistantContent(content),
42
+ api: DEFAULT_API,
43
+ provider: DEFAULT_PROVIDER,
44
+ model: DEFAULT_MODEL_ID,
45
+ usage: DEFAULT_USAGE,
46
+ stopReason: options.stopReason ?? "stop",
47
+ errorMessage: options.errorMessage,
48
+ responseId: options.responseId,
49
+ timestamp: options.timestamp ?? Date.now(),
50
+ };
51
+ }
52
+ function estimateTokens(text) {
53
+ return Math.ceil(text.length / 4);
54
+ }
55
+ function randomId(prefix) {
56
+ return `${prefix}:${Date.now()}:${Math.random().toString(36).slice(2)}`;
57
+ }
58
+ function contentToText(content) {
59
+ if (typeof content === "string") {
60
+ return content;
61
+ }
62
+ return content
63
+ .map((block) => {
64
+ if (block.type === "text") {
65
+ return block.text;
66
+ }
67
+ return `[image:${block.mimeType}:${block.data.length}]`;
68
+ })
69
+ .join("\n");
70
+ }
71
+ function assistantContentToText(content) {
72
+ return content
73
+ .map((block) => {
74
+ if (block.type === "text") {
75
+ return block.text;
76
+ }
77
+ if (block.type === "thinking") {
78
+ return block.thinking;
79
+ }
80
+ return `${block.name}:${JSON.stringify(block.arguments)}`;
81
+ })
82
+ .join("\n");
83
+ }
84
+ function toolResultToText(message) {
85
+ return [message.toolName, ...message.content.map((block) => contentToText([block]))].join("\n");
86
+ }
87
+ function messageToText(message) {
88
+ if (message.role === "user") {
89
+ return contentToText(message.content);
90
+ }
91
+ if (message.role === "assistant") {
92
+ return assistantContentToText(message.content);
93
+ }
94
+ return toolResultToText(message);
95
+ }
96
+ function serializeContext(context) {
97
+ const parts = [];
98
+ if (context.systemPrompt) {
99
+ parts.push(`system:${context.systemPrompt}`);
100
+ }
101
+ for (const message of context.messages) {
102
+ parts.push(`${message.role}:${messageToText(message)}`);
103
+ }
104
+ if (context.tools?.length) {
105
+ parts.push(`tools:${JSON.stringify(context.tools)}`);
106
+ }
107
+ return parts.join("\n\n");
108
+ }
109
+ function commonPrefixLength(a, b) {
110
+ const length = Math.min(a.length, b.length);
111
+ let index = 0;
112
+ while (index < length && a[index] === b[index]) {
113
+ index++;
114
+ }
115
+ return index;
116
+ }
117
+ function withUsageEstimate(message, context, options, promptCache) {
118
+ const promptText = serializeContext(context);
119
+ const promptTokens = estimateTokens(promptText);
120
+ const outputTokens = estimateTokens(assistantContentToText(message.content));
121
+ let input = promptTokens;
122
+ let cacheRead = 0;
123
+ let cacheWrite = 0;
124
+ const sessionId = options?.sessionId;
125
+ if (sessionId && options?.cacheRetention !== "none") {
126
+ const previousPrompt = promptCache.get(sessionId);
127
+ if (previousPrompt) {
128
+ const cachedChars = commonPrefixLength(previousPrompt, promptText);
129
+ cacheRead = estimateTokens(previousPrompt.slice(0, cachedChars));
130
+ cacheWrite = estimateTokens(promptText.slice(cachedChars));
131
+ input = Math.max(0, promptTokens - cacheRead);
132
+ }
133
+ else {
134
+ cacheWrite = promptTokens;
135
+ }
136
+ promptCache.set(sessionId, promptText);
137
+ }
138
+ return {
139
+ ...message,
140
+ usage: {
141
+ input,
142
+ output: outputTokens,
143
+ cacheRead,
144
+ cacheWrite,
145
+ totalTokens: input + outputTokens + cacheRead + cacheWrite,
146
+ cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
147
+ },
148
+ };
149
+ }
150
+ function splitStringByTokenSize(text, minTokenSize, maxTokenSize) {
151
+ const chunks = [];
152
+ let index = 0;
153
+ while (index < text.length) {
154
+ const tokenSize = minTokenSize + Math.floor(Math.random() * (maxTokenSize - minTokenSize + 1));
155
+ const charSize = Math.max(1, tokenSize * 4);
156
+ chunks.push(text.slice(index, index + charSize));
157
+ index += charSize;
158
+ }
159
+ return chunks.length > 0 ? chunks : [""];
160
+ }
161
+ function cloneMessage(message, api, provider, modelId) {
162
+ const cloned = structuredClone(message);
163
+ return {
164
+ ...cloned,
165
+ api,
166
+ provider,
167
+ model: modelId,
168
+ timestamp: cloned.timestamp ?? Date.now(),
169
+ usage: cloned.usage ?? DEFAULT_USAGE,
170
+ };
171
+ }
172
+ function createErrorMessage(error, api, provider, modelId) {
173
+ return {
174
+ role: "assistant",
175
+ content: [],
176
+ api,
177
+ provider,
178
+ model: modelId,
179
+ usage: DEFAULT_USAGE,
180
+ stopReason: "error",
181
+ errorMessage: error instanceof Error ? error.message : String(error),
182
+ timestamp: Date.now(),
183
+ };
184
+ }
185
+ function createAbortedMessage(partial) {
186
+ return {
187
+ ...partial,
188
+ stopReason: "aborted",
189
+ errorMessage: "Request was aborted",
190
+ timestamp: Date.now(),
191
+ };
192
+ }
193
+ function scheduleChunk(chunk, tokensPerSecond) {
194
+ if (!tokensPerSecond || tokensPerSecond <= 0) {
195
+ return new Promise((resolve) => queueMicrotask(resolve));
196
+ }
197
+ const delayMs = (estimateTokens(chunk) / tokensPerSecond) * 1000;
198
+ return new Promise((resolve) => setTimeout(resolve, delayMs));
199
+ }
200
+ async function streamWithDeltas(stream, message, minTokenSize, maxTokenSize, tokensPerSecond, signal) {
201
+ const partial = { ...message, content: [] };
202
+ if (signal?.aborted) {
203
+ const aborted = createAbortedMessage(partial);
204
+ stream.push({ type: "error", reason: "aborted", error: aborted });
205
+ stream.end(aborted);
206
+ return;
207
+ }
208
+ stream.push({ type: "start", partial: { ...partial } });
209
+ for (let index = 0; index < message.content.length; index++) {
210
+ if (signal?.aborted) {
211
+ const aborted = createAbortedMessage(partial);
212
+ stream.push({ type: "error", reason: "aborted", error: aborted });
213
+ stream.end(aborted);
214
+ return;
215
+ }
216
+ const block = message.content[index];
217
+ if (block.type === "thinking") {
218
+ partial.content = [...partial.content, { type: "thinking", thinking: "" }];
219
+ stream.push({ type: "thinking_start", contentIndex: index, partial: { ...partial } });
220
+ for (const chunk of splitStringByTokenSize(block.thinking, minTokenSize, maxTokenSize)) {
221
+ await scheduleChunk(chunk, tokensPerSecond);
222
+ if (signal?.aborted) {
223
+ const aborted = createAbortedMessage(partial);
224
+ stream.push({ type: "error", reason: "aborted", error: aborted });
225
+ stream.end(aborted);
226
+ return;
227
+ }
228
+ partial.content[index].thinking += chunk;
229
+ stream.push({ type: "thinking_delta", contentIndex: index, delta: chunk, partial: { ...partial } });
230
+ }
231
+ stream.push({
232
+ type: "thinking_end",
233
+ contentIndex: index,
234
+ content: block.thinking,
235
+ partial: { ...partial },
236
+ });
237
+ continue;
238
+ }
239
+ if (block.type === "text") {
240
+ partial.content = [...partial.content, { type: "text", text: "" }];
241
+ stream.push({ type: "text_start", contentIndex: index, partial: { ...partial } });
242
+ for (const chunk of splitStringByTokenSize(block.text, minTokenSize, maxTokenSize)) {
243
+ await scheduleChunk(chunk, tokensPerSecond);
244
+ if (signal?.aborted) {
245
+ const aborted = createAbortedMessage(partial);
246
+ stream.push({ type: "error", reason: "aborted", error: aborted });
247
+ stream.end(aborted);
248
+ return;
249
+ }
250
+ partial.content[index].text += chunk;
251
+ stream.push({ type: "text_delta", contentIndex: index, delta: chunk, partial: { ...partial } });
252
+ }
253
+ stream.push({ type: "text_end", contentIndex: index, content: block.text, partial: { ...partial } });
254
+ continue;
255
+ }
256
+ partial.content = [...partial.content, { type: "toolCall", id: block.id, name: block.name, arguments: {} }];
257
+ stream.push({ type: "toolcall_start", contentIndex: index, partial: { ...partial } });
258
+ for (const chunk of splitStringByTokenSize(JSON.stringify(block.arguments), minTokenSize, maxTokenSize)) {
259
+ await scheduleChunk(chunk, tokensPerSecond);
260
+ if (signal?.aborted) {
261
+ const aborted = createAbortedMessage(partial);
262
+ stream.push({ type: "error", reason: "aborted", error: aborted });
263
+ stream.end(aborted);
264
+ return;
265
+ }
266
+ stream.push({ type: "toolcall_delta", contentIndex: index, delta: chunk, partial: { ...partial } });
267
+ }
268
+ partial.content[index].arguments = block.arguments;
269
+ stream.push({ type: "toolcall_end", contentIndex: index, toolCall: block, partial: { ...partial } });
270
+ }
271
+ if (message.stopReason === "error" || message.stopReason === "aborted") {
272
+ stream.push({ type: "error", reason: message.stopReason, error: message });
273
+ stream.end(message);
274
+ return;
275
+ }
276
+ stream.push({ type: "done", reason: message.stopReason, message });
277
+ stream.end(message);
278
+ }
279
+ export function registerFauxProvider(options = {}) {
280
+ const api = options.api ?? randomId(DEFAULT_API);
281
+ const provider = options.provider ?? DEFAULT_PROVIDER;
282
+ const sourceId = randomId("faux-provider");
283
+ const minTokenSize = Math.max(1, Math.min(options.tokenSize?.min ?? DEFAULT_MIN_TOKEN_SIZE, options.tokenSize?.max ?? DEFAULT_MAX_TOKEN_SIZE));
284
+ const maxTokenSize = Math.max(minTokenSize, options.tokenSize?.max ?? DEFAULT_MAX_TOKEN_SIZE);
285
+ let pendingResponses = [];
286
+ const tokensPerSecond = options.tokensPerSecond;
287
+ const state = { callCount: 0 };
288
+ const promptCache = new Map();
289
+ const modelDefinitions = options.models?.length
290
+ ? options.models
291
+ : [
292
+ {
293
+ id: DEFAULT_MODEL_ID,
294
+ name: DEFAULT_MODEL_NAME,
295
+ reasoning: false,
296
+ input: ["text", "image"],
297
+ cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
298
+ contextWindow: 128000,
299
+ maxTokens: 16384,
300
+ },
301
+ ];
302
+ const models = modelDefinitions.map((definition) => ({
303
+ id: definition.id,
304
+ name: definition.name ?? definition.id,
305
+ api,
306
+ provider,
307
+ baseUrl: DEFAULT_BASE_URL,
308
+ reasoning: definition.reasoning ?? false,
309
+ input: definition.input ?? ["text", "image"],
310
+ cost: definition.cost ?? { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
311
+ contextWindow: definition.contextWindow ?? 128000,
312
+ maxTokens: definition.maxTokens ?? 16384,
313
+ }));
314
+ const stream = (requestModel, context, streamOptions) => {
315
+ const outer = createAssistantMessageEventStream();
316
+ const step = pendingResponses.shift();
317
+ state.callCount++;
318
+ queueMicrotask(async () => {
319
+ try {
320
+ await streamOptions?.onResponse?.({ status: 200, headers: {} }, requestModel);
321
+ if (!step) {
322
+ let message = createErrorMessage(new Error("No more faux responses queued"), api, provider, requestModel.id);
323
+ message = withUsageEstimate(message, context, streamOptions, promptCache);
324
+ outer.push({ type: "error", reason: "error", error: message });
325
+ outer.end(message);
326
+ return;
327
+ }
328
+ const resolved = typeof step === "function" ? await step(context, streamOptions, state, requestModel) : step;
329
+ let message = cloneMessage(resolved, api, provider, requestModel.id);
330
+ message = withUsageEstimate(message, context, streamOptions, promptCache);
331
+ await streamWithDeltas(outer, message, minTokenSize, maxTokenSize, tokensPerSecond, streamOptions?.signal);
332
+ }
333
+ catch (error) {
334
+ const message = createErrorMessage(error, api, provider, requestModel.id);
335
+ outer.push({ type: "error", reason: "error", error: message });
336
+ outer.end(message);
337
+ }
338
+ });
339
+ return outer;
340
+ };
341
+ const streamSimple = (streamModel, context, streamOptions) => stream(streamModel, context, streamOptions);
342
+ registerApiProvider({ api, stream, streamSimple }, sourceId);
343
+ function getModel(requestedModelId) {
344
+ if (!requestedModelId) {
345
+ return models[0];
346
+ }
347
+ return models.find((candidate) => candidate.id === requestedModelId);
348
+ }
349
+ return {
350
+ api,
351
+ models,
352
+ getModel,
353
+ state,
354
+ setResponses(responses) {
355
+ pendingResponses = [...responses];
356
+ },
357
+ appendResponses(responses) {
358
+ pendingResponses.push(...responses);
359
+ },
360
+ getPendingResponseCount() {
361
+ return pendingResponses.length;
362
+ },
363
+ unregister() {
364
+ unregisterApiProviders(sourceId);
365
+ },
366
+ };
367
+ }
368
+ //# sourceMappingURL=faux.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"faux.js","sourceRoot":"","sources":["../../src/providers/faux.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,mBAAmB,EAAE,sBAAsB,EAAE,MAAM,oBAAoB,CAAC;AAiBjF,OAAO,EAAE,iCAAiC,EAAE,MAAM,0BAA0B,CAAC;AAE7E,MAAM,WAAW,GAAG,MAAM,CAAC;AAC3B,MAAM,gBAAgB,GAAG,MAAM,CAAC;AAChC,MAAM,gBAAgB,GAAG,QAAQ,CAAC;AAClC,MAAM,kBAAkB,GAAG,YAAY,CAAC;AACxC,MAAM,gBAAgB,GAAG,oBAAoB,CAAC;AAC9C,MAAM,sBAAsB,GAAG,CAAC,CAAC;AACjC,MAAM,sBAAsB,GAAG,CAAC,CAAC;AAEjC,MAAM,aAAa,GAAU;IAC5B,KAAK,EAAE,CAAC;IACR,MAAM,EAAE,CAAC;IACT,SAAS,EAAE,CAAC;IACZ,UAAU,EAAE,CAAC;IACb,WAAW,EAAE,CAAC;IACd,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE;CACpE,CAAC;AAcF,MAAM,UAAU,QAAQ,CAAC,IAAY,EAAe;IACnD,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;AAAA,CAC9B;AAED,MAAM,UAAU,YAAY,CAAC,QAAgB,EAAmB;IAC/D,OAAO,EAAE,IAAI,EAAE,UAAU,EAAE,QAAQ,EAAE,CAAC;AAAA,CACtC;AAED,MAAM,UAAU,YAAY,CAAC,IAAY,EAAE,UAAiC,EAAE,OAAO,GAAoB,EAAE,EAAY;IACtH,OAAO;QACN,IAAI,EAAE,UAAU;QAChB,EAAE,EAAE,OAAO,CAAC,EAAE,IAAI,QAAQ,CAAC,MAAM,CAAC;QAClC,IAAI;QACJ,SAAS,EAAE,UAAU;KACrB,CAAC;AAAA,CACF;AAED,SAAS,6BAA6B,CAAC,OAAuD,EAAsB;IACnH,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE,CAAC;QACjC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC;IAC5B,CAAC;IACD,OAAO,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;AAAA,CACpD;AAED,MAAM,UAAU,oBAAoB,CACnC,OAAuD,EACvD,OAAO,GAKH,EAAE,EACa;IACnB,OAAO;QACN,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,6BAA6B,CAAC,OAAO,CAAC;QAC/C,GAAG,EAAE,WAAW;QAChB,QAAQ,EAAE,gBAAgB;QAC1B,KAAK,EAAE,gBAAgB;QACvB,KAAK,EAAE,aAAa;QACpB,UAAU,EAAE,OAAO,CAAC,UAAU,IAAI,MAAM;QACxC,YAAY,EAAE,OAAO,CAAC,YAAY;QAClC,UAAU,EAAE,OAAO,CAAC,UAAU;QAC9B,SAAS,EAAE,OAAO,CAAC,SAAS,IAAI,IAAI,CAAC,GAAG,EAAE;KAC1C,CAAC;AAAA,CACF;AAkCD,SAAS,cAAc,CAAC,IAAY,EAAU;IAC7C,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;AAAA,CAClC;AAED,SAAS,QAAQ,CAAC,MAAc,EAAU;IACzC,OAAO,GAAG,MAAM,IAAI,IAAI,CAAC,GAAG,EAAE,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,
CAAC;AAAA,CACxE;AAED,SAAS,aAAa,CAAC,OAAmD,EAAU;IACnF,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE,CAAC;QACjC,OAAO,OAAO,CAAC;IAChB,CAAC;IACD,OAAO,OAAO;SACZ,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC;QACf,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YAC3B,OAAO,KAAK,CAAC,IAAI,CAAC;QACnB,CAAC;QACD,OAAO,UAAU,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC;IAAA,CACxD,CAAC;SACD,IAAI,CAAC,IAAI,CAAC,CAAC;AAAA,CACb;AAED,SAAS,sBAAsB,CAAC,OAAwD,EAAU;IACjG,OAAO,OAAO;SACZ,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC;QACf,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YAC3B,OAAO,KAAK,CAAC,IAAI,CAAC;QACnB,CAAC;QACD,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;YAC/B,OAAO,KAAK,CAAC,QAAQ,CAAC;QACvB,CAAC;QACD,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC;IAAA,CAC1D,CAAC;SACD,IAAI,CAAC,IAAI,CAAC,CAAC;AAAA,CACb;AAED,SAAS,gBAAgB,CAAC,OAA0B,EAAU;IAC7D,OAAO,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,aAAa,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAAA,CAChG;AAED,SAAS,aAAa,CAAC,OAAgB,EAAU;IAChD,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;QAC7B,OAAO,aAAa,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IACvC,CAAC;IACD,IAAI,OAAO,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;QAClC,OAAO,sBAAsB,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IAChD,CAAC;IACD,OAAO,gBAAgB,CAAC,OAAO,CAAC,CAAC;AAAA,CACjC;AAED,SAAS,gBAAgB,CAAC,OAAgB,EAAU;IACnD,MAAM,KAAK,GAAa,EAAE,CAAC;IAC3B,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;QAC1B,KAAK,CAAC,IAAI,CAAC,UAAU,OAAO,CAAC,YAAY,EAAE,CAAC,CAAC;IAC9C,CAAC;IACD,KAAK,MAAM,OAAO,IAAI,OAAO,CAAC,QAAQ,EAAE,CAAC;QACxC,KAAK,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,IAAI,IAAI,aAAa,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;IACzD,CAAC;IACD,IAAI,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC;QAC3B,KAAK,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;IACtD,CAAC;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AAAA,CAC1B;AAED,SAAS,kBAAkB,CAAC,CAAS,EAAE,CAAS,EAAU;IACzD,MAAM,MAAM,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC;IAC5C,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,OAAO,KAAK,GAAG,MAAM,IAAI,CAAC,C
AAC,KAAK,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC;QAChD,KAAK,EAAE,CAAC;IACT,CAAC;IACD,OAAO,KAAK,CAAC;AAAA,CACb;AAED,SAAS,iBAAiB,CACzB,OAAyB,EACzB,OAAgB,EAChB,OAAkC,EAClC,WAAgC,EACb;IACnB,MAAM,UAAU,GAAG,gBAAgB,CAAC,OAAO,CAAC,CAAC;IAC7C,MAAM,YAAY,GAAG,cAAc,CAAC,UAAU,CAAC,CAAC;IAChD,MAAM,YAAY,GAAG,cAAc,CAAC,sBAAsB,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;IAC7E,IAAI,KAAK,GAAG,YAAY,CAAC;IACzB,IAAI,SAAS,GAAG,CAAC,CAAC;IAClB,IAAI,UAAU,GAAG,CAAC,CAAC;IACnB,MAAM,SAAS,GAAG,OAAO,EAAE,SAAS,CAAC;IAErC,IAAI,SAAS,IAAI,OAAO,EAAE,cAAc,KAAK,MAAM,EAAE,CAAC;QACrD,MAAM,cAAc,GAAG,WAAW,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;QAClD,IAAI,cAAc,EAAE,CAAC;YACpB,MAAM,WAAW,GAAG,kBAAkB,CAAC,cAAc,EAAE,UAAU,CAAC,CAAC;YACnE,SAAS,GAAG,cAAc,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,CAAC;YACjE,UAAU,GAAG,cAAc,CAAC,UAAU,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC;YAC3D,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,YAAY,GAAG,SAAS,CAAC,CAAC;QAC/C,CAAC;aAAM,CAAC;YACP,UAAU,GAAG,YAAY,CAAC;QAC3B,CAAC;QACD,WAAW,CAAC,GAAG,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC;IACxC,CAAC;IAED,OAAO;QACN,GAAG,OAAO;QACV,KAAK,EAAE;YACN,KAAK;YACL,MAAM,EAAE,YAAY;YACpB,SAAS;YACT,UAAU;YACV,WAAW,EAAE,KAAK,GAAG,YAAY,GAAG,SAAS,GAAG,UAAU;YAC1D,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE;SACpE;KACD,CAAC;AAAA,CACF;AAED,SAAS,sBAAsB,CAAC,IAAY,EAAE,YAAoB,EAAE,YAAoB,EAAY;IACnG,MAAM,MAAM,GAAa,EAAE,CAAC;IAC5B,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,OAAO,KAAK,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QAC5B,MAAM,SAAS,GAAG,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,YAAY,GAAG,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC;QAC/F,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,GAAG,CAAC,CAAC,CAAC;QAC5C,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE,KAAK,GAAG,QAAQ,CAAC,CAAC,CAAC;QACjD,KAAK,IAAI,QAAQ,CAAC;IACnB,CAAC;IACD,OAAO,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AAAA,CACzC;AAED,SAAS,YAAY,CAAC,OAAyB,EAAE,GAAW,EAAE,QAAgB,EAAE,OAAe,EAAoB;IAClH,MAAM,MAAM,GAAG,eAAe,CAAC,OAAO,CAAC,CAAC;IACxC,OAAO;QA
CN,GAAG,MAAM;QACT,GAAG;QACH,QAAQ;QACR,KAAK,EAAE,OAAO;QACd,SAAS,EAAE,MAAM,CAAC,SAAS,IAAI,IAAI,CAAC,GAAG,EAAE;QACzC,KAAK,EAAE,MAAM,CAAC,KAAK,IAAI,aAAa;KACpC,CAAC;AAAA,CACF;AAED,SAAS,kBAAkB,CAAC,KAAc,EAAE,GAAW,EAAE,QAAgB,EAAE,OAAe,EAAoB;IAC7G,OAAO;QACN,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,EAAE;QACX,GAAG;QACH,QAAQ;QACR,KAAK,EAAE,OAAO;QACd,KAAK,EAAE,aAAa;QACpB,UAAU,EAAE,OAAO;QACnB,YAAY,EAAE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;QACpE,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;KACrB,CAAC;AAAA,CACF;AAED,SAAS,oBAAoB,CAAC,OAAyB,EAAoB;IAC1E,OAAO;QACN,GAAG,OAAO;QACV,UAAU,EAAE,SAAS;QACrB,YAAY,EAAE,qBAAqB;QACnC,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;KACrB,CAAC;AAAA,CACF;AAED,SAAS,aAAa,CAAC,KAAa,EAAE,eAAmC,EAAiB;IACzF,IAAI,CAAC,eAAe,IAAI,eAAe,IAAI,CAAC,EAAE,CAAC;QAC9C,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC,CAAC;IAC1D,CAAC;IACD,MAAM,OAAO,GAAG,CAAC,cAAc,CAAC,KAAK,CAAC,GAAG,eAAe,CAAC,GAAG,IAAI,CAAC;IACjE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC;AAAA,CAC9D;AAED,KAAK,UAAU,gBAAgB,CAC9B,MAAmC,EACnC,OAAyB,EACzB,YAAoB,EACpB,YAAoB,EACpB,eAAmC,EACnC,MAA+B,EACf;IAChB,MAAM,OAAO,GAAqB,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,EAAE,EAAE,CAAC;IAC9D,IAAI,MAAM,EAAE,OAAO,EAAE,CAAC;QACrB,MAAM,OAAO,GAAG,oBAAoB,CAAC,OAAO,CAAC,CAAC;QAC9C,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;QAClE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;QACpB,OAAO;IACR,CAAC;IAED,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;IAExD,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE,CAAC;QAC7D,IAAI,MAAM,EAAE,OAAO,EAAE,CAAC;YACrB,MAAM,OAAO,GAAG,oBAAoB,CAAC,OAAO,CAAC,CAAC;YAC9C,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;YAClE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;YACpB,OAAO;QACR,CAAC;QAED,MAAM,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QAErC,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;YAC/B,
OAAO,CAAC,OAAO,GAAG,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,QAAQ,EAAE,EAAE,EAAE,CAAC,CAAC;YAC3E,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;YACtF,KAAK,MAAM,KAAK,IAAI,sBAAsB,CAAC,KAAK,CAAC,QAAQ,EAAE,YAAY,EAAE,YAAY,CAAC,EAAE,CAAC;gBACxF,MAAM,aAAa,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;gBAC5C,IAAI,MAAM,EAAE,OAAO,EAAE,CAAC;oBACrB,MAAM,OAAO,GAAG,oBAAoB,CAAC,OAAO,CAAC,CAAC;oBAC9C,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;oBAClE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;oBACpB,OAAO;gBACR,CAAC;gBACA,OAAO,CAAC,OAAO,CAAC,KAAK,CAAqB,CAAC,QAAQ,IAAI,KAAK,CAAC;gBAC9D,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;YACrG,CAAC;YACD,MAAM,CAAC,IAAI,CAAC;gBACX,IAAI,EAAE,cAAc;gBACpB,YAAY,EAAE,KAAK;gBACnB,OAAO,EAAE,KAAK,CAAC,QAAQ;gBACvB,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE;aACvB,CAAC,CAAC;YACH,SAAS;QACV,CAAC;QAED,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YAC3B,OAAO,CAAC,OAAO,GAAG,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC;YACnE,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,YAAY,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;YAClF,KAAK,MAAM,KAAK,IAAI,sBAAsB,CAAC,KAAK,CAAC,IAAI,EAAE,YAAY,EAAE,YAAY,CAAC,EAAE,CAAC;gBACpF,MAAM,aAAa,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;gBAC5C,IAAI,MAAM,EAAE,OAAO,EAAE,CAAC;oBACrB,MAAM,OAAO,GAAG,oBAAoB,CAAC,OAAO,CAAC,CAAC;oBAC9C,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;oBAClE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;oBACpB,OAAO;gBACR,CAAC;gBACA,OAAO,CAAC,OAAO,CAAC,KAAK,CAAiB,CAAC,IAAI,IAAI,KAAK,CAAC;gBACtD,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,YAAY,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;YACjG,CAAC;YACD,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,UAAU,EAAE,YAAY,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE
,CAAC,CAAC;YACrG,SAAS;QACV,CAAC;QAED,OAAO,CAAC,OAAO,GAAG,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,EAAE,EAAE,KAAK,CAAC,EAAE,EAAE,IAAI,EAAE,KAAK,CAAC,IAAI,EAAE,SAAS,EAAE,EAAE,EAAE,CAAC,CAAC;QAC5G,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;QACtF,KAAK,MAAM,KAAK,IAAI,sBAAsB,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,YAAY,EAAE,YAAY,CAAC,EAAE,CAAC;YACzG,MAAM,aAAa,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;YAC5C,IAAI,MAAM,EAAE,OAAO,EAAE,CAAC;gBACrB,MAAM,OAAO,GAAG,oBAAoB,CAAC,OAAO,CAAC,CAAC;gBAC9C,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;gBAClE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;gBACpB,OAAO;YACR,CAAC;YACD,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;QACrG,CAAC;QACA,OAAO,CAAC,OAAO,CAAC,KAAK,CAAc,CAAC,SAAS,GAAG,KAAK,CAAC,SAAS,CAAC;QACjE,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,cAAc,EAAE,YAAY,EAAE,KAAK,EAAE,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;IACtG,CAAC;IAED,IAAI,OAAO,CAAC,UAAU,KAAK,OAAO,IAAI,OAAO,CAAC,UAAU,KAAK,SAAS,EAAE,CAAC;QACxE,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,UAAU,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;QAC3E,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;QACpB,OAAO;IACR,CAAC;IAED,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,UAAU,EAAE,OAAO,EAAE,CAAC,CAAC;IACnE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;AAAA,CACpB;AAED,MAAM,UAAU,oBAAoB,CAAC,OAAO,GAAgC,EAAE,EAA4B;IACzG,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,IAAI,QAAQ,CAAC,WAAW,CAAC,CAAC;IACjD,MAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,IAAI,gBAAgB,CAAC;IACtD,MAAM,QAAQ,GAAG,QAAQ,CAAC,eAAe,CAAC,CAAC;IAC3C,MAAM,YAAY,GAAG,IAAI,CAAC,GAAG,CAC5B,CAAC,EACD,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,SAAS,EAAE,GAAG,IAAI,sBAAsB,EAAE,OAAO,CAAC,SAAS,EAAE,GAAG,IAAI,sBAAsB,CAAC,CAC5G,CAAC;IACF,MAAM,YAAY,GAAG,IAAI,CAAC,GAAG,CAAC,YAAY,EAAE,OAAO,CAAC,SAAS,EAAE,GAAG,IAAI,sBAAsB,CAAC,CAAC;IAC9F,IAAI,gBAAgB,GAAuB,EAA
E,CAAC;IAC9C,MAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;IAChD,MAAM,KAAK,GAAG,EAAE,SAAS,EAAE,CAAC,EAAE,CAAC;IAC/B,MAAM,WAAW,GAAG,IAAI,GAAG,EAAkB,CAAC;IAE9C,MAAM,gBAAgB,GAAG,OAAO,CAAC,MAAM,EAAE,MAAM;QAC9C,CAAC,CAAC,OAAO,CAAC,MAAM;QAChB,CAAC,CAAC;YACA;gBACC,EAAE,EAAE,gBAAgB;gBACpB,IAAI,EAAE,kBAAkB;gBACxB,SAAS,EAAE,KAAK;gBAChB,KAAK,EAAE,CAAC,MAAM,EAAE,OAAO,CAAyB;gBAChD,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE;gBAC1D,aAAa,EAAE,MAAM;gBACrB,SAAS,EAAE,KAAK;aAChB;SACD,CAAC;IACJ,MAAM,MAAM,GAAG,gBAAgB,CAAC,GAAG,CAAC,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC;QACpD,EAAE,EAAE,UAAU,CAAC,EAAE;QACjB,IAAI,EAAE,UAAU,CAAC,IAAI,IAAI,UAAU,CAAC,EAAE;QACtC,GAAG;QACH,QAAQ;QACR,OAAO,EAAE,gBAAgB;QACzB,SAAS,EAAE,UAAU,CAAC,SAAS,IAAI,KAAK;QACxC,KAAK,EAAE,UAAU,CAAC,KAAK,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC;QAC5C,IAAI,EAAE,UAAU,CAAC,IAAI,IAAI,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE;QAC7E,aAAa,EAAE,UAAU,CAAC,aAAa,IAAI,MAAM;QACjD,SAAS,EAAE,UAAU,CAAC,SAAS,IAAI,KAAK;KACxC,CAAC,CAAwC,CAAC;IAE3C,MAAM,MAAM,GAA0C,CAAC,YAAY,EAAE,OAAO,EAAE,aAAa,EAAE,EAAE,CAAC;QAC/F,MAAM,KAAK,GAAG,iCAAiC,EAAE,CAAC;QAClD,MAAM,IAAI,GAAG,gBAAgB,CAAC,KAAK,EAAE,CAAC;QACtC,KAAK,CAAC,SAAS,EAAE,CAAC;QAElB,cAAc,CAAC,KAAK,IAAI,EAAE,CAAC;YAC1B,IAAI,CAAC;gBACJ,MAAM,aAAa,EAAE,UAAU,EAAE,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,EAAE,EAAE,EAAE,YAAY,CAAC,CAAC;gBAC9E,IAAI,CAAC,IAAI,EAAE,CAAC;oBACX,IAAI,OAAO,GAAG,kBAAkB,CAC/B,IAAI,KAAK,CAAC,+BAA+B,CAAC,EAC1C,GAAG,EACH,QAAQ,EACR,YAAY,CAAC,EAAE,CACf,CAAC;oBACF,OAAO,GAAG,iBAAiB,CAAC,OAAO,EAAE,OAAO,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;oBAC1E,KAAK,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;oBAC/D,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;oBACnB,OAAO;gBACR,CAAC;gBAED,MAAM,QAAQ,GACb,OAAO,IAAI,KAAK,UAAU,CAAC,CAAC,CAAC,MAAM,IAAI,CAAC,OAAO,EAAE,aAAa,EAAE,KAAK,EAAE,YAAY,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;gBAC7F,IAAI,OAAO,GAAG,YAAY,CAAC,QAAQ,EAAE,GAAG,EAAE,QAAQ,EAAE,YAAY,CAAC,EAAE,CAAC,CAAC;gBACrE,OAAO,GAAG,
iBAAiB,CAAC,OAAO,EAAE,OAAO,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;gBAC1E,MAAM,gBAAgB,CAAC,KAAK,EAAE,OAAO,EAAE,YAAY,EAAE,YAAY,EAAE,eAAe,EAAE,aAAa,EAAE,MAAM,CAAC,CAAC;YAC5G,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBAChB,MAAM,OAAO,GAAG,kBAAkB,CAAC,KAAK,EAAE,GAAG,EAAE,QAAQ,EAAE,YAAY,CAAC,EAAE,CAAC,CAAC;gBAC1E,KAAK,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;gBAC/D,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;YACpB,CAAC;QAAA,CACD,CAAC,CAAC;QAEH,OAAO,KAAK,CAAC;IAAA,CACb,CAAC;IAEF,MAAM,YAAY,GAAgD,CAAC,WAAW,EAAE,OAAO,EAAE,aAAa,EAAE,EAAE,CACzG,MAAM,CAAC,WAAW,EAAE,OAAO,EAAE,aAAa,CAAC,CAAC;IAE7C,mBAAmB,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,YAAY,EAAE,EAAE,QAAQ,CAAC,CAAC;IAI7D,SAAS,QAAQ,CAAC,gBAAyB,EAA6B;QACvE,IAAI,CAAC,gBAAgB,EAAE,CAAC;YACvB,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC;QAClB,CAAC;QACD,OAAO,MAAM,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,EAAE,CAAC,SAAS,CAAC,EAAE,KAAK,gBAAgB,CAAC,CAAC;IAAA,CACrE;IAED,OAAO;QACN,GAAG;QACH,MAAM;QACN,QAAQ;QACR,KAAK;QACL,YAAY,CAAC,SAAS,EAAE;YACvB,gBAAgB,GAAG,CAAC,GAAG,SAAS,CAAC,CAAC;QAAA,CAClC;QACD,eAAe,CAAC,SAAS,EAAE;YAC1B,gBAAgB,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC,CAAC;QAAA,CACpC;QACD,uBAAuB,GAAG;YACzB,OAAO,gBAAgB,CAAC,MAAM,CAAC;QAAA,CAC/B;QACD,UAAU,GAAG;YACZ,sBAAsB,CAAC,QAAQ,CAAC,CAAC;QAAA,CACjC;KACD,CAAC;AAAA,CACF","sourcesContent":["import { registerApiProvider, unregisterApiProviders } from \"../api-registry.js\";\nimport type {\n\tAssistantMessage,\n\tAssistantMessageEventStream,\n\tContext,\n\tImageContent,\n\tMessage,\n\tModel,\n\tSimpleStreamOptions,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tToolCall,\n\tToolResultMessage,\n\tUsage,\n} from \"../types.js\";\nimport { createAssistantMessageEventStream } from \"../utils/event-stream.js\";\n\nconst DEFAULT_API = \"faux\";\nconst DEFAULT_PROVIDER = \"faux\";\nconst DEFAULT_MODEL_ID = \"faux-1\";\nconst DEFAULT_MODEL_NAME = \"Faux Model\";\nconst DEFAULT_BASE_URL = \"http://localhost:0\";\nconst DEFAULT_MIN_TOKEN_SIZE = 3;\nconst DEFAULT_MAX_TOKEN_SIZE = 5;\n\nconst 
DEFAULT_USAGE: Usage = {\n\tinput: 0,\n\toutput: 0,\n\tcacheRead: 0,\n\tcacheWrite: 0,\n\ttotalTokens: 0,\n\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n};\n\nexport interface FauxModelDefinition {\n\tid: string;\n\tname?: string;\n\treasoning?: boolean;\n\tinput?: (\"text\" | \"image\")[];\n\tcost?: { input: number; output: number; cacheRead: number; cacheWrite: number };\n\tcontextWindow?: number;\n\tmaxTokens?: number;\n}\n\nexport type FauxContentBlock = TextContent | ThinkingContent | ToolCall;\n\nexport function fauxText(text: string): TextContent {\n\treturn { type: \"text\", text };\n}\n\nexport function fauxThinking(thinking: string): ThinkingContent {\n\treturn { type: \"thinking\", thinking };\n}\n\nexport function fauxToolCall(name: string, arguments_: ToolCall[\"arguments\"], options: { id?: string } = {}): ToolCall {\n\treturn {\n\t\ttype: \"toolCall\",\n\t\tid: options.id ?? randomId(\"tool\"),\n\t\tname,\n\t\targuments: arguments_,\n\t};\n}\n\nfunction normalizeFauxAssistantContent(content: string | FauxContentBlock | FauxContentBlock[]): FauxContentBlock[] {\n\tif (typeof content === \"string\") {\n\t\treturn [fauxText(content)];\n\t}\n\treturn Array.isArray(content) ? content : [content];\n}\n\nexport function fauxAssistantMessage(\n\tcontent: string | FauxContentBlock | FauxContentBlock[],\n\toptions: {\n\t\tstopReason?: AssistantMessage[\"stopReason\"];\n\t\terrorMessage?: string;\n\t\tresponseId?: string;\n\t\ttimestamp?: number;\n\t} = {},\n): AssistantMessage {\n\treturn {\n\t\trole: \"assistant\",\n\t\tcontent: normalizeFauxAssistantContent(content),\n\t\tapi: DEFAULT_API,\n\t\tprovider: DEFAULT_PROVIDER,\n\t\tmodel: DEFAULT_MODEL_ID,\n\t\tusage: DEFAULT_USAGE,\n\t\tstopReason: options.stopReason ?? \"stop\",\n\t\terrorMessage: options.errorMessage,\n\t\tresponseId: options.responseId,\n\t\ttimestamp: options.timestamp ?? 
Date.now(),\n\t};\n}\n\nexport type FauxResponseFactory = (\n\tcontext: Context,\n\toptions: StreamOptions | undefined,\n\tstate: { callCount: number },\n\tmodel: Model<string>,\n) => AssistantMessage | Promise<AssistantMessage>;\n\nexport type FauxResponseStep = AssistantMessage | FauxResponseFactory;\n\nexport interface RegisterFauxProviderOptions {\n\tapi?: string;\n\tprovider?: string;\n\tmodels?: FauxModelDefinition[];\n\ttokensPerSecond?: number;\n\ttokenSize?: {\n\t\tmin?: number;\n\t\tmax?: number;\n\t};\n}\n\nexport interface FauxProviderRegistration {\n\tapi: string;\n\tmodels: [Model<string>, ...Model<string>[]];\n\tgetModel(): Model<string>;\n\tgetModel(modelId: string): Model<string> | undefined;\n\tstate: { callCount: number };\n\tsetResponses: (responses: FauxResponseStep[]) => void;\n\tappendResponses: (responses: FauxResponseStep[]) => void;\n\tgetPendingResponseCount: () => number;\n\tunregister: () => void;\n}\n\nfunction estimateTokens(text: string): number {\n\treturn Math.ceil(text.length / 4);\n}\n\nfunction randomId(prefix: string): string {\n\treturn `${prefix}:${Date.now()}:${Math.random().toString(36).slice(2)}`;\n}\n\nfunction contentToText(content: string | Array<TextContent | ImageContent>): string {\n\tif (typeof content === \"string\") {\n\t\treturn content;\n\t}\n\treturn content\n\t\t.map((block) => {\n\t\t\tif (block.type === \"text\") {\n\t\t\t\treturn block.text;\n\t\t\t}\n\t\t\treturn `[image:${block.mimeType}:${block.data.length}]`;\n\t\t})\n\t\t.join(\"\\n\");\n}\n\nfunction assistantContentToText(content: Array<TextContent | ThinkingContent | ToolCall>): string {\n\treturn content\n\t\t.map((block) => {\n\t\t\tif (block.type === \"text\") {\n\t\t\t\treturn block.text;\n\t\t\t}\n\t\t\tif (block.type === \"thinking\") {\n\t\t\t\treturn block.thinking;\n\t\t\t}\n\t\t\treturn `${block.name}:${JSON.stringify(block.arguments)}`;\n\t\t})\n\t\t.join(\"\\n\");\n}\n\nfunction toolResultToText(message: ToolResultMessage): string 
{\n\treturn [message.toolName, ...message.content.map((block) => contentToText([block]))].join(\"\\n\");\n}\n\nfunction messageToText(message: Message): string {\n\tif (message.role === \"user\") {\n\t\treturn contentToText(message.content);\n\t}\n\tif (message.role === \"assistant\") {\n\t\treturn assistantContentToText(message.content);\n\t}\n\treturn toolResultToText(message);\n}\n\nfunction serializeContext(context: Context): string {\n\tconst parts: string[] = [];\n\tif (context.systemPrompt) {\n\t\tparts.push(`system:${context.systemPrompt}`);\n\t}\n\tfor (const message of context.messages) {\n\t\tparts.push(`${message.role}:${messageToText(message)}`);\n\t}\n\tif (context.tools?.length) {\n\t\tparts.push(`tools:${JSON.stringify(context.tools)}`);\n\t}\n\treturn parts.join(\"\\n\\n\");\n}\n\nfunction commonPrefixLength(a: string, b: string): number {\n\tconst length = Math.min(a.length, b.length);\n\tlet index = 0;\n\twhile (index < length && a[index] === b[index]) {\n\t\tindex++;\n\t}\n\treturn index;\n}\n\nfunction withUsageEstimate(\n\tmessage: AssistantMessage,\n\tcontext: Context,\n\toptions: StreamOptions | undefined,\n\tpromptCache: Map<string, string>,\n): AssistantMessage {\n\tconst promptText = serializeContext(context);\n\tconst promptTokens = estimateTokens(promptText);\n\tconst outputTokens = estimateTokens(assistantContentToText(message.content));\n\tlet input = promptTokens;\n\tlet cacheRead = 0;\n\tlet cacheWrite = 0;\n\tconst sessionId = options?.sessionId;\n\n\tif (sessionId && options?.cacheRetention !== \"none\") {\n\t\tconst previousPrompt = promptCache.get(sessionId);\n\t\tif (previousPrompt) {\n\t\t\tconst cachedChars = commonPrefixLength(previousPrompt, promptText);\n\t\t\tcacheRead = estimateTokens(previousPrompt.slice(0, cachedChars));\n\t\t\tcacheWrite = estimateTokens(promptText.slice(cachedChars));\n\t\t\tinput = Math.max(0, promptTokens - cacheRead);\n\t\t} else {\n\t\t\tcacheWrite = 
promptTokens;\n\t\t}\n\t\tpromptCache.set(sessionId, promptText);\n\t}\n\n\treturn {\n\t\t...message,\n\t\tusage: {\n\t\t\tinput,\n\t\t\toutput: outputTokens,\n\t\t\tcacheRead,\n\t\t\tcacheWrite,\n\t\t\ttotalTokens: input + outputTokens + cacheRead + cacheWrite,\n\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t},\n\t};\n}\n\nfunction splitStringByTokenSize(text: string, minTokenSize: number, maxTokenSize: number): string[] {\n\tconst chunks: string[] = [];\n\tlet index = 0;\n\twhile (index < text.length) {\n\t\tconst tokenSize = minTokenSize + Math.floor(Math.random() * (maxTokenSize - minTokenSize + 1));\n\t\tconst charSize = Math.max(1, tokenSize * 4);\n\t\tchunks.push(text.slice(index, index + charSize));\n\t\tindex += charSize;\n\t}\n\treturn chunks.length > 0 ? chunks : [\"\"];\n}\n\nfunction cloneMessage(message: AssistantMessage, api: string, provider: string, modelId: string): AssistantMessage {\n\tconst cloned = structuredClone(message);\n\treturn {\n\t\t...cloned,\n\t\tapi,\n\t\tprovider,\n\t\tmodel: modelId,\n\t\ttimestamp: cloned.timestamp ?? Date.now(),\n\t\tusage: cloned.usage ?? DEFAULT_USAGE,\n\t};\n}\n\nfunction createErrorMessage(error: unknown, api: string, provider: string, modelId: string): AssistantMessage {\n\treturn {\n\t\trole: \"assistant\",\n\t\tcontent: [],\n\t\tapi,\n\t\tprovider,\n\t\tmodel: modelId,\n\t\tusage: DEFAULT_USAGE,\n\t\tstopReason: \"error\",\n\t\terrorMessage: error instanceof Error ? 
error.message : String(error),\n\t\ttimestamp: Date.now(),\n\t};\n}\n\nfunction createAbortedMessage(partial: AssistantMessage): AssistantMessage {\n\treturn {\n\t\t...partial,\n\t\tstopReason: \"aborted\",\n\t\terrorMessage: \"Request was aborted\",\n\t\ttimestamp: Date.now(),\n\t};\n}\n\nfunction scheduleChunk(chunk: string, tokensPerSecond: number | undefined): Promise<void> {\n\tif (!tokensPerSecond || tokensPerSecond <= 0) {\n\t\treturn new Promise((resolve) => queueMicrotask(resolve));\n\t}\n\tconst delayMs = (estimateTokens(chunk) / tokensPerSecond) * 1000;\n\treturn new Promise((resolve) => setTimeout(resolve, delayMs));\n}\n\nasync function streamWithDeltas(\n\tstream: AssistantMessageEventStream,\n\tmessage: AssistantMessage,\n\tminTokenSize: number,\n\tmaxTokenSize: number,\n\ttokensPerSecond: number | undefined,\n\tsignal: AbortSignal | undefined,\n): Promise<void> {\n\tconst partial: AssistantMessage = { ...message, content: [] };\n\tif (signal?.aborted) {\n\t\tconst aborted = createAbortedMessage(partial);\n\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\tstream.end(aborted);\n\t\treturn;\n\t}\n\n\tstream.push({ type: \"start\", partial: { ...partial } });\n\n\tfor (let index = 0; index < message.content.length; index++) {\n\t\tif (signal?.aborted) {\n\t\t\tconst aborted = createAbortedMessage(partial);\n\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\t\tstream.end(aborted);\n\t\t\treturn;\n\t\t}\n\n\t\tconst block = message.content[index];\n\n\t\tif (block.type === \"thinking\") {\n\t\t\tpartial.content = [...partial.content, { type: \"thinking\", thinking: \"\" }];\n\t\t\tstream.push({ type: \"thinking_start\", contentIndex: index, partial: { ...partial } });\n\t\t\tfor (const chunk of splitStringByTokenSize(block.thinking, minTokenSize, maxTokenSize)) {\n\t\t\t\tawait scheduleChunk(chunk, tokensPerSecond);\n\t\t\t\tif (signal?.aborted) {\n\t\t\t\t\tconst aborted = 
createAbortedMessage(partial);\n\t\t\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\t\t\t\tstream.end(aborted);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t\t(partial.content[index] as ThinkingContent).thinking += chunk;\n\t\t\t\tstream.push({ type: \"thinking_delta\", contentIndex: index, delta: chunk, partial: { ...partial } });\n\t\t\t}\n\t\t\tstream.push({\n\t\t\t\ttype: \"thinking_end\",\n\t\t\t\tcontentIndex: index,\n\t\t\t\tcontent: block.thinking,\n\t\t\t\tpartial: { ...partial },\n\t\t\t});\n\t\t\tcontinue;\n\t\t}\n\n\t\tif (block.type === \"text\") {\n\t\t\tpartial.content = [...partial.content, { type: \"text\", text: \"\" }];\n\t\t\tstream.push({ type: \"text_start\", contentIndex: index, partial: { ...partial } });\n\t\t\tfor (const chunk of splitStringByTokenSize(block.text, minTokenSize, maxTokenSize)) {\n\t\t\t\tawait scheduleChunk(chunk, tokensPerSecond);\n\t\t\t\tif (signal?.aborted) {\n\t\t\t\t\tconst aborted = createAbortedMessage(partial);\n\t\t\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\t\t\t\tstream.end(aborted);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t\t(partial.content[index] as TextContent).text += chunk;\n\t\t\t\tstream.push({ type: \"text_delta\", contentIndex: index, delta: chunk, partial: { ...partial } });\n\t\t\t}\n\t\t\tstream.push({ type: \"text_end\", contentIndex: index, content: block.text, partial: { ...partial } });\n\t\t\tcontinue;\n\t\t}\n\n\t\tpartial.content = [...partial.content, { type: \"toolCall\", id: block.id, name: block.name, arguments: {} }];\n\t\tstream.push({ type: \"toolcall_start\", contentIndex: index, partial: { ...partial } });\n\t\tfor (const chunk of splitStringByTokenSize(JSON.stringify(block.arguments), minTokenSize, maxTokenSize)) {\n\t\t\tawait scheduleChunk(chunk, tokensPerSecond);\n\t\t\tif (signal?.aborted) {\n\t\t\t\tconst aborted = createAbortedMessage(partial);\n\t\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted 
});\n\t\t\t\tstream.end(aborted);\n\t\t\t\treturn;\n\t\t\t}\n\t\t\tstream.push({ type: \"toolcall_delta\", contentIndex: index, delta: chunk, partial: { ...partial } });\n\t\t}\n\t\t(partial.content[index] as ToolCall).arguments = block.arguments;\n\t\tstream.push({ type: \"toolcall_end\", contentIndex: index, toolCall: block, partial: { ...partial } });\n\t}\n\n\tif (message.stopReason === \"error\" || message.stopReason === \"aborted\") {\n\t\tstream.push({ type: \"error\", reason: message.stopReason, error: message });\n\t\tstream.end(message);\n\t\treturn;\n\t}\n\n\tstream.push({ type: \"done\", reason: message.stopReason, message });\n\tstream.end(message);\n}\n\nexport function registerFauxProvider(options: RegisterFauxProviderOptions = {}): FauxProviderRegistration {\n\tconst api = options.api ?? randomId(DEFAULT_API);\n\tconst provider = options.provider ?? DEFAULT_PROVIDER;\n\tconst sourceId = randomId(\"faux-provider\");\n\tconst minTokenSize = Math.max(\n\t\t1,\n\t\tMath.min(options.tokenSize?.min ?? DEFAULT_MIN_TOKEN_SIZE, options.tokenSize?.max ?? DEFAULT_MAX_TOKEN_SIZE),\n\t);\n\tconst maxTokenSize = Math.max(minTokenSize, options.tokenSize?.max ?? DEFAULT_MAX_TOKEN_SIZE);\n\tlet pendingResponses: FauxResponseStep[] = [];\n\tconst tokensPerSecond = options.tokensPerSecond;\n\tconst state = { callCount: 0 };\n\tconst promptCache = new Map<string, string>();\n\n\tconst modelDefinitions = options.models?.length\n\t\t? options.models\n\t\t: [\n\t\t\t\t{\n\t\t\t\t\tid: DEFAULT_MODEL_ID,\n\t\t\t\t\tname: DEFAULT_MODEL_NAME,\n\t\t\t\t\treasoning: false,\n\t\t\t\t\tinput: [\"text\", \"image\"] as (\"text\" | \"image\")[],\n\t\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },\n\t\t\t\t\tcontextWindow: 128000,\n\t\t\t\t\tmaxTokens: 16384,\n\t\t\t\t},\n\t\t\t];\n\tconst models = modelDefinitions.map((definition) => ({\n\t\tid: definition.id,\n\t\tname: definition.name ?? 
definition.id,\n\t\tapi,\n\t\tprovider,\n\t\tbaseUrl: DEFAULT_BASE_URL,\n\t\treasoning: definition.reasoning ?? false,\n\t\tinput: definition.input ?? [\"text\", \"image\"],\n\t\tcost: definition.cost ?? { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },\n\t\tcontextWindow: definition.contextWindow ?? 128000,\n\t\tmaxTokens: definition.maxTokens ?? 16384,\n\t})) as [Model<string>, ...Model<string>[]];\n\n\tconst stream: StreamFunction<string, StreamOptions> = (requestModel, context, streamOptions) => {\n\t\tconst outer = createAssistantMessageEventStream();\n\t\tconst step = pendingResponses.shift();\n\t\tstate.callCount++;\n\n\t\tqueueMicrotask(async () => {\n\t\t\ttry {\n\t\t\t\tawait streamOptions?.onResponse?.({ status: 200, headers: {} }, requestModel);\n\t\t\t\tif (!step) {\n\t\t\t\t\tlet message = createErrorMessage(\n\t\t\t\t\t\tnew Error(\"No more faux responses queued\"),\n\t\t\t\t\t\tapi,\n\t\t\t\t\t\tprovider,\n\t\t\t\t\t\trequestModel.id,\n\t\t\t\t\t);\n\t\t\t\t\tmessage = withUsageEstimate(message, context, streamOptions, promptCache);\n\t\t\t\t\touter.push({ type: \"error\", reason: \"error\", error: message });\n\t\t\t\t\touter.end(message);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\n\t\t\t\tconst resolved =\n\t\t\t\t\ttypeof step === \"function\" ? 
await step(context, streamOptions, state, requestModel) : step;\n\t\t\t\tlet message = cloneMessage(resolved, api, provider, requestModel.id);\n\t\t\t\tmessage = withUsageEstimate(message, context, streamOptions, promptCache);\n\t\t\t\tawait streamWithDeltas(outer, message, minTokenSize, maxTokenSize, tokensPerSecond, streamOptions?.signal);\n\t\t\t} catch (error) {\n\t\t\t\tconst message = createErrorMessage(error, api, provider, requestModel.id);\n\t\t\t\touter.push({ type: \"error\", reason: \"error\", error: message });\n\t\t\t\touter.end(message);\n\t\t\t}\n\t\t});\n\n\t\treturn outer;\n\t};\n\n\tconst streamSimple: StreamFunction<string, SimpleStreamOptions> = (streamModel, context, streamOptions) =>\n\t\tstream(streamModel, context, streamOptions);\n\n\tregisterApiProvider({ api, stream, streamSimple }, sourceId);\n\n\tfunction getModel(): Model<string>;\n\tfunction getModel(requestedModelId: string): Model<string> | undefined;\n\tfunction getModel(requestedModelId?: string): Model<string> | undefined {\n\t\tif (!requestedModelId) {\n\t\t\treturn models[0];\n\t\t}\n\t\treturn models.find((candidate) => candidate.id === requestedModelId);\n\t}\n\n\treturn {\n\t\tapi,\n\t\tmodels,\n\t\tgetModel,\n\t\tstate,\n\t\tsetResponses(responses) {\n\t\t\tpendingResponses = [...responses];\n\t\t},\n\t\tappendResponses(responses) {\n\t\t\tpendingResponses.push(...responses);\n\t\t},\n\t\tgetPendingResponseCount() {\n\t\t\treturn pendingResponses.length;\n\t\t},\n\t\tunregister() {\n\t\t\tunregisterApiProviders(sourceId);\n\t\t},\n\t};\n}\n"]}
@@ -1 +1 @@
1
- {"version":3,"file":"github-copilot-headers.d.ts","sourceRoot":"","sources":["../../src/providers/github-copilot-headers.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAI3C,wBAAgB,qBAAqB,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,GAAG,OAAO,CAG3E;AAGD,wBAAgB,qBAAqB,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,OAAO,CAUlE;AAED,wBAAgB,0BAA0B,CAAC,MAAM,EAAE;IAClD,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,SAAS,EAAE,OAAO,CAAC;CACnB,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAWzB"}
1
+ {"version":3,"file":"github-copilot-headers.d.ts","sourceRoot":"","sources":["../../src/providers/github-copilot-headers.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAI3C,wBAAgB,qBAAqB,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,GAAG,OAAO,CAG3E;AAGD,wBAAgB,qBAAqB,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,OAAO,CAUlE;AAED,wBAAgB,0BAA0B,CAAC,MAAM,EAAE;IAClD,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,SAAS,EAAE,OAAO,CAAC;CACnB,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAWzB","sourcesContent":["import type { Message } from \"../types.js\";\n\n// Copilot expects X-Initiator to indicate whether the request is user-initiated\n// or agent-initiated (e.g. follow-up after assistant/tool messages).\nexport function inferCopilotInitiator(messages: Message[]): \"user\" | \"agent\" {\n\tconst last = messages[messages.length - 1];\n\treturn last && last.role !== \"user\" ? \"agent\" : \"user\";\n}\n\n// Copilot requires Copilot-Vision-Request header when sending images\nexport function hasCopilotVisionInput(messages: Message[]): boolean {\n\treturn messages.some((msg) => {\n\t\tif (msg.role === \"user\" && Array.isArray(msg.content)) {\n\t\t\treturn msg.content.some((c) => c.type === \"image\");\n\t\t}\n\t\tif (msg.role === \"toolResult\" && Array.isArray(msg.content)) {\n\t\t\treturn msg.content.some((c) => c.type === \"image\");\n\t\t}\n\t\treturn false;\n\t});\n}\n\nexport function buildCopilotDynamicHeaders(params: {\n\tmessages: Message[];\n\thasImages: boolean;\n}): Record<string, string> {\n\tconst headers: Record<string, string> = {\n\t\t\"X-Initiator\": inferCopilotInitiator(params.messages),\n\t\t\"Openai-Intent\": \"conversation-edits\",\n\t};\n\n\tif (params.hasImages) {\n\t\theaders[\"Copilot-Vision-Request\"] = \"true\";\n\t}\n\n\treturn headers;\n}\n"]}
@@ -1 +1 @@
1
- {"version":3,"file":"github-copilot-headers.js","sourceRoot":"","sources":["../../src/providers/github-copilot-headers.ts"],"names":[],"mappings":"AAEA,gFAAgF;AAChF,qEAAqE;AACrE,MAAM,UAAU,qBAAqB,CAAC,QAAmB;IACxD,MAAM,IAAI,GAAG,QAAQ,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IAC3C,OAAO,IAAI,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC;AACxD,CAAC;AAED,qEAAqE;AACrE,MAAM,UAAU,qBAAqB,CAAC,QAAmB;IACxD,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE;QAC5B,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC;YACvD,OAAO,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC;QACpD,CAAC;QACD,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC;YAC7D,OAAO,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC;QACpD,CAAC;QACD,OAAO,KAAK,CAAC;IACd,CAAC,CAAC,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,0BAA0B,CAAC,MAG1C;IACA,MAAM,OAAO,GAA2B;QACvC,aAAa,EAAE,qBAAqB,CAAC,MAAM,CAAC,QAAQ,CAAC;QACrD,eAAe,EAAE,oBAAoB;KACrC,CAAC;IAEF,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;QACtB,OAAO,CAAC,wBAAwB,CAAC,GAAG,MAAM,CAAC;IAC5C,CAAC;IAED,OAAO,OAAO,CAAC;AAChB,CAAC","sourcesContent":["import type { Message } from \"../types.js\";\n\n// Copilot expects X-Initiator to indicate whether the request is user-initiated\n// or agent-initiated (e.g. follow-up after assistant/tool messages).\nexport function inferCopilotInitiator(messages: Message[]): \"user\" | \"agent\" {\n\tconst last = messages[messages.length - 1];\n\treturn last && last.role !== \"user\" ? 
\"agent\" : \"user\";\n}\n\n// Copilot requires Copilot-Vision-Request header when sending images\nexport function hasCopilotVisionInput(messages: Message[]): boolean {\n\treturn messages.some((msg) => {\n\t\tif (msg.role === \"user\" && Array.isArray(msg.content)) {\n\t\t\treturn msg.content.some((c) => c.type === \"image\");\n\t\t}\n\t\tif (msg.role === \"toolResult\" && Array.isArray(msg.content)) {\n\t\t\treturn msg.content.some((c) => c.type === \"image\");\n\t\t}\n\t\treturn false;\n\t});\n}\n\nexport function buildCopilotDynamicHeaders(params: {\n\tmessages: Message[];\n\thasImages: boolean;\n}): Record<string, string> {\n\tconst headers: Record<string, string> = {\n\t\t\"X-Initiator\": inferCopilotInitiator(params.messages),\n\t\t\"Openai-Intent\": \"conversation-edits\",\n\t};\n\n\tif (params.hasImages) {\n\t\theaders[\"Copilot-Vision-Request\"] = \"true\";\n\t}\n\n\treturn headers;\n}\n"]}
1
+ {"version":3,"file":"github-copilot-headers.js","sourceRoot":"","sources":["../../src/providers/github-copilot-headers.ts"],"names":[],"mappings":"AAEA,gFAAgF;AAChF,qEAAqE;AACrE,MAAM,UAAU,qBAAqB,CAAC,QAAmB,EAAoB;IAC5E,MAAM,IAAI,GAAG,QAAQ,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IAC3C,OAAO,IAAI,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC;AAAA,CACvD;AAED,qEAAqE;AACrE,MAAM,UAAU,qBAAqB,CAAC,QAAmB,EAAW;IACnE,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC;QAC7B,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC;YACvD,OAAO,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC;QACpD,CAAC;QACD,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC;YAC7D,OAAO,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC;QACpD,CAAC;QACD,OAAO,KAAK,CAAC;IAAA,CACb,CAAC,CAAC;AAAA,CACH;AAED,MAAM,UAAU,0BAA0B,CAAC,MAG1C,EAA0B;IAC1B,MAAM,OAAO,GAA2B;QACvC,aAAa,EAAE,qBAAqB,CAAC,MAAM,CAAC,QAAQ,CAAC;QACrD,eAAe,EAAE,oBAAoB;KACrC,CAAC;IAEF,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;QACtB,OAAO,CAAC,wBAAwB,CAAC,GAAG,MAAM,CAAC;IAC5C,CAAC;IAED,OAAO,OAAO,CAAC;AAAA,CACf","sourcesContent":["import type { Message } from \"../types.js\";\n\n// Copilot expects X-Initiator to indicate whether the request is user-initiated\n// or agent-initiated (e.g. follow-up after assistant/tool messages).\nexport function inferCopilotInitiator(messages: Message[]): \"user\" | \"agent\" {\n\tconst last = messages[messages.length - 1];\n\treturn last && last.role !== \"user\" ? 
\"agent\" : \"user\";\n}\n\n// Copilot requires Copilot-Vision-Request header when sending images\nexport function hasCopilotVisionInput(messages: Message[]): boolean {\n\treturn messages.some((msg) => {\n\t\tif (msg.role === \"user\" && Array.isArray(msg.content)) {\n\t\t\treturn msg.content.some((c) => c.type === \"image\");\n\t\t}\n\t\tif (msg.role === \"toolResult\" && Array.isArray(msg.content)) {\n\t\t\treturn msg.content.some((c) => c.type === \"image\");\n\t\t}\n\t\treturn false;\n\t});\n}\n\nexport function buildCopilotDynamicHeaders(params: {\n\tmessages: Message[];\n\thasImages: boolean;\n}): Record<string, string> {\n\tconst headers: Record<string, string> = {\n\t\t\"X-Initiator\": inferCopilotInitiator(params.messages),\n\t\t\"Openai-Intent\": \"conversation-edits\",\n\t};\n\n\tif (params.hasImages) {\n\t\theaders[\"Copilot-Vision-Request\"] = \"true\";\n\t}\n\n\treturn headers;\n}\n"]}
@@ -1,9 +1,14 @@
1
1
  /**
2
- * Shared utilities for Google Generative AI and Google Cloud Code Assist providers.
2
+ * Shared utilities for Google Generative AI and Google Vertex providers.
3
3
  */
4
4
  import { type Content, FinishReason, FunctionCallingConfigMode, type Part } from "@google/genai";
5
5
  import type { Context, Model, StopReason, Tool } from "../types.js";
6
- type GoogleApiType = "google-generative-ai" | "google-gemini-cli" | "google-vertex";
6
+ type GoogleApiType = "google-generative-ai" | "google-vertex";
7
+ /**
8
+ * Thinking level for Gemini 3 models.
9
+ * Mirrors Google's ThinkingLevel enum values.
10
+ */
11
+ export type GoogleThinkingLevel = "THINKING_LEVEL_UNSPECIFIED" | "MINIMAL" | "LOW" | "MEDIUM" | "HIGH";
7
12
  /**
8
13
  * Determines whether a streamed Gemini `Part` should be treated as "thinking".
9
14
  *