@mariozechner/pi-ai 0.63.2 → 0.64.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -636,6 +636,92 @@ The library uses a registry of API implementations. Built-in APIs include:
636
636
  - **`azure-openai-responses`**: Azure OpenAI Responses API (`streamAzureOpenAIResponses`, `AzureOpenAIResponsesOptions`)
637
637
  - **`bedrock-converse-stream`**: Amazon Bedrock Converse API (`streamBedrock`, `BedrockOptions`)
638
638
 
639
+ ### Faux provider for tests
640
+
641
+ `registerFauxProvider()` registers a temporary in-memory provider for tests and demos. It is opt-in and not part of the built-in provider set.
642
+
643
+ ```typescript
644
+ import {
645
+ complete,
646
+ fauxAssistantMessage,
647
+ fauxText,
648
+ fauxThinking,
649
+ fauxToolCall,
650
+ registerFauxProvider,
651
+ stream,
652
+ } from '@mariozechner/pi-ai';
653
+
654
+ const registration = registerFauxProvider({
655
+ tokensPerSecond: 50 // optional
656
+ });
657
+
658
+ const model = registration.getModel();
659
+ const context = {
660
+ messages: [{ role: 'user', content: 'Summarize package.json and then call echo', timestamp: Date.now() }]
661
+ };
662
+
663
+ registration.setResponses([
664
+ fauxAssistantMessage([
665
+ fauxThinking('Need to inspect package metadata first.'),
666
+ fauxToolCall('echo', { text: 'package.json' })
667
+ ], { stopReason: 'toolUse' })
668
+ ]);
669
+
670
+ const first = await complete(model, context, {
671
+ sessionId: 'session-1',
672
+ cacheRetention: 'short'
673
+ });
674
+ context.messages.push(first);
675
+
676
+ context.messages.push({
677
+ role: 'toolResult',
678
+ toolCallId: first.content.find((block) => block.type === 'toolCall')!.id,
679
+ toolName: 'echo',
680
+ content: [{ type: 'text', text: 'package.json contents here' }],
681
+ isError: false,
682
+ timestamp: Date.now()
683
+ });
684
+
685
+ registration.setResponses([
686
+ fauxAssistantMessage([
687
+ fauxThinking('Now I can summarize the tool output.'),
688
+ fauxText('Here is the summary.')
689
+ ])
690
+ ]);
691
+
692
+ const s = stream(model, context);
693
+ for await (const event of s) {
694
+ console.log(event.type);
695
+ }
696
+
697
+ // Optional: register multiple faux models for model-switching tests
698
+ const multiModel = registerFauxProvider({
699
+ models: [
700
+ { id: 'faux-fast', reasoning: false },
701
+ { id: 'faux-thinker', reasoning: true }
702
+ ]
703
+ });
704
+ const thinker = multiModel.getModel('faux-thinker');
705
+
706
+ console.log(thinker?.reasoning);
707
+ console.log(registration.getPendingResponseCount());
708
+ console.log(registration.state.callCount);
709
+ registration.unregister();
710
+ multiModel.unregister();
711
+ ```
712
+
713
+ Notes:
714
+ - Responses are consumed from a queue in request start order.
715
+ - If the queue is empty, the faux provider returns an assistant error message with `errorMessage: "No more faux responses queued"`.
716
+ - Use `registration.setResponses([...])` to replace the remaining queue and `registration.appendResponses([...])` to add more responses.
717
+ - `registration.models` exposes all registered faux models. `registration.getModel()` returns the first one, and `registration.getModel(id)` returns a specific one.
718
+ - Use `fauxAssistantMessage(...)` for scripted assistant replies. Use `fauxText(...)`, `fauxThinking(...)`, and `fauxToolCall(...)` to build content blocks without filling in low-level fields manually.
719
+ - `registration.unregister()` removes the temporary provider from the global API registry.
720
+ - Usage is estimated at roughly 1 token per 4 characters. When `sessionId` is present and `cacheRetention` is not `"none"`, prompt cache reads and writes are simulated automatically.
721
+ - Tool call arguments stream incrementally via `toolcall_delta` chunks.
722
+ - By default, each streamed chunk is emitted on its own microtask. Set `tokensPerSecond` to pace chunk delivery in real time.
723
+ - The intended use is one deterministic scripted flow per registration. If you need independent concurrent flows, register separate faux providers.
724
+
639
725
  ### Providers and Models
640
726
 
641
727
  A **provider** offers models through a specific API. For example:
package/dist/index.d.ts CHANGED
@@ -6,6 +6,7 @@ export * from "./models.js";
6
6
  export type { BedrockOptions } from "./providers/amazon-bedrock.js";
7
7
  export type { AnthropicOptions } from "./providers/anthropic.js";
8
8
  export type { AzureOpenAIResponsesOptions } from "./providers/azure-openai-responses.js";
9
+ export * from "./providers/faux.js";
9
10
  export type { GoogleOptions } from "./providers/google.js";
10
11
  export type { GoogleGeminiCliOptions, GoogleThinkingLevel } from "./providers/google-gemini-cli.js";
11
12
  export type { GoogleVertexOptions } from "./providers/google-vertex.js";
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,YAAY,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AACzD,OAAO,EAAE,IAAI,EAAE,MAAM,mBAAmB,CAAC;AAEzC,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,aAAa,CAAC;AAC5B,YAAY,EAAE,cAAc,EAAE,MAAM,+BAA+B,CAAC;AACpE,YAAY,EAAE,gBAAgB,EAAE,MAAM,0BAA0B,CAAC;AACjE,YAAY,EAAE,2BAA2B,EAAE,MAAM,uCAAuC,CAAC;AACzF,YAAY,EAAE,aAAa,EAAE,MAAM,uBAAuB,CAAC;AAC3D,YAAY,EAAE,sBAAsB,EAAE,mBAAmB,EAAE,MAAM,kCAAkC,CAAC;AACpG,YAAY,EAAE,mBAAmB,EAAE,MAAM,8BAA8B,CAAC;AACxE,YAAY,EAAE,cAAc,EAAE,MAAM,wBAAwB,CAAC;AAC7D,YAAY,EAAE,2BAA2B,EAAE,MAAM,uCAAuC,CAAC;AACzF,YAAY,EAAE,wBAAwB,EAAE,MAAM,mCAAmC,CAAC;AAClF,YAAY,EAAE,sBAAsB,EAAE,MAAM,iCAAiC,CAAC;AAC9E,cAAc,kCAAkC,CAAC;AACjD,cAAc,aAAa,CAAC;AAC5B,cAAc,YAAY,CAAC;AAC3B,cAAc,yBAAyB,CAAC;AACxC,cAAc,uBAAuB,CAAC;AACtC,YAAY,EACX,aAAa,EACb,gBAAgB,EAChB,mBAAmB,EACnB,WAAW,EACX,aAAa,EACb,eAAe,EACf,iBAAiB,EACjB,sBAAsB,GACtB,MAAM,wBAAwB,CAAC;AAChC,cAAc,qBAAqB,CAAC;AACpC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,uBAAuB,CAAC","sourcesContent":["export type { Static, TSchema } from \"@sinclair/typebox\";\nexport { Type } from \"@sinclair/typebox\";\n\nexport * from \"./api-registry.js\";\nexport * from \"./env-api-keys.js\";\nexport * from \"./models.js\";\nexport type { BedrockOptions } from \"./providers/amazon-bedrock.js\";\nexport type { AnthropicOptions } from \"./providers/anthropic.js\";\nexport type { AzureOpenAIResponsesOptions } from \"./providers/azure-openai-responses.js\";\nexport type { GoogleOptions } from \"./providers/google.js\";\nexport type { GoogleGeminiCliOptions, GoogleThinkingLevel } from \"./providers/google-gemini-cli.js\";\nexport type { GoogleVertexOptions } from \"./providers/google-vertex.js\";\nexport type { MistralOptions } from \"./providers/mistral.js\";\nexport type { OpenAICodexResponsesOptions } from \"./providers/openai-codex-responses.js\";\nexport type { OpenAICompletionsOptions } from \"./providers/openai-completions.js\";\nexport type { 
OpenAIResponsesOptions } from \"./providers/openai-responses.js\";\nexport * from \"./providers/register-builtins.js\";\nexport * from \"./stream.js\";\nexport * from \"./types.js\";\nexport * from \"./utils/event-stream.js\";\nexport * from \"./utils/json-parse.js\";\nexport type {\n\tOAuthAuthInfo,\n\tOAuthCredentials,\n\tOAuthLoginCallbacks,\n\tOAuthPrompt,\n\tOAuthProvider,\n\tOAuthProviderId,\n\tOAuthProviderInfo,\n\tOAuthProviderInterface,\n} from \"./utils/oauth/types.js\";\nexport * from \"./utils/overflow.js\";\nexport * from \"./utils/typebox-helpers.js\";\nexport * from \"./utils/validation.js\";\n"]}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,YAAY,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AACzD,OAAO,EAAE,IAAI,EAAE,MAAM,mBAAmB,CAAC;AAEzC,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,aAAa,CAAC;AAC5B,YAAY,EAAE,cAAc,EAAE,MAAM,+BAA+B,CAAC;AACpE,YAAY,EAAE,gBAAgB,EAAE,MAAM,0BAA0B,CAAC;AACjE,YAAY,EAAE,2BAA2B,EAAE,MAAM,uCAAuC,CAAC;AACzF,cAAc,qBAAqB,CAAC;AACpC,YAAY,EAAE,aAAa,EAAE,MAAM,uBAAuB,CAAC;AAC3D,YAAY,EAAE,sBAAsB,EAAE,mBAAmB,EAAE,MAAM,kCAAkC,CAAC;AACpG,YAAY,EAAE,mBAAmB,EAAE,MAAM,8BAA8B,CAAC;AACxE,YAAY,EAAE,cAAc,EAAE,MAAM,wBAAwB,CAAC;AAC7D,YAAY,EAAE,2BAA2B,EAAE,MAAM,uCAAuC,CAAC;AACzF,YAAY,EAAE,wBAAwB,EAAE,MAAM,mCAAmC,CAAC;AAClF,YAAY,EAAE,sBAAsB,EAAE,MAAM,iCAAiC,CAAC;AAC9E,cAAc,kCAAkC,CAAC;AACjD,cAAc,aAAa,CAAC;AAC5B,cAAc,YAAY,CAAC;AAC3B,cAAc,yBAAyB,CAAC;AACxC,cAAc,uBAAuB,CAAC;AACtC,YAAY,EACX,aAAa,EACb,gBAAgB,EAChB,mBAAmB,EACnB,WAAW,EACX,aAAa,EACb,eAAe,EACf,iBAAiB,EACjB,sBAAsB,GACtB,MAAM,wBAAwB,CAAC;AAChC,cAAc,qBAAqB,CAAC;AACpC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,uBAAuB,CAAC","sourcesContent":["export type { Static, TSchema } from \"@sinclair/typebox\";\nexport { Type } from \"@sinclair/typebox\";\n\nexport * from \"./api-registry.js\";\nexport * from \"./env-api-keys.js\";\nexport * from \"./models.js\";\nexport type { BedrockOptions } from \"./providers/amazon-bedrock.js\";\nexport type { AnthropicOptions } from \"./providers/anthropic.js\";\nexport type { AzureOpenAIResponsesOptions } from \"./providers/azure-openai-responses.js\";\nexport * from \"./providers/faux.js\";\nexport type { GoogleOptions } from \"./providers/google.js\";\nexport type { GoogleGeminiCliOptions, GoogleThinkingLevel } from \"./providers/google-gemini-cli.js\";\nexport type { GoogleVertexOptions } from \"./providers/google-vertex.js\";\nexport type { MistralOptions } from \"./providers/mistral.js\";\nexport type { OpenAICodexResponsesOptions } from \"./providers/openai-codex-responses.js\";\nexport type { OpenAICompletionsOptions 
} from \"./providers/openai-completions.js\";\nexport type { OpenAIResponsesOptions } from \"./providers/openai-responses.js\";\nexport * from \"./providers/register-builtins.js\";\nexport * from \"./stream.js\";\nexport * from \"./types.js\";\nexport * from \"./utils/event-stream.js\";\nexport * from \"./utils/json-parse.js\";\nexport type {\n\tOAuthAuthInfo,\n\tOAuthCredentials,\n\tOAuthLoginCallbacks,\n\tOAuthPrompt,\n\tOAuthProvider,\n\tOAuthProviderId,\n\tOAuthProviderInfo,\n\tOAuthProviderInterface,\n} from \"./utils/oauth/types.js\";\nexport * from \"./utils/overflow.js\";\nexport * from \"./utils/typebox-helpers.js\";\nexport * from \"./utils/validation.js\";\n"]}
package/dist/index.js CHANGED
@@ -2,6 +2,7 @@ export { Type } from "@sinclair/typebox";
2
2
  export * from "./api-registry.js";
3
3
  export * from "./env-api-keys.js";
4
4
  export * from "./models.js";
5
+ export * from "./providers/faux.js";
5
6
  export * from "./providers/register-builtins.js";
6
7
  export * from "./stream.js";
7
8
  export * from "./types.js";
package/dist/index.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,IAAI,EAAE,MAAM,mBAAmB,CAAC;AAEzC,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,aAAa,CAAC;AAW5B,cAAc,kCAAkC,CAAC;AACjD,cAAc,aAAa,CAAC;AAC5B,cAAc,YAAY,CAAC;AAC3B,cAAc,yBAAyB,CAAC;AACxC,cAAc,uBAAuB,CAAC;AAWtC,cAAc,qBAAqB,CAAC;AACpC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,uBAAuB,CAAC","sourcesContent":["export type { Static, TSchema } from \"@sinclair/typebox\";\nexport { Type } from \"@sinclair/typebox\";\n\nexport * from \"./api-registry.js\";\nexport * from \"./env-api-keys.js\";\nexport * from \"./models.js\";\nexport type { BedrockOptions } from \"./providers/amazon-bedrock.js\";\nexport type { AnthropicOptions } from \"./providers/anthropic.js\";\nexport type { AzureOpenAIResponsesOptions } from \"./providers/azure-openai-responses.js\";\nexport type { GoogleOptions } from \"./providers/google.js\";\nexport type { GoogleGeminiCliOptions, GoogleThinkingLevel } from \"./providers/google-gemini-cli.js\";\nexport type { GoogleVertexOptions } from \"./providers/google-vertex.js\";\nexport type { MistralOptions } from \"./providers/mistral.js\";\nexport type { OpenAICodexResponsesOptions } from \"./providers/openai-codex-responses.js\";\nexport type { OpenAICompletionsOptions } from \"./providers/openai-completions.js\";\nexport type { OpenAIResponsesOptions } from \"./providers/openai-responses.js\";\nexport * from \"./providers/register-builtins.js\";\nexport * from \"./stream.js\";\nexport * from \"./types.js\";\nexport * from \"./utils/event-stream.js\";\nexport * from \"./utils/json-parse.js\";\nexport type {\n\tOAuthAuthInfo,\n\tOAuthCredentials,\n\tOAuthLoginCallbacks,\n\tOAuthPrompt,\n\tOAuthProvider,\n\tOAuthProviderId,\n\tOAuthProviderInfo,\n\tOAuthProviderInterface,\n} from \"./utils/oauth/types.js\";\nexport * from \"./utils/overflow.js\";\nexport * from \"./utils/typebox-helpers.js\";\nexport * from \"./utils/validation.js\";\n"]}
1
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,IAAI,EAAE,MAAM,mBAAmB,CAAC;AAEzC,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,aAAa,CAAC;AAI5B,cAAc,qBAAqB,CAAC;AAQpC,cAAc,kCAAkC,CAAC;AACjD,cAAc,aAAa,CAAC;AAC5B,cAAc,YAAY,CAAC;AAC3B,cAAc,yBAAyB,CAAC;AACxC,cAAc,uBAAuB,CAAC;AAWtC,cAAc,qBAAqB,CAAC;AACpC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,uBAAuB,CAAC","sourcesContent":["export type { Static, TSchema } from \"@sinclair/typebox\";\nexport { Type } from \"@sinclair/typebox\";\n\nexport * from \"./api-registry.js\";\nexport * from \"./env-api-keys.js\";\nexport * from \"./models.js\";\nexport type { BedrockOptions } from \"./providers/amazon-bedrock.js\";\nexport type { AnthropicOptions } from \"./providers/anthropic.js\";\nexport type { AzureOpenAIResponsesOptions } from \"./providers/azure-openai-responses.js\";\nexport * from \"./providers/faux.js\";\nexport type { GoogleOptions } from \"./providers/google.js\";\nexport type { GoogleGeminiCliOptions, GoogleThinkingLevel } from \"./providers/google-gemini-cli.js\";\nexport type { GoogleVertexOptions } from \"./providers/google-vertex.js\";\nexport type { MistralOptions } from \"./providers/mistral.js\";\nexport type { OpenAICodexResponsesOptions } from \"./providers/openai-codex-responses.js\";\nexport type { OpenAICompletionsOptions } from \"./providers/openai-completions.js\";\nexport type { OpenAIResponsesOptions } from \"./providers/openai-responses.js\";\nexport * from \"./providers/register-builtins.js\";\nexport * from \"./stream.js\";\nexport * from \"./types.js\";\nexport * from \"./utils/event-stream.js\";\nexport * from \"./utils/json-parse.js\";\nexport type {\n\tOAuthAuthInfo,\n\tOAuthCredentials,\n\tOAuthLoginCallbacks,\n\tOAuthPrompt,\n\tOAuthProvider,\n\tOAuthProviderId,\n\tOAuthProviderInfo,\n\tOAuthProviderInterface,\n} from \"./utils/oauth/types.js\";\nexport * from \"./utils/overflow.js\";\nexport * from 
\"./utils/typebox-helpers.js\";\nexport * from \"./utils/validation.js\";\n"]}
@@ -0,0 +1,56 @@
1
+ import type { AssistantMessage, Context, Model, StreamOptions, TextContent, ThinkingContent, ToolCall } from "../types.js";
2
+ export interface FauxModelDefinition {
3
+ id: string;
4
+ name?: string;
5
+ reasoning?: boolean;
6
+ input?: ("text" | "image")[];
7
+ cost?: {
8
+ input: number;
9
+ output: number;
10
+ cacheRead: number;
11
+ cacheWrite: number;
12
+ };
13
+ contextWindow?: number;
14
+ maxTokens?: number;
15
+ }
16
+ export type FauxContentBlock = TextContent | ThinkingContent | ToolCall;
17
+ export declare function fauxText(text: string): TextContent;
18
+ export declare function fauxThinking(thinking: string): ThinkingContent;
19
+ export declare function fauxToolCall(name: string, arguments_: ToolCall["arguments"], options?: {
20
+ id?: string;
21
+ }): ToolCall;
22
+ export declare function fauxAssistantMessage(content: string | FauxContentBlock | FauxContentBlock[], options?: {
23
+ stopReason?: AssistantMessage["stopReason"];
24
+ errorMessage?: string;
25
+ responseId?: string;
26
+ timestamp?: number;
27
+ }): AssistantMessage;
28
+ export type FauxResponseFactory = (context: Context, options: StreamOptions | undefined, state: {
29
+ callCount: number;
30
+ }, model: Model<string>) => AssistantMessage | Promise<AssistantMessage>;
31
+ export type FauxResponseStep = AssistantMessage | FauxResponseFactory;
32
+ export interface RegisterFauxProviderOptions {
33
+ api?: string;
34
+ provider?: string;
35
+ models?: FauxModelDefinition[];
36
+ tokensPerSecond?: number;
37
+ tokenSize?: {
38
+ min?: number;
39
+ max?: number;
40
+ };
41
+ }
42
+ export interface FauxProviderRegistration {
43
+ api: string;
44
+ models: [Model<string>, ...Model<string>[]];
45
+ getModel(): Model<string>;
46
+ getModel(modelId: string): Model<string> | undefined;
47
+ state: {
48
+ callCount: number;
49
+ };
50
+ setResponses: (responses: FauxResponseStep[]) => void;
51
+ appendResponses: (responses: FauxResponseStep[]) => void;
52
+ getPendingResponseCount: () => number;
53
+ unregister: () => void;
54
+ }
55
+ export declare function registerFauxProvider(options?: RegisterFauxProviderOptions): FauxProviderRegistration;
56
+ //# sourceMappingURL=faux.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"faux.d.ts","sourceRoot":"","sources":["../../src/providers/faux.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EACX,gBAAgB,EAEhB,OAAO,EAGP,KAAK,EAGL,aAAa,EACb,WAAW,EACX,eAAe,EACf,QAAQ,EAGR,MAAM,aAAa,CAAC;AAoBrB,MAAM,WAAW,mBAAmB;IACnC,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,KAAK,CAAC,EAAE,CAAC,MAAM,GAAG,OAAO,CAAC,EAAE,CAAC;IAC7B,IAAI,CAAC,EAAE;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAC;QAAC,SAAS,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAA;KAAE,CAAC;IAChF,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,SAAS,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,MAAM,gBAAgB,GAAG,WAAW,GAAG,eAAe,GAAG,QAAQ,CAAC;AAExE,wBAAgB,QAAQ,CAAC,IAAI,EAAE,MAAM,GAAG,WAAW,CAElD;AAED,wBAAgB,YAAY,CAAC,QAAQ,EAAE,MAAM,GAAG,eAAe,CAE9D;AAED,wBAAgB,YAAY,CAAC,IAAI,EAAE,MAAM,EAAE,UAAU,EAAE,QAAQ,CAAC,WAAW,CAAC,EAAE,OAAO,GAAE;IAAE,EAAE,CAAC,EAAE,MAAM,CAAA;CAAO,GAAG,QAAQ,CAOrH;AASD,wBAAgB,oBAAoB,CACnC,OAAO,EAAE,MAAM,GAAG,gBAAgB,GAAG,gBAAgB,EAAE,EACvD,OAAO,GAAE;IACR,UAAU,CAAC,EAAE,gBAAgB,CAAC,YAAY,CAAC,CAAC;IAC5C,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;CACd,GACJ,gBAAgB,CAalB;AAED,MAAM,MAAM,mBAAmB,GAAG,CACjC,OAAO,EAAE,OAAO,EAChB,OAAO,EAAE,aAAa,GAAG,SAAS,EAClC,KAAK,EAAE;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,EAC5B,KAAK,EAAE,KAAK,CAAC,MAAM,CAAC,KAChB,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC,CAAC;AAElD,MAAM,MAAM,gBAAgB,GAAG,gBAAgB,GAAG,mBAAmB,CAAC;AAEtE,MAAM,WAAW,2BAA2B;IAC3C,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,MAAM,CAAC,EAAE,mBAAmB,EAAE,CAAC;IAC/B,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,SAAS,CAAC,EAAE;QACX,GAAG,CAAC,EAAE,MAAM,CAAC;QACb,GAAG,CAAC,EAAE,MAAM,CAAC;KACb,CAAC;CACF;AAED,MAAM,WAAW,wBAAwB;IACxC,GAAG,EAAE,MAAM,CAAC;IACZ,MAAM,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,GAAG,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC;IAC5C,QAAQ,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC;IAC1B,QAAQ,CAAC,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,GAAG,SAAS,CAAC;IACrD,KAAK,EAAE;QAAE,SAAS,EAAE,MAAM,CAAA;KAAE,CAAC;IAC7B,YAAY,EAAE,CAAC,SAAS,EAAE,gBAAgB,EAAE,KAAK,IAAI,CAAC;I
ACtD,eAAe,EAAE,CAAC,SAAS,EAAE,gBAAgB,EAAE,KAAK,IAAI,CAAC;IACzD,uBAAuB,EAAE,MAAM,MAAM,CAAC;IACtC,UAAU,EAAE,MAAM,IAAI,CAAC;CACvB;AAyQD,wBAAgB,oBAAoB,CAAC,OAAO,GAAE,2BAAgC,GAAG,wBAAwB,CA2GxG","sourcesContent":["import { registerApiProvider, unregisterApiProviders } from \"../api-registry.js\";\nimport type {\n\tAssistantMessage,\n\tAssistantMessageEventStream,\n\tContext,\n\tImageContent,\n\tMessage,\n\tModel,\n\tSimpleStreamOptions,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tToolCall,\n\tToolResultMessage,\n\tUsage,\n} from \"../types.js\";\nimport { createAssistantMessageEventStream } from \"../utils/event-stream.js\";\n\nconst DEFAULT_API = \"faux\";\nconst DEFAULT_PROVIDER = \"faux\";\nconst DEFAULT_MODEL_ID = \"faux-1\";\nconst DEFAULT_MODEL_NAME = \"Faux Model\";\nconst DEFAULT_BASE_URL = \"http://localhost:0\";\nconst DEFAULT_MIN_TOKEN_SIZE = 3;\nconst DEFAULT_MAX_TOKEN_SIZE = 5;\n\nconst DEFAULT_USAGE: Usage = {\n\tinput: 0,\n\toutput: 0,\n\tcacheRead: 0,\n\tcacheWrite: 0,\n\ttotalTokens: 0,\n\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n};\n\nexport interface FauxModelDefinition {\n\tid: string;\n\tname?: string;\n\treasoning?: boolean;\n\tinput?: (\"text\" | \"image\")[];\n\tcost?: { input: number; output: number; cacheRead: number; cacheWrite: number };\n\tcontextWindow?: number;\n\tmaxTokens?: number;\n}\n\nexport type FauxContentBlock = TextContent | ThinkingContent | ToolCall;\n\nexport function fauxText(text: string): TextContent {\n\treturn { type: \"text\", text };\n}\n\nexport function fauxThinking(thinking: string): ThinkingContent {\n\treturn { type: \"thinking\", thinking };\n}\n\nexport function fauxToolCall(name: string, arguments_: ToolCall[\"arguments\"], options: { id?: string } = {}): ToolCall {\n\treturn {\n\t\ttype: \"toolCall\",\n\t\tid: options.id ?? 
randomId(\"tool\"),\n\t\tname,\n\t\targuments: arguments_,\n\t};\n}\n\nfunction normalizeFauxAssistantContent(content: string | FauxContentBlock | FauxContentBlock[]): FauxContentBlock[] {\n\tif (typeof content === \"string\") {\n\t\treturn [fauxText(content)];\n\t}\n\treturn Array.isArray(content) ? content : [content];\n}\n\nexport function fauxAssistantMessage(\n\tcontent: string | FauxContentBlock | FauxContentBlock[],\n\toptions: {\n\t\tstopReason?: AssistantMessage[\"stopReason\"];\n\t\terrorMessage?: string;\n\t\tresponseId?: string;\n\t\ttimestamp?: number;\n\t} = {},\n): AssistantMessage {\n\treturn {\n\t\trole: \"assistant\",\n\t\tcontent: normalizeFauxAssistantContent(content),\n\t\tapi: DEFAULT_API,\n\t\tprovider: DEFAULT_PROVIDER,\n\t\tmodel: DEFAULT_MODEL_ID,\n\t\tusage: DEFAULT_USAGE,\n\t\tstopReason: options.stopReason ?? \"stop\",\n\t\terrorMessage: options.errorMessage,\n\t\tresponseId: options.responseId,\n\t\ttimestamp: options.timestamp ?? Date.now(),\n\t};\n}\n\nexport type FauxResponseFactory = (\n\tcontext: Context,\n\toptions: StreamOptions | undefined,\n\tstate: { callCount: number },\n\tmodel: Model<string>,\n) => AssistantMessage | Promise<AssistantMessage>;\n\nexport type FauxResponseStep = AssistantMessage | FauxResponseFactory;\n\nexport interface RegisterFauxProviderOptions {\n\tapi?: string;\n\tprovider?: string;\n\tmodels?: FauxModelDefinition[];\n\ttokensPerSecond?: number;\n\ttokenSize?: {\n\t\tmin?: number;\n\t\tmax?: number;\n\t};\n}\n\nexport interface FauxProviderRegistration {\n\tapi: string;\n\tmodels: [Model<string>, ...Model<string>[]];\n\tgetModel(): Model<string>;\n\tgetModel(modelId: string): Model<string> | undefined;\n\tstate: { callCount: number };\n\tsetResponses: (responses: FauxResponseStep[]) => void;\n\tappendResponses: (responses: FauxResponseStep[]) => void;\n\tgetPendingResponseCount: () => number;\n\tunregister: () => void;\n}\n\nfunction estimateTokens(text: string): number {\n\treturn 
Math.ceil(text.length / 4);\n}\n\nfunction randomId(prefix: string): string {\n\treturn `${prefix}:${Date.now()}:${Math.random().toString(36).slice(2)}`;\n}\n\nfunction contentToText(content: string | Array<TextContent | ImageContent>): string {\n\tif (typeof content === \"string\") {\n\t\treturn content;\n\t}\n\treturn content\n\t\t.map((block) => {\n\t\t\tif (block.type === \"text\") {\n\t\t\t\treturn block.text;\n\t\t\t}\n\t\t\treturn `[image:${block.mimeType}:${block.data.length}]`;\n\t\t})\n\t\t.join(\"\\n\");\n}\n\nfunction assistantContentToText(content: Array<TextContent | ThinkingContent | ToolCall>): string {\n\treturn content\n\t\t.map((block) => {\n\t\t\tif (block.type === \"text\") {\n\t\t\t\treturn block.text;\n\t\t\t}\n\t\t\tif (block.type === \"thinking\") {\n\t\t\t\treturn block.thinking;\n\t\t\t}\n\t\t\treturn `${block.name}:${JSON.stringify(block.arguments)}`;\n\t\t})\n\t\t.join(\"\\n\");\n}\n\nfunction toolResultToText(message: ToolResultMessage): string {\n\treturn [message.toolName, ...message.content.map((block) => contentToText([block]))].join(\"\\n\");\n}\n\nfunction messageToText(message: Message): string {\n\tif (message.role === \"user\") {\n\t\treturn contentToText(message.content);\n\t}\n\tif (message.role === \"assistant\") {\n\t\treturn assistantContentToText(message.content);\n\t}\n\treturn toolResultToText(message);\n}\n\nfunction serializeContext(context: Context): string {\n\tconst parts: string[] = [];\n\tif (context.systemPrompt) {\n\t\tparts.push(`system:${context.systemPrompt}`);\n\t}\n\tfor (const message of context.messages) {\n\t\tparts.push(`${message.role}:${messageToText(message)}`);\n\t}\n\tif (context.tools?.length) {\n\t\tparts.push(`tools:${JSON.stringify(context.tools)}`);\n\t}\n\treturn parts.join(\"\\n\\n\");\n}\n\nfunction commonPrefixLength(a: string, b: string): number {\n\tconst length = Math.min(a.length, b.length);\n\tlet index = 0;\n\twhile (index < length && a[index] === b[index]) 
{\n\t\tindex++;\n\t}\n\treturn index;\n}\n\nfunction withUsageEstimate(\n\tmessage: AssistantMessage,\n\tcontext: Context,\n\toptions: StreamOptions | undefined,\n\tpromptCache: Map<string, string>,\n): AssistantMessage {\n\tconst promptText = serializeContext(context);\n\tconst promptTokens = estimateTokens(promptText);\n\tconst outputTokens = estimateTokens(assistantContentToText(message.content));\n\tlet input = promptTokens;\n\tlet cacheRead = 0;\n\tlet cacheWrite = 0;\n\tconst sessionId = options?.sessionId;\n\n\tif (sessionId && options?.cacheRetention !== \"none\") {\n\t\tconst previousPrompt = promptCache.get(sessionId);\n\t\tif (previousPrompt) {\n\t\t\tconst cachedChars = commonPrefixLength(previousPrompt, promptText);\n\t\t\tcacheRead = estimateTokens(previousPrompt.slice(0, cachedChars));\n\t\t\tcacheWrite = estimateTokens(promptText.slice(cachedChars));\n\t\t\tinput = Math.max(0, promptTokens - cacheRead);\n\t\t} else {\n\t\t\tcacheWrite = promptTokens;\n\t\t}\n\t\tpromptCache.set(sessionId, promptText);\n\t}\n\n\treturn {\n\t\t...message,\n\t\tusage: {\n\t\t\tinput,\n\t\t\toutput: outputTokens,\n\t\t\tcacheRead,\n\t\t\tcacheWrite,\n\t\t\ttotalTokens: input + outputTokens + cacheRead + cacheWrite,\n\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t},\n\t};\n}\n\nfunction splitStringByTokenSize(text: string, minTokenSize: number, maxTokenSize: number): string[] {\n\tconst chunks: string[] = [];\n\tlet index = 0;\n\twhile (index < text.length) {\n\t\tconst tokenSize = minTokenSize + Math.floor(Math.random() * (maxTokenSize - minTokenSize + 1));\n\t\tconst charSize = Math.max(1, tokenSize * 4);\n\t\tchunks.push(text.slice(index, index + charSize));\n\t\tindex += charSize;\n\t}\n\treturn chunks.length > 0 ? 
chunks : [\"\"];\n}\n\nfunction cloneMessage(message: AssistantMessage, api: string, provider: string, modelId: string): AssistantMessage {\n\tconst cloned = structuredClone(message);\n\treturn {\n\t\t...cloned,\n\t\tapi,\n\t\tprovider,\n\t\tmodel: modelId,\n\t\ttimestamp: cloned.timestamp ?? Date.now(),\n\t\tusage: cloned.usage ?? DEFAULT_USAGE,\n\t};\n}\n\nfunction createErrorMessage(error: unknown, api: string, provider: string, modelId: string): AssistantMessage {\n\treturn {\n\t\trole: \"assistant\",\n\t\tcontent: [],\n\t\tapi,\n\t\tprovider,\n\t\tmodel: modelId,\n\t\tusage: DEFAULT_USAGE,\n\t\tstopReason: \"error\",\n\t\terrorMessage: error instanceof Error ? error.message : String(error),\n\t\ttimestamp: Date.now(),\n\t};\n}\n\nfunction createAbortedMessage(partial: AssistantMessage): AssistantMessage {\n\treturn {\n\t\t...partial,\n\t\tstopReason: \"aborted\",\n\t\terrorMessage: \"Request was aborted\",\n\t\ttimestamp: Date.now(),\n\t};\n}\n\nfunction scheduleChunk(chunk: string, tokensPerSecond: number | undefined): Promise<void> {\n\tif (!tokensPerSecond || tokensPerSecond <= 0) {\n\t\treturn new Promise((resolve) => queueMicrotask(resolve));\n\t}\n\tconst delayMs = (estimateTokens(chunk) / tokensPerSecond) * 1000;\n\treturn new Promise((resolve) => setTimeout(resolve, delayMs));\n}\n\nasync function streamWithDeltas(\n\tstream: AssistantMessageEventStream,\n\tmessage: AssistantMessage,\n\tminTokenSize: number,\n\tmaxTokenSize: number,\n\ttokensPerSecond: number | undefined,\n\tsignal: AbortSignal | undefined,\n): Promise<void> {\n\tconst partial: AssistantMessage = { ...message, content: [] };\n\tif (signal?.aborted) {\n\t\tconst aborted = createAbortedMessage(partial);\n\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\tstream.end(aborted);\n\t\treturn;\n\t}\n\n\tstream.push({ type: \"start\", partial: { ...partial } });\n\n\tfor (let index = 0; index < message.content.length; index++) {\n\t\tif (signal?.aborted) 
{\n\t\t\tconst aborted = createAbortedMessage(partial);\n\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\t\tstream.end(aborted);\n\t\t\treturn;\n\t\t}\n\n\t\tconst block = message.content[index];\n\n\t\tif (block.type === \"thinking\") {\n\t\t\tpartial.content = [...partial.content, { type: \"thinking\", thinking: \"\" }];\n\t\t\tstream.push({ type: \"thinking_start\", contentIndex: index, partial: { ...partial } });\n\t\t\tfor (const chunk of splitStringByTokenSize(block.thinking, minTokenSize, maxTokenSize)) {\n\t\t\t\tawait scheduleChunk(chunk, tokensPerSecond);\n\t\t\t\tif (signal?.aborted) {\n\t\t\t\t\tconst aborted = createAbortedMessage(partial);\n\t\t\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\t\t\t\tstream.end(aborted);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t\t(partial.content[index] as ThinkingContent).thinking += chunk;\n\t\t\t\tstream.push({ type: \"thinking_delta\", contentIndex: index, delta: chunk, partial: { ...partial } });\n\t\t\t}\n\t\t\tstream.push({\n\t\t\t\ttype: \"thinking_end\",\n\t\t\t\tcontentIndex: index,\n\t\t\t\tcontent: block.thinking,\n\t\t\t\tpartial: { ...partial },\n\t\t\t});\n\t\t\tcontinue;\n\t\t}\n\n\t\tif (block.type === \"text\") {\n\t\t\tpartial.content = [...partial.content, { type: \"text\", text: \"\" }];\n\t\t\tstream.push({ type: \"text_start\", contentIndex: index, partial: { ...partial } });\n\t\t\tfor (const chunk of splitStringByTokenSize(block.text, minTokenSize, maxTokenSize)) {\n\t\t\t\tawait scheduleChunk(chunk, tokensPerSecond);\n\t\t\t\tif (signal?.aborted) {\n\t\t\t\t\tconst aborted = createAbortedMessage(partial);\n\t\t\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\t\t\t\tstream.end(aborted);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t\t(partial.content[index] as TextContent).text += chunk;\n\t\t\t\tstream.push({ type: \"text_delta\", contentIndex: index, delta: chunk, partial: { ...partial } 
});\n\t\t\t}\n\t\t\tstream.push({ type: \"text_end\", contentIndex: index, content: block.text, partial: { ...partial } });\n\t\t\tcontinue;\n\t\t}\n\n\t\tpartial.content = [...partial.content, { type: \"toolCall\", id: block.id, name: block.name, arguments: {} }];\n\t\tstream.push({ type: \"toolcall_start\", contentIndex: index, partial: { ...partial } });\n\t\tfor (const chunk of splitStringByTokenSize(JSON.stringify(block.arguments), minTokenSize, maxTokenSize)) {\n\t\t\tawait scheduleChunk(chunk, tokensPerSecond);\n\t\t\tif (signal?.aborted) {\n\t\t\t\tconst aborted = createAbortedMessage(partial);\n\t\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\t\t\tstream.end(aborted);\n\t\t\t\treturn;\n\t\t\t}\n\t\t\tstream.push({ type: \"toolcall_delta\", contentIndex: index, delta: chunk, partial: { ...partial } });\n\t\t}\n\t\t(partial.content[index] as ToolCall).arguments = block.arguments;\n\t\tstream.push({ type: \"toolcall_end\", contentIndex: index, toolCall: block, partial: { ...partial } });\n\t}\n\n\tif (message.stopReason === \"error\" || message.stopReason === \"aborted\") {\n\t\tstream.push({ type: \"error\", reason: message.stopReason, error: message });\n\t\tstream.end(message);\n\t\treturn;\n\t}\n\n\tstream.push({ type: \"done\", reason: message.stopReason, message });\n\tstream.end(message);\n}\n\nexport function registerFauxProvider(options: RegisterFauxProviderOptions = {}): FauxProviderRegistration {\n\tconst api = options.api ?? randomId(DEFAULT_API);\n\tconst provider = options.provider ?? DEFAULT_PROVIDER;\n\tconst sourceId = randomId(\"faux-provider\");\n\tconst minTokenSize = Math.max(\n\t\t1,\n\t\tMath.min(options.tokenSize?.min ?? DEFAULT_MIN_TOKEN_SIZE, options.tokenSize?.max ?? DEFAULT_MAX_TOKEN_SIZE),\n\t);\n\tconst maxTokenSize = Math.max(minTokenSize, options.tokenSize?.max ?? 
DEFAULT_MAX_TOKEN_SIZE);\n\tlet pendingResponses: FauxResponseStep[] = [];\n\tconst tokensPerSecond = options.tokensPerSecond;\n\tconst state = { callCount: 0 };\n\tconst promptCache = new Map<string, string>();\n\n\tconst modelDefinitions = options.models?.length\n\t\t? options.models\n\t\t: [\n\t\t\t\t{\n\t\t\t\t\tid: DEFAULT_MODEL_ID,\n\t\t\t\t\tname: DEFAULT_MODEL_NAME,\n\t\t\t\t\treasoning: false,\n\t\t\t\t\tinput: [\"text\", \"image\"] as (\"text\" | \"image\")[],\n\t\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },\n\t\t\t\t\tcontextWindow: 128000,\n\t\t\t\t\tmaxTokens: 16384,\n\t\t\t\t},\n\t\t\t];\n\tconst models = modelDefinitions.map((definition) => ({\n\t\tid: definition.id,\n\t\tname: definition.name ?? definition.id,\n\t\tapi,\n\t\tprovider,\n\t\tbaseUrl: DEFAULT_BASE_URL,\n\t\treasoning: definition.reasoning ?? false,\n\t\tinput: definition.input ?? [\"text\", \"image\"],\n\t\tcost: definition.cost ?? { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },\n\t\tcontextWindow: definition.contextWindow ?? 128000,\n\t\tmaxTokens: definition.maxTokens ?? 16384,\n\t})) as [Model<string>, ...Model<string>[]];\n\n\tconst stream: StreamFunction<string, StreamOptions> = (requestModel, context, streamOptions) => {\n\t\tconst outer = createAssistantMessageEventStream();\n\t\tconst step = pendingResponses.shift();\n\t\tstate.callCount++;\n\n\t\tqueueMicrotask(async () => {\n\t\t\ttry {\n\t\t\t\tif (!step) {\n\t\t\t\t\tlet message = createErrorMessage(\n\t\t\t\t\t\tnew Error(\"No more faux responses queued\"),\n\t\t\t\t\t\tapi,\n\t\t\t\t\t\tprovider,\n\t\t\t\t\t\trequestModel.id,\n\t\t\t\t\t);\n\t\t\t\t\tmessage = withUsageEstimate(message, context, streamOptions, promptCache);\n\t\t\t\t\touter.push({ type: \"error\", reason: \"error\", error: message });\n\t\t\t\t\touter.end(message);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\n\t\t\t\tconst resolved =\n\t\t\t\t\ttypeof step === \"function\" ? 
await step(context, streamOptions, state, requestModel) : step;\n\t\t\t\tlet message = cloneMessage(resolved, api, provider, requestModel.id);\n\t\t\t\tmessage = withUsageEstimate(message, context, streamOptions, promptCache);\n\t\t\t\tawait streamWithDeltas(outer, message, minTokenSize, maxTokenSize, tokensPerSecond, streamOptions?.signal);\n\t\t\t} catch (error) {\n\t\t\t\tconst message = createErrorMessage(error, api, provider, requestModel.id);\n\t\t\t\touter.push({ type: \"error\", reason: \"error\", error: message });\n\t\t\t\touter.end(message);\n\t\t\t}\n\t\t});\n\n\t\treturn outer;\n\t};\n\n\tconst streamSimple: StreamFunction<string, SimpleStreamOptions> = (streamModel, context, streamOptions) =>\n\t\tstream(streamModel, context, streamOptions);\n\n\tregisterApiProvider({ api, stream, streamSimple }, sourceId);\n\n\tfunction getModel(): Model<string>;\n\tfunction getModel(requestedModelId: string): Model<string> | undefined;\n\tfunction getModel(requestedModelId?: string): Model<string> | undefined {\n\t\tif (!requestedModelId) {\n\t\t\treturn models[0];\n\t\t}\n\t\treturn models.find((candidate) => candidate.id === requestedModelId);\n\t}\n\n\treturn {\n\t\tapi,\n\t\tmodels,\n\t\tgetModel,\n\t\tstate,\n\t\tsetResponses(responses) {\n\t\t\tpendingResponses = [...responses];\n\t\t},\n\t\tappendResponses(responses) {\n\t\t\tpendingResponses.push(...responses);\n\t\t},\n\t\tgetPendingResponseCount() {\n\t\t\treturn pendingResponses.length;\n\t\t},\n\t\tunregister() {\n\t\t\tunregisterApiProviders(sourceId);\n\t\t},\n\t};\n}\n"]}
@@ -0,0 +1,367 @@
1
+ import { registerApiProvider, unregisterApiProviders } from "../api-registry.js";
2
+ import { createAssistantMessageEventStream } from "../utils/event-stream.js";
3
// Defaults applied when the caller does not override api/provider/model metadata.
const DEFAULT_API = "faux";
const DEFAULT_PROVIDER = "faux";
const DEFAULT_MODEL_ID = "faux-1";
const DEFAULT_MODEL_NAME = "Faux Model";
// NOTE(review): port 0 presumably guarantees no real network endpoint is ever hit — confirm.
const DEFAULT_BASE_URL = "http://localhost:0";
// Bounds (in estimated tokens) for each randomly sized streaming chunk.
const DEFAULT_MIN_TOKEN_SIZE = 3;
const DEFAULT_MAX_TOKEN_SIZE = 5;
// Zero-valued usage placeholder attached to messages before usage estimation runs.
const DEFAULT_USAGE = {
	input: 0,
	output: 0,
	cacheRead: 0,
	cacheWrite: 0,
	totalTokens: 0,
	cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
};
18
/** Builds a plain text content block for scripted faux responses. */
export function fauxText(text) {
	const block = { type: "text", text };
	return block;
}
21
/** Builds a thinking (reasoning) content block for scripted faux responses. */
export function fauxThinking(thinking) {
	const block = { type: "thinking", thinking };
	return block;
}
24
/**
 * Builds a tool-call content block for scripted faux responses.
 * A random id is generated when options.id is not supplied.
 */
export function fauxToolCall(name, arguments_, options = {}) {
	const id = options.id ?? randomId("tool");
	return { type: "toolCall", id, name, arguments: arguments_ };
}
32
/** Coerces a string / single block / block array into a block array. */
function normalizeFauxAssistantContent(content) {
	if (typeof content === "string") {
		return [fauxText(content)];
	}
	if (Array.isArray(content)) {
		return content;
	}
	return [content];
}
38
/**
 * Builds a complete assistant message around the given content, filling in
 * faux api/provider/model metadata and zeroed usage. stopReason defaults to
 * "stop" and timestamp to the current time unless overridden via options.
 */
export function fauxAssistantMessage(content, options = {}) {
	const blocks = normalizeFauxAssistantContent(content);
	return {
		role: "assistant",
		content: blocks,
		api: DEFAULT_API,
		provider: DEFAULT_PROVIDER,
		model: DEFAULT_MODEL_ID,
		usage: DEFAULT_USAGE,
		stopReason: options.stopReason ?? "stop",
		errorMessage: options.errorMessage,
		responseId: options.responseId,
		timestamp: options.timestamp ?? Date.now(),
	};
}
52
/** Rough token estimate: one token per four characters, rounded up. */
function estimateTokens(text) {
	const approxTokens = text.length / 4;
	return Math.ceil(approxTokens);
}
55
/** Produces a loosely unique id of the form "prefix:<epoch-ms>:<random base36>". */
function randomId(prefix) {
	const stamp = Date.now();
	const suffix = Math.random().toString(36).slice(2);
	return [prefix, stamp, suffix].join(":");
}
58
/**
 * Flattens user content to text for prompt serialization: strings pass
 * through, text blocks contribute their text, and image blocks become a
 * "[image:mime:byte-length]" placeholder. Blocks are joined with newlines.
 */
function contentToText(content) {
	if (typeof content === "string") {
		return content;
	}
	const parts = [];
	for (const block of content) {
		parts.push(block.type === "text" ? block.text : `[image:${block.mimeType}:${block.data.length}]`);
	}
	return parts.join("\n");
}
71
/**
 * Flattens assistant content to text: text/thinking blocks contribute their
 * string, tool calls become "name:jsonArguments". Joined with newlines.
 */
function assistantContentToText(content) {
	const lines = content.map((block) => {
		switch (block.type) {
			case "text":
				return block.text;
			case "thinking":
				return block.thinking;
			default:
				return `${block.name}:${JSON.stringify(block.arguments)}`;
		}
	});
	return lines.join("\n");
}
84
/** Flattens a tool-result message to text: tool name followed by each content block. */
function toolResultToText(message) {
	const parts = [message.toolName];
	for (const block of message.content) {
		parts.push(contentToText([block]));
	}
	return parts.join("\n");
}
87
/** Dispatches a message to the role-appropriate text flattener. */
function messageToText(message) {
	switch (message.role) {
		case "user":
			return contentToText(message.content);
		case "assistant":
			return assistantContentToText(message.content);
		default:
			// Remaining role is the tool-result message shape.
			return toolResultToText(message);
	}
}
96
/**
 * Serializes a conversation context (system prompt, messages, tools) into a
 * single deterministic string used for token counting and cache-prefix
 * comparison. Sections are joined with blank lines.
 */
function serializeContext(context) {
	const sections = [];
	if (context.systemPrompt) {
		sections.push(`system:${context.systemPrompt}`);
	}
	context.messages.forEach((message) => {
		sections.push(`${message.role}:${messageToText(message)}`);
	});
	if (context.tools?.length) {
		sections.push(`tools:${JSON.stringify(context.tools)}`);
	}
	return sections.join("\n\n");
}
109
/** Returns the length of the longest shared leading substring of a and b. */
function commonPrefixLength(a, b) {
	const limit = Math.min(a.length, b.length);
	for (let i = 0; i < limit; i++) {
		if (a[i] !== b[i]) {
			return i;
		}
	}
	return limit;
}
117
/**
 * Returns a copy of `message` with estimated token usage attached.
 * When a sessionId is given and cacheRetention !== "none", the serialized
 * prompt is compared with the previous prompt for that session: the shared
 * prefix counts as cacheRead, the new suffix as cacheWrite, and input is
 * reduced by the cached portion. Costs are always zero for the faux provider.
 */
function withUsageEstimate(message, context, options, promptCache) {
	const promptText = serializeContext(context);
	const promptTokens = estimateTokens(promptText);
	const outputTokens = estimateTokens(assistantContentToText(message.content));
	let input = promptTokens;
	let cacheRead = 0;
	let cacheWrite = 0;
	const sessionId = options?.sessionId;
	const cachingEnabled = Boolean(sessionId) && options?.cacheRetention !== "none";
	if (cachingEnabled) {
		const previousPrompt = promptCache.get(sessionId);
		if (!previousPrompt) {
			// First request in this session: the whole prompt is written to cache.
			cacheWrite = promptTokens;
		} else {
			const sharedChars = commonPrefixLength(previousPrompt, promptText);
			cacheRead = estimateTokens(previousPrompt.slice(0, sharedChars));
			cacheWrite = estimateTokens(promptText.slice(sharedChars));
			input = Math.max(0, promptTokens - cacheRead);
		}
		promptCache.set(sessionId, promptText);
	}
	const usage = {
		input,
		output: outputTokens,
		cacheRead,
		cacheWrite,
		totalTokens: input + outputTokens + cacheRead + cacheWrite,
		cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
	};
	return { ...message, usage };
}
150
/**
 * Splits text into chunks of random size between minTokenSize and
 * maxTokenSize tokens (4 chars per token, at least 1 char per chunk).
 * Empty input yields a single empty chunk so callers always emit one delta.
 */
function splitStringByTokenSize(text, minTokenSize, maxTokenSize) {
	const pieces = [];
	for (let cursor = 0; cursor < text.length; ) {
		const span = maxTokenSize - minTokenSize + 1;
		const tokens = minTokenSize + Math.floor(Math.random() * span);
		const step = Math.max(1, tokens * 4);
		pieces.push(text.slice(cursor, cursor + step));
		cursor += step;
	}
	return pieces.length === 0 ? [""] : pieces;
}
161
/**
 * Deep-clones a scripted message and stamps it with the active api/provider/
 * model identity, defaulting timestamp and usage when the script omitted them.
 */
function cloneMessage(message, api, provider, modelId) {
	const copy = structuredClone(message);
	copy.api = api;
	copy.provider = provider;
	copy.model = modelId;
	copy.timestamp = copy.timestamp ?? Date.now();
	copy.usage = copy.usage ?? DEFAULT_USAGE;
	return copy;
}
172
/** Builds an empty assistant message with stopReason "error" carrying the error text. */
function createErrorMessage(error, api, provider, modelId) {
	const text = error instanceof Error ? error.message : String(error);
	return {
		role: "assistant",
		content: [],
		api,
		provider,
		model: modelId,
		usage: DEFAULT_USAGE,
		stopReason: "error",
		errorMessage: text,
		timestamp: Date.now(),
	};
}
185
/** Converts a partially streamed message into an "aborted" terminal message. */
function createAbortedMessage(partial) {
	const aborted = { ...partial };
	aborted.stopReason = "aborted";
	aborted.errorMessage = "Request was aborted";
	aborted.timestamp = Date.now();
	return aborted;
}
193
/**
 * Paces one chunk: without a positive tokensPerSecond it yields a microtask
 * (fast but still async); otherwise it delays proportionally to the chunk's
 * estimated token count.
 */
function scheduleChunk(chunk, tokensPerSecond) {
	if (!tokensPerSecond || tokensPerSecond <= 0) {
		return new Promise((resolve) => queueMicrotask(resolve));
	}
	const delayMs = (estimateTokens(chunk) / tokensPerSecond) * 1000;
	return new Promise((resolve) => setTimeout(resolve, delayMs));
}
200
/**
 * Replays a fully-formed assistant message as a realistic event stream:
 * "start", then per content block a *_start / *_delta / *_end sequence
 * (thinking, text, or toolCall), then "done" — or "error" when the message's
 * stopReason is "error"/"aborted" or the signal fires mid-stream.
 * The abort signal is re-checked before each block and after every chunk
 * delay; on abort an "aborted" message built from the partial content ends
 * the stream.
 */
async function streamWithDeltas(stream, message, minTokenSize, maxTokenSize, tokensPerSecond, signal) {
	// Accumulates the content emitted so far; a shallow copy goes into every event.
	const partial = { ...message, content: [] };
	if (signal?.aborted) {
		const aborted = createAbortedMessage(partial);
		stream.push({ type: "error", reason: "aborted", error: aborted });
		stream.end(aborted);
		return;
	}
	stream.push({ type: "start", partial: { ...partial } });
	for (let index = 0; index < message.content.length; index++) {
		if (signal?.aborted) {
			const aborted = createAbortedMessage(partial);
			stream.push({ type: "error", reason: "aborted", error: aborted });
			stream.end(aborted);
			return;
		}
		const block = message.content[index];
		if (block.type === "thinking") {
			partial.content = [...partial.content, { type: "thinking", thinking: "" }];
			stream.push({ type: "thinking_start", contentIndex: index, partial: { ...partial } });
			for (const chunk of splitStringByTokenSize(block.thinking, minTokenSize, maxTokenSize)) {
				// Pacing happens before the abort check so aborts during the delay are honored.
				await scheduleChunk(chunk, tokensPerSecond);
				if (signal?.aborted) {
					const aborted = createAbortedMessage(partial);
					stream.push({ type: "error", reason: "aborted", error: aborted });
					stream.end(aborted);
					return;
				}
				partial.content[index].thinking += chunk;
				stream.push({ type: "thinking_delta", contentIndex: index, delta: chunk, partial: { ...partial } });
			}
			stream.push({
				type: "thinking_end",
				contentIndex: index,
				content: block.thinking,
				partial: { ...partial },
			});
			continue;
		}
		if (block.type === "text") {
			partial.content = [...partial.content, { type: "text", text: "" }];
			stream.push({ type: "text_start", contentIndex: index, partial: { ...partial } });
			for (const chunk of splitStringByTokenSize(block.text, minTokenSize, maxTokenSize)) {
				await scheduleChunk(chunk, tokensPerSecond);
				if (signal?.aborted) {
					const aborted = createAbortedMessage(partial);
					stream.push({ type: "error", reason: "aborted", error: aborted });
					stream.end(aborted);
					return;
				}
				partial.content[index].text += chunk;
				stream.push({ type: "text_delta", contentIndex: index, delta: chunk, partial: { ...partial } });
			}
			stream.push({ type: "text_end", contentIndex: index, content: block.text, partial: { ...partial } });
			continue;
		}
		// Remaining case is a tool call: arguments stay {} during deltas and are
		// only filled in when the block completes, mirroring real streaming APIs.
		partial.content = [...partial.content, { type: "toolCall", id: block.id, name: block.name, arguments: {} }];
		stream.push({ type: "toolcall_start", contentIndex: index, partial: { ...partial } });
		for (const chunk of splitStringByTokenSize(JSON.stringify(block.arguments), minTokenSize, maxTokenSize)) {
			await scheduleChunk(chunk, tokensPerSecond);
			if (signal?.aborted) {
				const aborted = createAbortedMessage(partial);
				stream.push({ type: "error", reason: "aborted", error: aborted });
				stream.end(aborted);
				return;
			}
			stream.push({ type: "toolcall_delta", contentIndex: index, delta: chunk, partial: { ...partial } });
		}
		partial.content[index].arguments = block.arguments;
		stream.push({ type: "toolcall_end", contentIndex: index, toolCall: block, partial: { ...partial } });
	}
	// Scripted error/aborted messages terminate as an error event, not "done".
	if (message.stopReason === "error" || message.stopReason === "aborted") {
		stream.push({ type: "error", reason: message.stopReason, error: message });
		stream.end(message);
		return;
	}
	stream.push({ type: "done", reason: message.stopReason, message });
	stream.end(message);
}
279
/**
 * Registers an in-memory "faux" API provider (for tests and demos) and
 * returns a handle with: the api id, the registered models, getModel(),
 * mutable call-count state, queue management (setResponses/appendResponses/
 * getPendingResponseCount), and unregister() for teardown.
 * Each stream() call dequeues one scripted response (or a message factory)
 * and replays it as a delta stream with estimated usage attached.
 */
export function registerFauxProvider(options = {}) {
	// Randomized api id by default, so concurrent registrations don't collide.
	const api = options.api ?? randomId(DEFAULT_API);
	const provider = options.provider ?? DEFAULT_PROVIDER;
	const sourceId = randomId("faux-provider");
	// Clamp so that 1 <= minTokenSize <= maxTokenSize.
	const minTokenSize = Math.max(1, Math.min(options.tokenSize?.min ?? DEFAULT_MIN_TOKEN_SIZE, options.tokenSize?.max ?? DEFAULT_MAX_TOKEN_SIZE));
	const maxTokenSize = Math.max(minTokenSize, options.tokenSize?.max ?? DEFAULT_MAX_TOKEN_SIZE);
	let pendingResponses = [];
	const tokensPerSecond = options.tokensPerSecond;
	const state = { callCount: 0 };
	// sessionId -> last serialized prompt; drives cacheRead/cacheWrite estimates.
	const promptCache = new Map();
	const modelDefinitions = options.models?.length
		? options.models
		: [
				{
					id: DEFAULT_MODEL_ID,
					name: DEFAULT_MODEL_NAME,
					reasoning: false,
					input: ["text", "image"],
					cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
					contextWindow: 128000,
					maxTokens: 16384,
				},
			];
	// Fill in defaults so partial model definitions are still complete models.
	const models = modelDefinitions.map((definition) => ({
		id: definition.id,
		name: definition.name ?? definition.id,
		api,
		provider,
		baseUrl: DEFAULT_BASE_URL,
		reasoning: definition.reasoning ?? false,
		input: definition.input ?? ["text", "image"],
		cost: definition.cost ?? { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
		contextWindow: definition.contextWindow ?? 128000,
		maxTokens: definition.maxTokens ?? 16384,
	}));
	const stream = (requestModel, context, streamOptions) => {
		const outer = createAssistantMessageEventStream();
		// Dequeue synchronously so call order matches queue order even under
		// concurrent stream() calls.
		const step = pendingResponses.shift();
		state.callCount++;
		// Defer emission so callers can attach listeners before events fire.
		queueMicrotask(async () => {
			try {
				if (!step) {
					let message = createErrorMessage(new Error("No more faux responses queued"), api, provider, requestModel.id);
					message = withUsageEstimate(message, context, streamOptions, promptCache);
					outer.push({ type: "error", reason: "error", error: message });
					outer.end(message);
					return;
				}
				// A step may be a literal message or a factory receiving the call context.
				const resolved = typeof step === "function" ? await step(context, streamOptions, state, requestModel) : step;
				let message = cloneMessage(resolved, api, provider, requestModel.id);
				message = withUsageEstimate(message, context, streamOptions, promptCache);
				await streamWithDeltas(outer, message, minTokenSize, maxTokenSize, tokensPerSecond, streamOptions?.signal);
			}
			catch (error) {
				// Factory threw (or streaming failed): surface it as an error message.
				const message = createErrorMessage(error, api, provider, requestModel.id);
				outer.push({ type: "error", reason: "error", error: message });
				outer.end(message);
			}
		});
		return outer;
	};
	const streamSimple = (streamModel, context, streamOptions) => stream(streamModel, context, streamOptions);
	registerApiProvider({ api, stream, streamSimple }, sourceId);
	// No argument -> default (first) model; otherwise lookup by id (may be undefined).
	function getModel(requestedModelId) {
		if (!requestedModelId) {
			return models[0];
		}
		return models.find((candidate) => candidate.id === requestedModelId);
	}
	return {
		api,
		models,
		getModel,
		state,
		setResponses(responses) {
			pendingResponses = [...responses];
		},
		appendResponses(responses) {
			pendingResponses.push(...responses);
		},
		getPendingResponseCount() {
			return pendingResponses.length;
		},
		unregister() {
			unregisterApiProviders(sourceId);
		},
	};
}
367
+ //# sourceMappingURL=faux.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"faux.js","sourceRoot":"","sources":["../../src/providers/faux.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,mBAAmB,EAAE,sBAAsB,EAAE,MAAM,oBAAoB,CAAC;AAiBjF,OAAO,EAAE,iCAAiC,EAAE,MAAM,0BAA0B,CAAC;AAE7E,MAAM,WAAW,GAAG,MAAM,CAAC;AAC3B,MAAM,gBAAgB,GAAG,MAAM,CAAC;AAChC,MAAM,gBAAgB,GAAG,QAAQ,CAAC;AAClC,MAAM,kBAAkB,GAAG,YAAY,CAAC;AACxC,MAAM,gBAAgB,GAAG,oBAAoB,CAAC;AAC9C,MAAM,sBAAsB,GAAG,CAAC,CAAC;AACjC,MAAM,sBAAsB,GAAG,CAAC,CAAC;AAEjC,MAAM,aAAa,GAAU;IAC5B,KAAK,EAAE,CAAC;IACR,MAAM,EAAE,CAAC;IACT,SAAS,EAAE,CAAC;IACZ,UAAU,EAAE,CAAC;IACb,WAAW,EAAE,CAAC;IACd,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE;CACpE,CAAC;AAcF,MAAM,UAAU,QAAQ,CAAC,IAAY,EAAe;IACnD,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;AAAA,CAC9B;AAED,MAAM,UAAU,YAAY,CAAC,QAAgB,EAAmB;IAC/D,OAAO,EAAE,IAAI,EAAE,UAAU,EAAE,QAAQ,EAAE,CAAC;AAAA,CACtC;AAED,MAAM,UAAU,YAAY,CAAC,IAAY,EAAE,UAAiC,EAAE,OAAO,GAAoB,EAAE,EAAY;IACtH,OAAO;QACN,IAAI,EAAE,UAAU;QAChB,EAAE,EAAE,OAAO,CAAC,EAAE,IAAI,QAAQ,CAAC,MAAM,CAAC;QAClC,IAAI;QACJ,SAAS,EAAE,UAAU;KACrB,CAAC;AAAA,CACF;AAED,SAAS,6BAA6B,CAAC,OAAuD,EAAsB;IACnH,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE,CAAC;QACjC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC;IAC5B,CAAC;IACD,OAAO,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;AAAA,CACpD;AAED,MAAM,UAAU,oBAAoB,CACnC,OAAuD,EACvD,OAAO,GAKH,EAAE,EACa;IACnB,OAAO;QACN,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,6BAA6B,CAAC,OAAO,CAAC;QAC/C,GAAG,EAAE,WAAW;QAChB,QAAQ,EAAE,gBAAgB;QAC1B,KAAK,EAAE,gBAAgB;QACvB,KAAK,EAAE,aAAa;QACpB,UAAU,EAAE,OAAO,CAAC,UAAU,IAAI,MAAM;QACxC,YAAY,EAAE,OAAO,CAAC,YAAY;QAClC,UAAU,EAAE,OAAO,CAAC,UAAU;QAC9B,SAAS,EAAE,OAAO,CAAC,SAAS,IAAI,IAAI,CAAC,GAAG,EAAE;KAC1C,CAAC;AAAA,CACF;AAkCD,SAAS,cAAc,CAAC,IAAY,EAAU;IAC7C,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;AAAA,CAClC;AAED,SAAS,QAAQ,CAAC,MAAc,EAAU;IACzC,OAAO,GAAG,MAAM,IAAI,IAAI,CAAC,GAAG,EAAE,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,
CAAC;AAAA,CACxE;AAED,SAAS,aAAa,CAAC,OAAmD,EAAU;IACnF,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE,CAAC;QACjC,OAAO,OAAO,CAAC;IAChB,CAAC;IACD,OAAO,OAAO;SACZ,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC;QACf,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YAC3B,OAAO,KAAK,CAAC,IAAI,CAAC;QACnB,CAAC;QACD,OAAO,UAAU,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC;IAAA,CACxD,CAAC;SACD,IAAI,CAAC,IAAI,CAAC,CAAC;AAAA,CACb;AAED,SAAS,sBAAsB,CAAC,OAAwD,EAAU;IACjG,OAAO,OAAO;SACZ,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC;QACf,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YAC3B,OAAO,KAAK,CAAC,IAAI,CAAC;QACnB,CAAC;QACD,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;YAC/B,OAAO,KAAK,CAAC,QAAQ,CAAC;QACvB,CAAC;QACD,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC;IAAA,CAC1D,CAAC;SACD,IAAI,CAAC,IAAI,CAAC,CAAC;AAAA,CACb;AAED,SAAS,gBAAgB,CAAC,OAA0B,EAAU;IAC7D,OAAO,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,aAAa,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAAA,CAChG;AAED,SAAS,aAAa,CAAC,OAAgB,EAAU;IAChD,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;QAC7B,OAAO,aAAa,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IACvC,CAAC;IACD,IAAI,OAAO,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;QAClC,OAAO,sBAAsB,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IAChD,CAAC;IACD,OAAO,gBAAgB,CAAC,OAAO,CAAC,CAAC;AAAA,CACjC;AAED,SAAS,gBAAgB,CAAC,OAAgB,EAAU;IACnD,MAAM,KAAK,GAAa,EAAE,CAAC;IAC3B,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;QAC1B,KAAK,CAAC,IAAI,CAAC,UAAU,OAAO,CAAC,YAAY,EAAE,CAAC,CAAC;IAC9C,CAAC;IACD,KAAK,MAAM,OAAO,IAAI,OAAO,CAAC,QAAQ,EAAE,CAAC;QACxC,KAAK,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,IAAI,IAAI,aAAa,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;IACzD,CAAC;IACD,IAAI,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC;QAC3B,KAAK,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;IACtD,CAAC;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AAAA,CAC1B;AAED,SAAS,kBAAkB,CAAC,CAAS,EAAE,CAAS,EAAU;IACzD,MAAM,MAAM,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC;IAC5C,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,OAAO,KAAK,GAAG,MAAM,IAAI,CAAC,C
AAC,KAAK,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC;QAChD,KAAK,EAAE,CAAC;IACT,CAAC;IACD,OAAO,KAAK,CAAC;AAAA,CACb;AAED,SAAS,iBAAiB,CACzB,OAAyB,EACzB,OAAgB,EAChB,OAAkC,EAClC,WAAgC,EACb;IACnB,MAAM,UAAU,GAAG,gBAAgB,CAAC,OAAO,CAAC,CAAC;IAC7C,MAAM,YAAY,GAAG,cAAc,CAAC,UAAU,CAAC,CAAC;IAChD,MAAM,YAAY,GAAG,cAAc,CAAC,sBAAsB,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;IAC7E,IAAI,KAAK,GAAG,YAAY,CAAC;IACzB,IAAI,SAAS,GAAG,CAAC,CAAC;IAClB,IAAI,UAAU,GAAG,CAAC,CAAC;IACnB,MAAM,SAAS,GAAG,OAAO,EAAE,SAAS,CAAC;IAErC,IAAI,SAAS,IAAI,OAAO,EAAE,cAAc,KAAK,MAAM,EAAE,CAAC;QACrD,MAAM,cAAc,GAAG,WAAW,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;QAClD,IAAI,cAAc,EAAE,CAAC;YACpB,MAAM,WAAW,GAAG,kBAAkB,CAAC,cAAc,EAAE,UAAU,CAAC,CAAC;YACnE,SAAS,GAAG,cAAc,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,CAAC;YACjE,UAAU,GAAG,cAAc,CAAC,UAAU,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC;YAC3D,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,YAAY,GAAG,SAAS,CAAC,CAAC;QAC/C,CAAC;aAAM,CAAC;YACP,UAAU,GAAG,YAAY,CAAC;QAC3B,CAAC;QACD,WAAW,CAAC,GAAG,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC;IACxC,CAAC;IAED,OAAO;QACN,GAAG,OAAO;QACV,KAAK,EAAE;YACN,KAAK;YACL,MAAM,EAAE,YAAY;YACpB,SAAS;YACT,UAAU;YACV,WAAW,EAAE,KAAK,GAAG,YAAY,GAAG,SAAS,GAAG,UAAU;YAC1D,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE;SACpE;KACD,CAAC;AAAA,CACF;AAED,SAAS,sBAAsB,CAAC,IAAY,EAAE,YAAoB,EAAE,YAAoB,EAAY;IACnG,MAAM,MAAM,GAAa,EAAE,CAAC;IAC5B,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,OAAO,KAAK,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QAC5B,MAAM,SAAS,GAAG,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,YAAY,GAAG,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC;QAC/F,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,GAAG,CAAC,CAAC,CAAC;QAC5C,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE,KAAK,GAAG,QAAQ,CAAC,CAAC,CAAC;QACjD,KAAK,IAAI,QAAQ,CAAC;IACnB,CAAC;IACD,OAAO,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AAAA,CACzC;AAED,SAAS,YAAY,CAAC,OAAyB,EAAE,GAAW,EAAE,QAAgB,EAAE,OAAe,EAAoB;IAClH,MAAM,MAAM,GAAG,eAAe,CAAC,OAAO,CAAC,CAAC;IACxC,OAAO;QA
CN,GAAG,MAAM;QACT,GAAG;QACH,QAAQ;QACR,KAAK,EAAE,OAAO;QACd,SAAS,EAAE,MAAM,CAAC,SAAS,IAAI,IAAI,CAAC,GAAG,EAAE;QACzC,KAAK,EAAE,MAAM,CAAC,KAAK,IAAI,aAAa;KACpC,CAAC;AAAA,CACF;AAED,SAAS,kBAAkB,CAAC,KAAc,EAAE,GAAW,EAAE,QAAgB,EAAE,OAAe,EAAoB;IAC7G,OAAO;QACN,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,EAAE;QACX,GAAG;QACH,QAAQ;QACR,KAAK,EAAE,OAAO;QACd,KAAK,EAAE,aAAa;QACpB,UAAU,EAAE,OAAO;QACnB,YAAY,EAAE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;QACpE,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;KACrB,CAAC;AAAA,CACF;AAED,SAAS,oBAAoB,CAAC,OAAyB,EAAoB;IAC1E,OAAO;QACN,GAAG,OAAO;QACV,UAAU,EAAE,SAAS;QACrB,YAAY,EAAE,qBAAqB;QACnC,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;KACrB,CAAC;AAAA,CACF;AAED,SAAS,aAAa,CAAC,KAAa,EAAE,eAAmC,EAAiB;IACzF,IAAI,CAAC,eAAe,IAAI,eAAe,IAAI,CAAC,EAAE,CAAC;QAC9C,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC,CAAC;IAC1D,CAAC;IACD,MAAM,OAAO,GAAG,CAAC,cAAc,CAAC,KAAK,CAAC,GAAG,eAAe,CAAC,GAAG,IAAI,CAAC;IACjE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC;AAAA,CAC9D;AAED,KAAK,UAAU,gBAAgB,CAC9B,MAAmC,EACnC,OAAyB,EACzB,YAAoB,EACpB,YAAoB,EACpB,eAAmC,EACnC,MAA+B,EACf;IAChB,MAAM,OAAO,GAAqB,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,EAAE,EAAE,CAAC;IAC9D,IAAI,MAAM,EAAE,OAAO,EAAE,CAAC;QACrB,MAAM,OAAO,GAAG,oBAAoB,CAAC,OAAO,CAAC,CAAC;QAC9C,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;QAClE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;QACpB,OAAO;IACR,CAAC;IAED,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;IAExD,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE,CAAC;QAC7D,IAAI,MAAM,EAAE,OAAO,EAAE,CAAC;YACrB,MAAM,OAAO,GAAG,oBAAoB,CAAC,OAAO,CAAC,CAAC;YAC9C,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;YAClE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;YACpB,OAAO;QACR,CAAC;QAED,MAAM,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QAErC,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;YAC/B,
OAAO,CAAC,OAAO,GAAG,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,QAAQ,EAAE,EAAE,EAAE,CAAC,CAAC;YAC3E,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;YACtF,KAAK,MAAM,KAAK,IAAI,sBAAsB,CAAC,KAAK,CAAC,QAAQ,EAAE,YAAY,EAAE,YAAY,CAAC,EAAE,CAAC;gBACxF,MAAM,aAAa,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;gBAC5C,IAAI,MAAM,EAAE,OAAO,EAAE,CAAC;oBACrB,MAAM,OAAO,GAAG,oBAAoB,CAAC,OAAO,CAAC,CAAC;oBAC9C,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;oBAClE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;oBACpB,OAAO;gBACR,CAAC;gBACA,OAAO,CAAC,OAAO,CAAC,KAAK,CAAqB,CAAC,QAAQ,IAAI,KAAK,CAAC;gBAC9D,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;YACrG,CAAC;YACD,MAAM,CAAC,IAAI,CAAC;gBACX,IAAI,EAAE,cAAc;gBACpB,YAAY,EAAE,KAAK;gBACnB,OAAO,EAAE,KAAK,CAAC,QAAQ;gBACvB,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE;aACvB,CAAC,CAAC;YACH,SAAS;QACV,CAAC;QAED,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YAC3B,OAAO,CAAC,OAAO,GAAG,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC;YACnE,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,YAAY,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;YAClF,KAAK,MAAM,KAAK,IAAI,sBAAsB,CAAC,KAAK,CAAC,IAAI,EAAE,YAAY,EAAE,YAAY,CAAC,EAAE,CAAC;gBACpF,MAAM,aAAa,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;gBAC5C,IAAI,MAAM,EAAE,OAAO,EAAE,CAAC;oBACrB,MAAM,OAAO,GAAG,oBAAoB,CAAC,OAAO,CAAC,CAAC;oBAC9C,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;oBAClE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;oBACpB,OAAO;gBACR,CAAC;gBACA,OAAO,CAAC,OAAO,CAAC,KAAK,CAAiB,CAAC,IAAI,IAAI,KAAK,CAAC;gBACtD,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,YAAY,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;YACjG,CAAC;YACD,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,UAAU,EAAE,YAAY,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE
,CAAC,CAAC;YACrG,SAAS;QACV,CAAC;QAED,OAAO,CAAC,OAAO,GAAG,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,EAAE,EAAE,KAAK,CAAC,EAAE,EAAE,IAAI,EAAE,KAAK,CAAC,IAAI,EAAE,SAAS,EAAE,EAAE,EAAE,CAAC,CAAC;QAC5G,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;QACtF,KAAK,MAAM,KAAK,IAAI,sBAAsB,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,YAAY,EAAE,YAAY,CAAC,EAAE,CAAC;YACzG,MAAM,aAAa,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;YAC5C,IAAI,MAAM,EAAE,OAAO,EAAE,CAAC;gBACrB,MAAM,OAAO,GAAG,oBAAoB,CAAC,OAAO,CAAC,CAAC;gBAC9C,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;gBAClE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;gBACpB,OAAO;YACR,CAAC;YACD,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;QACrG,CAAC;QACA,OAAO,CAAC,OAAO,CAAC,KAAK,CAAc,CAAC,SAAS,GAAG,KAAK,CAAC,SAAS,CAAC;QACjE,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,cAAc,EAAE,YAAY,EAAE,KAAK,EAAE,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC,CAAC;IACtG,CAAC;IAED,IAAI,OAAO,CAAC,UAAU,KAAK,OAAO,IAAI,OAAO,CAAC,UAAU,KAAK,SAAS,EAAE,CAAC;QACxE,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,UAAU,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;QAC3E,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;QACpB,OAAO;IACR,CAAC;IAED,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,UAAU,EAAE,OAAO,EAAE,CAAC,CAAC;IACnE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;AAAA,CACpB;AAED,MAAM,UAAU,oBAAoB,CAAC,OAAO,GAAgC,EAAE,EAA4B;IACzG,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,IAAI,QAAQ,CAAC,WAAW,CAAC,CAAC;IACjD,MAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,IAAI,gBAAgB,CAAC;IACtD,MAAM,QAAQ,GAAG,QAAQ,CAAC,eAAe,CAAC,CAAC;IAC3C,MAAM,YAAY,GAAG,IAAI,CAAC,GAAG,CAC5B,CAAC,EACD,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,SAAS,EAAE,GAAG,IAAI,sBAAsB,EAAE,OAAO,CAAC,SAAS,EAAE,GAAG,IAAI,sBAAsB,CAAC,CAC5G,CAAC;IACF,MAAM,YAAY,GAAG,IAAI,CAAC,GAAG,CAAC,YAAY,EAAE,OAAO,CAAC,SAAS,EAAE,GAAG,IAAI,sBAAsB,CAAC,CAAC;IAC9F,IAAI,gBAAgB,GAAuB,EAA
E,CAAC;IAC9C,MAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;IAChD,MAAM,KAAK,GAAG,EAAE,SAAS,EAAE,CAAC,EAAE,CAAC;IAC/B,MAAM,WAAW,GAAG,IAAI,GAAG,EAAkB,CAAC;IAE9C,MAAM,gBAAgB,GAAG,OAAO,CAAC,MAAM,EAAE,MAAM;QAC9C,CAAC,CAAC,OAAO,CAAC,MAAM;QAChB,CAAC,CAAC;YACA;gBACC,EAAE,EAAE,gBAAgB;gBACpB,IAAI,EAAE,kBAAkB;gBACxB,SAAS,EAAE,KAAK;gBAChB,KAAK,EAAE,CAAC,MAAM,EAAE,OAAO,CAAyB;gBAChD,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE;gBAC1D,aAAa,EAAE,MAAM;gBACrB,SAAS,EAAE,KAAK;aAChB;SACD,CAAC;IACJ,MAAM,MAAM,GAAG,gBAAgB,CAAC,GAAG,CAAC,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC;QACpD,EAAE,EAAE,UAAU,CAAC,EAAE;QACjB,IAAI,EAAE,UAAU,CAAC,IAAI,IAAI,UAAU,CAAC,EAAE;QACtC,GAAG;QACH,QAAQ;QACR,OAAO,EAAE,gBAAgB;QACzB,SAAS,EAAE,UAAU,CAAC,SAAS,IAAI,KAAK;QACxC,KAAK,EAAE,UAAU,CAAC,KAAK,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC;QAC5C,IAAI,EAAE,UAAU,CAAC,IAAI,IAAI,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE;QAC7E,aAAa,EAAE,UAAU,CAAC,aAAa,IAAI,MAAM;QACjD,SAAS,EAAE,UAAU,CAAC,SAAS,IAAI,KAAK;KACxC,CAAC,CAAwC,CAAC;IAE3C,MAAM,MAAM,GAA0C,CAAC,YAAY,EAAE,OAAO,EAAE,aAAa,EAAE,EAAE,CAAC;QAC/F,MAAM,KAAK,GAAG,iCAAiC,EAAE,CAAC;QAClD,MAAM,IAAI,GAAG,gBAAgB,CAAC,KAAK,EAAE,CAAC;QACtC,KAAK,CAAC,SAAS,EAAE,CAAC;QAElB,cAAc,CAAC,KAAK,IAAI,EAAE,CAAC;YAC1B,IAAI,CAAC;gBACJ,IAAI,CAAC,IAAI,EAAE,CAAC;oBACX,IAAI,OAAO,GAAG,kBAAkB,CAC/B,IAAI,KAAK,CAAC,+BAA+B,CAAC,EAC1C,GAAG,EACH,QAAQ,EACR,YAAY,CAAC,EAAE,CACf,CAAC;oBACF,OAAO,GAAG,iBAAiB,CAAC,OAAO,EAAE,OAAO,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;oBAC1E,KAAK,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;oBAC/D,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;oBACnB,OAAO;gBACR,CAAC;gBAED,MAAM,QAAQ,GACb,OAAO,IAAI,KAAK,UAAU,CAAC,CAAC,CAAC,MAAM,IAAI,CAAC,OAAO,EAAE,aAAa,EAAE,KAAK,EAAE,YAAY,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;gBAC7F,IAAI,OAAO,GAAG,YAAY,CAAC,QAAQ,EAAE,GAAG,EAAE,QAAQ,EAAE,YAAY,CAAC,EAAE,CAAC,CAAC;gBACrE,OAAO,GAAG,iBAAiB,CAAC,OAAO,EAAE,OAAO,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;gBAC1E,MAAM,gBAAgB,CAAC,KAAK,EAAE,OAAO,EAAE,Y
AAY,EAAE,YAAY,EAAE,eAAe,EAAE,aAAa,EAAE,MAAM,CAAC,CAAC;YAC5G,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBAChB,MAAM,OAAO,GAAG,kBAAkB,CAAC,KAAK,EAAE,GAAG,EAAE,QAAQ,EAAE,YAAY,CAAC,EAAE,CAAC,CAAC;gBAC1E,KAAK,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;gBAC/D,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;YACpB,CAAC;QAAA,CACD,CAAC,CAAC;QAEH,OAAO,KAAK,CAAC;IAAA,CACb,CAAC;IAEF,MAAM,YAAY,GAAgD,CAAC,WAAW,EAAE,OAAO,EAAE,aAAa,EAAE,EAAE,CACzG,MAAM,CAAC,WAAW,EAAE,OAAO,EAAE,aAAa,CAAC,CAAC;IAE7C,mBAAmB,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,YAAY,EAAE,EAAE,QAAQ,CAAC,CAAC;IAI7D,SAAS,QAAQ,CAAC,gBAAyB,EAA6B;QACvE,IAAI,CAAC,gBAAgB,EAAE,CAAC;YACvB,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC;QAClB,CAAC;QACD,OAAO,MAAM,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,EAAE,CAAC,SAAS,CAAC,EAAE,KAAK,gBAAgB,CAAC,CAAC;IAAA,CACrE;IAED,OAAO;QACN,GAAG;QACH,MAAM;QACN,QAAQ;QACR,KAAK;QACL,YAAY,CAAC,SAAS,EAAE;YACvB,gBAAgB,GAAG,CAAC,GAAG,SAAS,CAAC,CAAC;QAAA,CAClC;QACD,eAAe,CAAC,SAAS,EAAE;YAC1B,gBAAgB,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC,CAAC;QAAA,CACpC;QACD,uBAAuB,GAAG;YACzB,OAAO,gBAAgB,CAAC,MAAM,CAAC;QAAA,CAC/B;QACD,UAAU,GAAG;YACZ,sBAAsB,CAAC,QAAQ,CAAC,CAAC;QAAA,CACjC;KACD,CAAC;AAAA,CACF","sourcesContent":["import { registerApiProvider, unregisterApiProviders } from \"../api-registry.js\";\nimport type {\n\tAssistantMessage,\n\tAssistantMessageEventStream,\n\tContext,\n\tImageContent,\n\tMessage,\n\tModel,\n\tSimpleStreamOptions,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tToolCall,\n\tToolResultMessage,\n\tUsage,\n} from \"../types.js\";\nimport { createAssistantMessageEventStream } from \"../utils/event-stream.js\";\n\nconst DEFAULT_API = \"faux\";\nconst DEFAULT_PROVIDER = \"faux\";\nconst DEFAULT_MODEL_ID = \"faux-1\";\nconst DEFAULT_MODEL_NAME = \"Faux Model\";\nconst DEFAULT_BASE_URL = \"http://localhost:0\";\nconst DEFAULT_MIN_TOKEN_SIZE = 3;\nconst DEFAULT_MAX_TOKEN_SIZE = 5;\n\nconst DEFAULT_USAGE: Usage = {\n\tinput: 0,\n\toutput: 0,\n\tcacheRead: 0,\n\tcacheWrite: 0,\n\ttotalTokens: 
0,\n\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n};\n\nexport interface FauxModelDefinition {\n\tid: string;\n\tname?: string;\n\treasoning?: boolean;\n\tinput?: (\"text\" | \"image\")[];\n\tcost?: { input: number; output: number; cacheRead: number; cacheWrite: number };\n\tcontextWindow?: number;\n\tmaxTokens?: number;\n}\n\nexport type FauxContentBlock = TextContent | ThinkingContent | ToolCall;\n\nexport function fauxText(text: string): TextContent {\n\treturn { type: \"text\", text };\n}\n\nexport function fauxThinking(thinking: string): ThinkingContent {\n\treturn { type: \"thinking\", thinking };\n}\n\nexport function fauxToolCall(name: string, arguments_: ToolCall[\"arguments\"], options: { id?: string } = {}): ToolCall {\n\treturn {\n\t\ttype: \"toolCall\",\n\t\tid: options.id ?? randomId(\"tool\"),\n\t\tname,\n\t\targuments: arguments_,\n\t};\n}\n\nfunction normalizeFauxAssistantContent(content: string | FauxContentBlock | FauxContentBlock[]): FauxContentBlock[] {\n\tif (typeof content === \"string\") {\n\t\treturn [fauxText(content)];\n\t}\n\treturn Array.isArray(content) ? content : [content];\n}\n\nexport function fauxAssistantMessage(\n\tcontent: string | FauxContentBlock | FauxContentBlock[],\n\toptions: {\n\t\tstopReason?: AssistantMessage[\"stopReason\"];\n\t\terrorMessage?: string;\n\t\tresponseId?: string;\n\t\ttimestamp?: number;\n\t} = {},\n): AssistantMessage {\n\treturn {\n\t\trole: \"assistant\",\n\t\tcontent: normalizeFauxAssistantContent(content),\n\t\tapi: DEFAULT_API,\n\t\tprovider: DEFAULT_PROVIDER,\n\t\tmodel: DEFAULT_MODEL_ID,\n\t\tusage: DEFAULT_USAGE,\n\t\tstopReason: options.stopReason ?? \"stop\",\n\t\terrorMessage: options.errorMessage,\n\t\tresponseId: options.responseId,\n\t\ttimestamp: options.timestamp ?? 
Date.now(),\n\t};\n}\n\nexport type FauxResponseFactory = (\n\tcontext: Context,\n\toptions: StreamOptions | undefined,\n\tstate: { callCount: number },\n\tmodel: Model<string>,\n) => AssistantMessage | Promise<AssistantMessage>;\n\nexport type FauxResponseStep = AssistantMessage | FauxResponseFactory;\n\nexport interface RegisterFauxProviderOptions {\n\tapi?: string;\n\tprovider?: string;\n\tmodels?: FauxModelDefinition[];\n\ttokensPerSecond?: number;\n\ttokenSize?: {\n\t\tmin?: number;\n\t\tmax?: number;\n\t};\n}\n\nexport interface FauxProviderRegistration {\n\tapi: string;\n\tmodels: [Model<string>, ...Model<string>[]];\n\tgetModel(): Model<string>;\n\tgetModel(modelId: string): Model<string> | undefined;\n\tstate: { callCount: number };\n\tsetResponses: (responses: FauxResponseStep[]) => void;\n\tappendResponses: (responses: FauxResponseStep[]) => void;\n\tgetPendingResponseCount: () => number;\n\tunregister: () => void;\n}\n\nfunction estimateTokens(text: string): number {\n\treturn Math.ceil(text.length / 4);\n}\n\nfunction randomId(prefix: string): string {\n\treturn `${prefix}:${Date.now()}:${Math.random().toString(36).slice(2)}`;\n}\n\nfunction contentToText(content: string | Array<TextContent | ImageContent>): string {\n\tif (typeof content === \"string\") {\n\t\treturn content;\n\t}\n\treturn content\n\t\t.map((block) => {\n\t\t\tif (block.type === \"text\") {\n\t\t\t\treturn block.text;\n\t\t\t}\n\t\t\treturn `[image:${block.mimeType}:${block.data.length}]`;\n\t\t})\n\t\t.join(\"\\n\");\n}\n\nfunction assistantContentToText(content: Array<TextContent | ThinkingContent | ToolCall>): string {\n\treturn content\n\t\t.map((block) => {\n\t\t\tif (block.type === \"text\") {\n\t\t\t\treturn block.text;\n\t\t\t}\n\t\t\tif (block.type === \"thinking\") {\n\t\t\t\treturn block.thinking;\n\t\t\t}\n\t\t\treturn `${block.name}:${JSON.stringify(block.arguments)}`;\n\t\t})\n\t\t.join(\"\\n\");\n}\n\nfunction toolResultToText(message: ToolResultMessage): string 
{\n\treturn [message.toolName, ...message.content.map((block) => contentToText([block]))].join(\"\\n\");\n}\n\nfunction messageToText(message: Message): string {\n\tif (message.role === \"user\") {\n\t\treturn contentToText(message.content);\n\t}\n\tif (message.role === \"assistant\") {\n\t\treturn assistantContentToText(message.content);\n\t}\n\treturn toolResultToText(message);\n}\n\nfunction serializeContext(context: Context): string {\n\tconst parts: string[] = [];\n\tif (context.systemPrompt) {\n\t\tparts.push(`system:${context.systemPrompt}`);\n\t}\n\tfor (const message of context.messages) {\n\t\tparts.push(`${message.role}:${messageToText(message)}`);\n\t}\n\tif (context.tools?.length) {\n\t\tparts.push(`tools:${JSON.stringify(context.tools)}`);\n\t}\n\treturn parts.join(\"\\n\\n\");\n}\n\nfunction commonPrefixLength(a: string, b: string): number {\n\tconst length = Math.min(a.length, b.length);\n\tlet index = 0;\n\twhile (index < length && a[index] === b[index]) {\n\t\tindex++;\n\t}\n\treturn index;\n}\n\nfunction withUsageEstimate(\n\tmessage: AssistantMessage,\n\tcontext: Context,\n\toptions: StreamOptions | undefined,\n\tpromptCache: Map<string, string>,\n): AssistantMessage {\n\tconst promptText = serializeContext(context);\n\tconst promptTokens = estimateTokens(promptText);\n\tconst outputTokens = estimateTokens(assistantContentToText(message.content));\n\tlet input = promptTokens;\n\tlet cacheRead = 0;\n\tlet cacheWrite = 0;\n\tconst sessionId = options?.sessionId;\n\n\tif (sessionId && options?.cacheRetention !== \"none\") {\n\t\tconst previousPrompt = promptCache.get(sessionId);\n\t\tif (previousPrompt) {\n\t\t\tconst cachedChars = commonPrefixLength(previousPrompt, promptText);\n\t\t\tcacheRead = estimateTokens(previousPrompt.slice(0, cachedChars));\n\t\t\tcacheWrite = estimateTokens(promptText.slice(cachedChars));\n\t\t\tinput = Math.max(0, promptTokens - cacheRead);\n\t\t} else {\n\t\t\tcacheWrite = 
promptTokens;\n\t\t}\n\t\tpromptCache.set(sessionId, promptText);\n\t}\n\n\treturn {\n\t\t...message,\n\t\tusage: {\n\t\t\tinput,\n\t\t\toutput: outputTokens,\n\t\t\tcacheRead,\n\t\t\tcacheWrite,\n\t\t\ttotalTokens: input + outputTokens + cacheRead + cacheWrite,\n\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t},\n\t};\n}\n\nfunction splitStringByTokenSize(text: string, minTokenSize: number, maxTokenSize: number): string[] {\n\tconst chunks: string[] = [];\n\tlet index = 0;\n\twhile (index < text.length) {\n\t\tconst tokenSize = minTokenSize + Math.floor(Math.random() * (maxTokenSize - minTokenSize + 1));\n\t\tconst charSize = Math.max(1, tokenSize * 4);\n\t\tchunks.push(text.slice(index, index + charSize));\n\t\tindex += charSize;\n\t}\n\treturn chunks.length > 0 ? chunks : [\"\"];\n}\n\nfunction cloneMessage(message: AssistantMessage, api: string, provider: string, modelId: string): AssistantMessage {\n\tconst cloned = structuredClone(message);\n\treturn {\n\t\t...cloned,\n\t\tapi,\n\t\tprovider,\n\t\tmodel: modelId,\n\t\ttimestamp: cloned.timestamp ?? Date.now(),\n\t\tusage: cloned.usage ?? DEFAULT_USAGE,\n\t};\n}\n\nfunction createErrorMessage(error: unknown, api: string, provider: string, modelId: string): AssistantMessage {\n\treturn {\n\t\trole: \"assistant\",\n\t\tcontent: [],\n\t\tapi,\n\t\tprovider,\n\t\tmodel: modelId,\n\t\tusage: DEFAULT_USAGE,\n\t\tstopReason: \"error\",\n\t\terrorMessage: error instanceof Error ? 
error.message : String(error),\n\t\ttimestamp: Date.now(),\n\t};\n}\n\nfunction createAbortedMessage(partial: AssistantMessage): AssistantMessage {\n\treturn {\n\t\t...partial,\n\t\tstopReason: \"aborted\",\n\t\terrorMessage: \"Request was aborted\",\n\t\ttimestamp: Date.now(),\n\t};\n}\n\nfunction scheduleChunk(chunk: string, tokensPerSecond: number | undefined): Promise<void> {\n\tif (!tokensPerSecond || tokensPerSecond <= 0) {\n\t\treturn new Promise((resolve) => queueMicrotask(resolve));\n\t}\n\tconst delayMs = (estimateTokens(chunk) / tokensPerSecond) * 1000;\n\treturn new Promise((resolve) => setTimeout(resolve, delayMs));\n}\n\nasync function streamWithDeltas(\n\tstream: AssistantMessageEventStream,\n\tmessage: AssistantMessage,\n\tminTokenSize: number,\n\tmaxTokenSize: number,\n\ttokensPerSecond: number | undefined,\n\tsignal: AbortSignal | undefined,\n): Promise<void> {\n\tconst partial: AssistantMessage = { ...message, content: [] };\n\tif (signal?.aborted) {\n\t\tconst aborted = createAbortedMessage(partial);\n\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\tstream.end(aborted);\n\t\treturn;\n\t}\n\n\tstream.push({ type: \"start\", partial: { ...partial } });\n\n\tfor (let index = 0; index < message.content.length; index++) {\n\t\tif (signal?.aborted) {\n\t\t\tconst aborted = createAbortedMessage(partial);\n\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\t\tstream.end(aborted);\n\t\t\treturn;\n\t\t}\n\n\t\tconst block = message.content[index];\n\n\t\tif (block.type === \"thinking\") {\n\t\t\tpartial.content = [...partial.content, { type: \"thinking\", thinking: \"\" }];\n\t\t\tstream.push({ type: \"thinking_start\", contentIndex: index, partial: { ...partial } });\n\t\t\tfor (const chunk of splitStringByTokenSize(block.thinking, minTokenSize, maxTokenSize)) {\n\t\t\t\tawait scheduleChunk(chunk, tokensPerSecond);\n\t\t\t\tif (signal?.aborted) {\n\t\t\t\t\tconst aborted = 
createAbortedMessage(partial);\n\t\t\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\t\t\t\tstream.end(aborted);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t\t(partial.content[index] as ThinkingContent).thinking += chunk;\n\t\t\t\tstream.push({ type: \"thinking_delta\", contentIndex: index, delta: chunk, partial: { ...partial } });\n\t\t\t}\n\t\t\tstream.push({\n\t\t\t\ttype: \"thinking_end\",\n\t\t\t\tcontentIndex: index,\n\t\t\t\tcontent: block.thinking,\n\t\t\t\tpartial: { ...partial },\n\t\t\t});\n\t\t\tcontinue;\n\t\t}\n\n\t\tif (block.type === \"text\") {\n\t\t\tpartial.content = [...partial.content, { type: \"text\", text: \"\" }];\n\t\t\tstream.push({ type: \"text_start\", contentIndex: index, partial: { ...partial } });\n\t\t\tfor (const chunk of splitStringByTokenSize(block.text, minTokenSize, maxTokenSize)) {\n\t\t\t\tawait scheduleChunk(chunk, tokensPerSecond);\n\t\t\t\tif (signal?.aborted) {\n\t\t\t\t\tconst aborted = createAbortedMessage(partial);\n\t\t\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted });\n\t\t\t\t\tstream.end(aborted);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t\t(partial.content[index] as TextContent).text += chunk;\n\t\t\t\tstream.push({ type: \"text_delta\", contentIndex: index, delta: chunk, partial: { ...partial } });\n\t\t\t}\n\t\t\tstream.push({ type: \"text_end\", contentIndex: index, content: block.text, partial: { ...partial } });\n\t\t\tcontinue;\n\t\t}\n\n\t\tpartial.content = [...partial.content, { type: \"toolCall\", id: block.id, name: block.name, arguments: {} }];\n\t\tstream.push({ type: \"toolcall_start\", contentIndex: index, partial: { ...partial } });\n\t\tfor (const chunk of splitStringByTokenSize(JSON.stringify(block.arguments), minTokenSize, maxTokenSize)) {\n\t\t\tawait scheduleChunk(chunk, tokensPerSecond);\n\t\t\tif (signal?.aborted) {\n\t\t\t\tconst aborted = createAbortedMessage(partial);\n\t\t\t\tstream.push({ type: \"error\", reason: \"aborted\", error: aborted 
});\n\t\t\t\tstream.end(aborted);\n\t\t\t\treturn;\n\t\t\t}\n\t\t\tstream.push({ type: \"toolcall_delta\", contentIndex: index, delta: chunk, partial: { ...partial } });\n\t\t}\n\t\t(partial.content[index] as ToolCall).arguments = block.arguments;\n\t\tstream.push({ type: \"toolcall_end\", contentIndex: index, toolCall: block, partial: { ...partial } });\n\t}\n\n\tif (message.stopReason === \"error\" || message.stopReason === \"aborted\") {\n\t\tstream.push({ type: \"error\", reason: message.stopReason, error: message });\n\t\tstream.end(message);\n\t\treturn;\n\t}\n\n\tstream.push({ type: \"done\", reason: message.stopReason, message });\n\tstream.end(message);\n}\n\nexport function registerFauxProvider(options: RegisterFauxProviderOptions = {}): FauxProviderRegistration {\n\tconst api = options.api ?? randomId(DEFAULT_API);\n\tconst provider = options.provider ?? DEFAULT_PROVIDER;\n\tconst sourceId = randomId(\"faux-provider\");\n\tconst minTokenSize = Math.max(\n\t\t1,\n\t\tMath.min(options.tokenSize?.min ?? DEFAULT_MIN_TOKEN_SIZE, options.tokenSize?.max ?? DEFAULT_MAX_TOKEN_SIZE),\n\t);\n\tconst maxTokenSize = Math.max(minTokenSize, options.tokenSize?.max ?? DEFAULT_MAX_TOKEN_SIZE);\n\tlet pendingResponses: FauxResponseStep[] = [];\n\tconst tokensPerSecond = options.tokensPerSecond;\n\tconst state = { callCount: 0 };\n\tconst promptCache = new Map<string, string>();\n\n\tconst modelDefinitions = options.models?.length\n\t\t? options.models\n\t\t: [\n\t\t\t\t{\n\t\t\t\t\tid: DEFAULT_MODEL_ID,\n\t\t\t\t\tname: DEFAULT_MODEL_NAME,\n\t\t\t\t\treasoning: false,\n\t\t\t\t\tinput: [\"text\", \"image\"] as (\"text\" | \"image\")[],\n\t\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },\n\t\t\t\t\tcontextWindow: 128000,\n\t\t\t\t\tmaxTokens: 16384,\n\t\t\t\t},\n\t\t\t];\n\tconst models = modelDefinitions.map((definition) => ({\n\t\tid: definition.id,\n\t\tname: definition.name ?? 
definition.id,\n\t\tapi,\n\t\tprovider,\n\t\tbaseUrl: DEFAULT_BASE_URL,\n\t\treasoning: definition.reasoning ?? false,\n\t\tinput: definition.input ?? [\"text\", \"image\"],\n\t\tcost: definition.cost ?? { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },\n\t\tcontextWindow: definition.contextWindow ?? 128000,\n\t\tmaxTokens: definition.maxTokens ?? 16384,\n\t})) as [Model<string>, ...Model<string>[]];\n\n\tconst stream: StreamFunction<string, StreamOptions> = (requestModel, context, streamOptions) => {\n\t\tconst outer = createAssistantMessageEventStream();\n\t\tconst step = pendingResponses.shift();\n\t\tstate.callCount++;\n\n\t\tqueueMicrotask(async () => {\n\t\t\ttry {\n\t\t\t\tif (!step) {\n\t\t\t\t\tlet message = createErrorMessage(\n\t\t\t\t\t\tnew Error(\"No more faux responses queued\"),\n\t\t\t\t\t\tapi,\n\t\t\t\t\t\tprovider,\n\t\t\t\t\t\trequestModel.id,\n\t\t\t\t\t);\n\t\t\t\t\tmessage = withUsageEstimate(message, context, streamOptions, promptCache);\n\t\t\t\t\touter.push({ type: \"error\", reason: \"error\", error: message });\n\t\t\t\t\touter.end(message);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\n\t\t\t\tconst resolved =\n\t\t\t\t\ttypeof step === \"function\" ? 
await step(context, streamOptions, state, requestModel) : step;\n\t\t\t\tlet message = cloneMessage(resolved, api, provider, requestModel.id);\n\t\t\t\tmessage = withUsageEstimate(message, context, streamOptions, promptCache);\n\t\t\t\tawait streamWithDeltas(outer, message, minTokenSize, maxTokenSize, tokensPerSecond, streamOptions?.signal);\n\t\t\t} catch (error) {\n\t\t\t\tconst message = createErrorMessage(error, api, provider, requestModel.id);\n\t\t\t\touter.push({ type: \"error\", reason: \"error\", error: message });\n\t\t\t\touter.end(message);\n\t\t\t}\n\t\t});\n\n\t\treturn outer;\n\t};\n\n\tconst streamSimple: StreamFunction<string, SimpleStreamOptions> = (streamModel, context, streamOptions) =>\n\t\tstream(streamModel, context, streamOptions);\n\n\tregisterApiProvider({ api, stream, streamSimple }, sourceId);\n\n\tfunction getModel(): Model<string>;\n\tfunction getModel(requestedModelId: string): Model<string> | undefined;\n\tfunction getModel(requestedModelId?: string): Model<string> | undefined {\n\t\tif (!requestedModelId) {\n\t\t\treturn models[0];\n\t\t}\n\t\treturn models.find((candidate) => candidate.id === requestedModelId);\n\t}\n\n\treturn {\n\t\tapi,\n\t\tmodels,\n\t\tgetModel,\n\t\tstate,\n\t\tsetResponses(responses) {\n\t\t\tpendingResponses = [...responses];\n\t\t},\n\t\tappendResponses(responses) {\n\t\t\tpendingResponses.push(...responses);\n\t\t},\n\t\tgetPendingResponseCount() {\n\t\t\treturn pendingResponses.length;\n\t\t},\n\t\tunregister() {\n\t\t\tunregisterApiProviders(sourceId);\n\t\t},\n\t};\n}\n"]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@mariozechner/pi-ai",
3
- "version": "0.63.2",
3
+ "version": "0.64.0",
4
4
  "description": "Unified LLM API with automatic model discovery and provider configuration",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",