peerbench 0.0.3 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/benchmarks/index.d.ts +1 -1
- package/dist/benchmarks/index.js +13 -13
- package/dist/{chunk-XFXVAERY.js → chunk-HPPCDSJ3.js} +1 -1
- package/dist/chunk-HPPCDSJ3.js.map +1 -0
- package/dist/{chunk-ERALDEZY.js → chunk-JFLUJLGT.js} +4 -1
- package/dist/chunk-JFLUJLGT.js.map +1 -0
- package/dist/{chunk-ZXTQJFGL.js → chunk-Q6GSOHOP.js} +4 -4
- package/dist/index.js +3 -3
- package/dist/{json-file-ZwzLUbje.d.ts → json-file-Bgv9TLcX.d.ts} +1 -0
- package/dist/providers/index.d.ts +4 -0
- package/dist/providers/index.js +1 -1
- package/dist/schemas/llm/index.js +2 -2
- package/dist/storages/index.d.ts +2 -2
- package/dist/storages/index.js +1 -1
- package/package.json +1 -1
- package/dist/chunk-ERALDEZY.js.map +0 -1
- package/dist/chunk-XFXVAERY.js.map +0 -1
- package/dist/{chunk-ZXTQJFGL.js.map → chunk-Q6GSOHOP.js.map} +0 -0
package/dist/benchmarks/index.d.ts
CHANGED
@@ -2,7 +2,7 @@ import { I as IdGenerator } from '../index-BAioQhp2.js';
 import z__default, { z } from 'zod';
 import { A as AbstractLLMProvider } from '../llm-BND163ns.js';
 import { a as MCQScorer, L as LLMAsAJudgeScorer } from '../llm-judge-BS_oNYUK.js';
-import { J as JSONFileStorage } from '../json-file-
+import { J as JSONFileStorage } from '../json-file-Bgv9TLcX.js';
 import '../provider-BDjGp2y-.js';
 import '../abstract-Dec9Sc5O.js';
 import 'openai/resources/shared';
package/dist/benchmarks/index.js
CHANGED
@@ -1,6 +1,6 @@
 import {
   SimpleSystemPromptSchemaV1
-} from "../chunk-
+} from "../chunk-Q6GSOHOP.js";
 import {
   defineRunner
 } from "../chunk-QY5MPNNB.js";
@@ -8,18 +8,6 @@ import {
   LLMAsAJudgeScorer,
   MCQScorer
 } from "../chunk-DNGT4SJC.js";
-import {
-  AbstractLLMProvider
-} from "../chunk-XFXVAERY.js";
-import {
-  PEERBENCH_NAMESPACE
-} from "../chunk-UHHHSYVE.js";
-import {
-  JSONFileStorage
-} from "../chunk-ERALDEZY.js";
-import {
-  idGeneratorUUIDv7
-} from "../chunk-4UBK6452.js";
 import {
   BaseResponseSchemaV1,
   BaseScoreSchemaV1,
@@ -32,6 +20,18 @@ import "../chunk-OQE6TQXZ.js";
 import {
   ScoringMethod
 } from "../chunk-HMQYGCKI.js";
+import {
+  JSONFileStorage
+} from "../chunk-JFLUJLGT.js";
+import {
+  AbstractLLMProvider
+} from "../chunk-HPPCDSJ3.js";
+import {
+  PEERBENCH_NAMESPACE
+} from "../chunk-UHHHSYVE.js";
+import {
+  idGeneratorUUIDv7
+} from "../chunk-4UBK6452.js";
 import {
   ExtensionLLMAsAJudgeScoreFieldsV1,
   ExtensionLLMResponseFieldsV1
package/dist/chunk-HPPCDSJ3.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../src/providers/abstract/provider.ts","../src/providers/abstract/llm.ts","../src/providers/mastra.ts","../src/providers/openai.ts","../src/providers/openrouter.ts"],"sourcesContent":["export abstract class AbstractProvider {\n abstract readonly kind: string;\n}\n\nexport type ProviderResponse<TData = unknown> = {\n startedAt: number;\n completedAt: number;\n data: TData;\n};\n","import { AbstractProvider, ProviderResponse } from \"./provider\";\nimport {\n ResponseFormatJSONObject,\n ResponseFormatJSONSchema,\n ResponseFormatText,\n} from \"openai/resources/shared\";\nimport { ChatCompletionMessageParam } from \"openai/resources/chat/completions\";\n\nexport abstract class AbstractLLMProvider extends AbstractProvider {\n abstract forward(args: LLMProviderForwardArgs): Promise<ChatResponse>;\n}\n\nexport type LLMProviderForwardArgs = {\n messages: ChatCompletionMessageParam[];\n model: string;\n abortSignal?: AbortSignal;\n temperature?: number;\n responseFormat?:\n | ResponseFormatText\n | ResponseFormatJSONSchema\n | ResponseFormatJSONObject;\n};\n\nexport type ChatResponse = ProviderResponse<string> & {\n inputTokensUsed?: number;\n outputTokensUsed?: number;\n inputCost?: string;\n outputCost?: string;\n\n metadata?: Record<string, unknown>;\n};\n","import {\n AbstractLLMProvider,\n type ChatResponse,\n type LLMProviderForwardArgs,\n} from \"./abstract/llm\";\nimport { MastraClient, type GetAgentResponse } from \"@mastra/client-js\";\n\nexport class MastraProvider extends AbstractLLMProvider {\n override readonly kind = \"mastra\";\n\n private readonly endpoint: string;\n private readonly authToken?: string;\n private client: MastraClient;\n private memory?: AgentMemoryOption;\n\n constructor(params: {\n endpoint: string;\n authToken?: string;\n memory?: AgentMemoryOption;\n }) {\n super();\n this.endpoint = params.endpoint;\n this.authToken = params.authToken;\n this.client = new MastraClient({\n baseUrl: this.endpoint,\n headers: this.authToken\n ? {\n Authorization: `Bearer ${this.authToken}`,\n }\n : undefined,\n });\n this.memory = params.memory;\n }\n\n override async forward(\n args: LLMProviderForwardArgs & {\n memory?: AgentMemoryOption;\n\n /**\n * The model that will be used as the brain for the agent.\n */\n modelName?: string\n }\n ): Promise<ChatResponse> {\n const apiMessages = args.messages\n .filter((m) => m.role === \"user\" || m.role === \"assistant\")\n .map((m) => ({\n role: m.role as \"user\" | \"assistant\",\n content: String((m as any).content ?? \"\"),\n }));\n\n const agent = this.client.getAgent(args.model);\n\n const startedAt = Date.now();\n const response = await agent.generate(\n {\n messages: apiMessages,\n runtimeContext: {\n \"model-id\": args.model,\n },\n },\n { memory: args.memory ?? this.memory }\n );\n\n return {\n data: response.text,\n startedAt,\n completedAt: Date.now(),\n };\n }\n\n async getAgentInfo(args: {\n agentId: string;\n runtimeContext?: MastraRuntimeContext;\n }) {\n return await this.client\n .getAgent(args.agentId)\n .details(args.runtimeContext);\n }\n\n async getAgents(args?: {\n runtimeContext?: MastraRuntimeContext;\n partial?: boolean;\n }): Promise<Record<string, GetAgentResponse>> {\n return this.client.getAgents(args?.runtimeContext, args?.partial);\n }\n}\n\n// NOTE: Mastra client does not export these types\nexport type AgentMemoryOption = Parameters<\n Parameters<MastraClient[\"getAgent\"]>[\"0\"] extends string\n ? 
ReturnType<MastraClient[\"getAgent\"]>[\"generate\"]\n : never\n>[0] extends { memory?: infer M }\n ? M\n : never;\n\ntype MastraRuntimeContext = Parameters<\n Parameters<MastraClient[\"getAgent\"]>[\"0\"] extends string\n ? ReturnType<MastraClient[\"getAgent\"]>[\"generate\"]\n : never\n>[0] extends { runtimeContext?: infer R }\n ? R\n : never;\n","import { RateLimiter } from \"@/utils\";\nimport { ChatCompletionMessageParam } from \"openai/resources/chat/completions\";\nimport {\n ResponseFormatJSONObject,\n ResponseFormatJSONSchema,\n ResponseFormatText,\n} from \"openai/resources/shared\";\nimport OpenAI, { APIError } from \"openai\";\nimport { AbstractLLMProvider, ChatResponse } from \"./abstract/llm\";\nimport { PEERBENCH_NAMESPACE } from \"@/constants\";\n\nexport class OpenAIProvider extends AbstractLLMProvider {\n override readonly kind = `${PEERBENCH_NAMESPACE}/llm/openai` as const;\n\n private client: OpenAI;\n private rateLimiter: RateLimiter;\n private maxRetries: number;\n\n constructor(config: {\n apiKey: string;\n baseURL: string;\n maxRetries?: number;\n timeout?: number;\n rateLimiter?: RateLimiter;\n }) {\n super();\n this.maxRetries = config.maxRetries ?? 3;\n this.rateLimiter =\n config.rateLimiter ??\n new RateLimiter({\n maxWeight: 20,\n timeWindow: 3_000,\n });\n\n this.client = new OpenAI({\n baseURL: config.baseURL,\n apiKey: config.apiKey,\n timeout: config.timeout,\n dangerouslyAllowBrowser: true,\n });\n }\n\n async forward(args: {\n messages: ChatCompletionMessageParam[];\n model: string;\n abortSignal?: AbortSignal;\n temperature?: number;\n responseFormat?:\n | ResponseFormatText\n | ResponseFormatJSONSchema\n | ResponseFormatJSONObject;\n }): Promise<ChatResponse> {\n let retryCount = this.maxRetries;\n while (retryCount > 0) {\n let startedAt: Date = new Date();\n\n try {\n const response = await this.rateLimiter.execute(\n async () => {\n // Capture the start time of the request\n startedAt = new Date();\n return await this.client.chat.completions.create(\n {\n model: args.model,\n messages: args.messages,\n temperature: args.temperature,\n response_format: args.responseFormat,\n },\n // Signal for request\n { signal: args.abortSignal }\n );\n },\n // Signal for rate limiting\n { signal: args.abortSignal }\n );\n\n if (\"error\" in response) {\n const err = response.error as any;\n throw new Error(\n `${err.message} - Code ${err.code} - ${JSON.stringify(err)}`\n );\n }\n\n if (!response?.choices?.[0]?.message?.content) {\n throw new Error(\"No content returned from the model\");\n }\n\n return {\n data: response.choices[0].message.content,\n\n inputTokensUsed: response?.usage?.prompt_tokens,\n outputTokensUsed: response?.usage?.completion_tokens,\n\n startedAt: startedAt.getTime(),\n completedAt: Date.now(),\n };\n } catch (err) {\n if (err instanceof APIError && err.status === 401) {\n throw new Error(`Invalid credentials provided`, { cause: err });\n }\n\n retryCount--;\n\n // More likely an empty HTTP response returned by the Provider\n // and it couldn't be parsed as JSON by the OpenAI SDK. 
We need to retry the request\n // More info can be found in the following links:\n // https://www.reddit.com/r/SillyTavernAI/comments/1ik95vr/deepseek_r1_on_openrouter_returning_blank_messages/\n // https://github.com/cline/cline/issues/60\n if (err instanceof SyntaxError) {\n console.debug(err);\n continue;\n }\n\n // If it was another error, just continue until we run out of retries\n if (retryCount !== 0) {\n continue;\n }\n\n throw new Error(\n `Failed to forward prompt to the model: ${err instanceof Error ? err.message : err}`,\n { cause: err }\n );\n }\n }\n\n throw new Error(\n `Failed to forward prompt to the model: Max retries reached`,\n { cause: new Error(\"Max retries reached\") }\n );\n }\n}\n","import {\n AbstractLLMProvider,\n ChatResponse,\n LLMProviderForwardArgs,\n} from \"./abstract/llm\";\nimport { RateLimiter } from \"@/utils\";\nimport { OpenAIProvider } from \"./openai\";\nimport { PEERBENCH_NAMESPACE } from \"@/constants\";\nimport Decimal from \"decimal.js\";\nimport axios from \"axios\";\n\nconst baseURL = \"https://openrouter.ai/api/v1\";\nconst MODELS_CACHE_TTL = 1000 * 60 * 60 * 24; // 24 hours\n\nexport class OpenRouterProvider extends AbstractLLMProvider {\n override readonly kind = `${PEERBENCH_NAMESPACE}/llm/openrouter.ai` as const;\n\n private models: ModelsResponse | undefined = undefined;\n private modelsCachePromise: Promise<ModelsResponse | undefined> =\n Promise.resolve(undefined);\n private modelsUpdatedAt = 0;\n private openAIProvider: OpenAIProvider;\n\n constructor(config: {\n apiKey: string;\n maxRetries?: number;\n timeout?: number;\n rateLimiter?: RateLimiter;\n }) {\n super();\n this.openAIProvider = new OpenAIProvider({\n baseURL,\n apiKey: config.apiKey,\n maxRetries: config.maxRetries,\n timeout: config.timeout,\n rateLimiter: config.rateLimiter,\n });\n }\n\n override async forward(args: LLMProviderForwardArgs): Promise<ChatResponse> {\n // Update models cache concurrently (non-blocking)\n const [response] = await Promise.all([\n this.openAIProvider.forward(args),\n this.updateModelsCache().catch(() => {\n // Silently fail if cache update fails so we won't have cost info in the result\n }),\n ]);\n\n // Get the model info from the cache\n const modelInfo = this.models?.data.find((m) => m.id === args.model);\n let inputCost: string | undefined = undefined;\n let outputCost: string | undefined = undefined;\n\n if (modelInfo !== undefined) {\n // Use Decimal.js for more accurate calculation\n if (response.inputTokensUsed !== undefined) {\n inputCost = new Decimal(modelInfo.pricing.prompt)\n .mul(response.inputTokensUsed)\n .toFixed(10);\n }\n if (response.outputTokensUsed !== undefined) {\n outputCost = new Decimal(modelInfo.pricing.completion)\n .mul(response.outputTokensUsed)\n .toFixed(10);\n }\n }\n\n return {\n ...response,\n inputCost,\n outputCost,\n };\n }\n\n /**\n * Updates the cache that holds information about OpenRouter models\n * including pricing information. It will be valid for 24 hours as\n * long as the instance of this Provider object is alive.\n */\n private async updateModelsCache() {\n // Chain each update method call to the promise.\n // This approach prevents race conditions between multiple calls.\n // Since each call is chained to the end of the previous one,\n // each promise makes a request only if the models cache is not updated\n // in the last call. 
Otherwise it simply resolves to the cached value.\n this.modelsCachePromise = this.modelsCachePromise\n .then(async () => {\n if (\n // The data presented in the cache\n this.models !== undefined &&\n // The cache is still valid\n Date.now() - this.modelsUpdatedAt < MODELS_CACHE_TTL\n ) {\n return this.models;\n }\n\n // If the cache is not valid, update it\n return axios\n .get<ModelsResponse>(`${baseURL}/models`)\n .then((res) => res.data)\n .then((data) => {\n // Only get the models that supports text input and output\n data = {\n data: data.data.filter(\n (m) =>\n m.architecture.input_modalities.includes(\"text\") &&\n m.architecture.output_modalities.includes(\"text\") &&\n // These models are \"fast apply model\" and don't support multi turn conversations so don't include them\n ![\n \"morph/morph-v3-large\",\n \"morph/morph-v3-fast\",\n \"relace/relace-apply-3\",\n ].includes(m.id)\n ),\n };\n\n this.models = data;\n this.modelsUpdatedAt = Date.now();\n\n return data;\n });\n })\n .catch(() => undefined);\n\n // Wait for the promise chain to resolve\n await this.modelsCachePromise;\n }\n}\n\ntype PutModality = \"text\" | \"image\" | \"file\" | \"audio\";\ntype Modality = \"text->text\" | \"text+image->text\" | \"text+image->text+image\";\ntype ModelsResponse = {\n data: {\n readonly id: string;\n readonly canonical_slug: string;\n readonly hugging_face_id: null | string;\n readonly name: string;\n readonly created: number;\n readonly description: string;\n readonly context_length: number;\n readonly architecture: {\n readonly modality: Modality;\n readonly input_modalities: PutModality[];\n readonly output_modalities: PutModality[];\n readonly instruct_type: null | string;\n };\n readonly pricing: {\n readonly prompt: string;\n readonly completion: string;\n readonly request?: string;\n readonly image?: string;\n readonly web_search?: string;\n readonly internal_reasoning?: string;\n readonly input_cache_read?: string;\n readonly input_cache_write?: string;\n readonly audio?: string;\n };\n 
}[];\n};\n"],"mappings":";;;;;;;;AAAO,IAAe,mBAAf,MAAgC;AAEvC;;;ACMO,IAAe,sBAAf,cAA2C,iBAAiB;AAEnE;;;ACLA,SAAS,oBAA2C;AAE7C,IAAM,iBAAN,cAA6B,oBAAoB;AAAA,EACpC,OAAO;AAAA,EAER;AAAA,EACA;AAAA,EACT;AAAA,EACA;AAAA,EAER,YAAY,QAIT;AACD,UAAM;AACN,SAAK,WAAW,OAAO;AACvB,SAAK,YAAY,OAAO;AACxB,SAAK,SAAS,IAAI,aAAa;AAAA,MAC7B,SAAS,KAAK;AAAA,MACd,SAAS,KAAK,YACV;AAAA,QACA,eAAe,UAAU,KAAK,SAAS;AAAA,MACzC,IACE;AAAA,IACN,CAAC;AACD,SAAK,SAAS,OAAO;AAAA,EACvB;AAAA,EAEA,MAAe,QACb,MAQuB;AACvB,UAAM,cAAc,KAAK,SACtB,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE,SAAS,WAAW,EACzD,IAAI,CAAC,OAAO;AAAA,MACX,MAAM,EAAE;AAAA,MACR,SAAS,OAAQ,EAAU,WAAW,EAAE;AAAA,IAC1C,EAAE;AAEJ,UAAM,QAAQ,KAAK,OAAO,SAAS,KAAK,KAAK;AAE7C,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,WAAW,MAAM,MAAM;AAAA,MAC3B;AAAA,QACE,UAAU;AAAA,QACV,gBAAgB;AAAA,UACd,YAAY,KAAK;AAAA,QACnB;AAAA,MACF;AAAA,MACA,EAAE,QAAQ,KAAK,UAAU,KAAK,OAAO;AAAA,IACvC;AAEA,WAAO;AAAA,MACL,MAAM,SAAS;AAAA,MACf;AAAA,MACA,aAAa,KAAK,IAAI;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,MAAM,aAAa,MAGhB;AACD,WAAO,MAAM,KAAK,OACf,SAAS,KAAK,OAAO,EACrB,QAAQ,KAAK,cAAc;AAAA,EAChC;AAAA,EAEA,MAAM,UAAU,MAG8B;AAC5C,WAAO,KAAK,OAAO,UAAU,MAAM,gBAAgB,MAAM,OAAO;AAAA,EAClE;AACF;;;AC/EA,OAAO,UAAU,gBAAgB;AAI1B,IAAM,iBAAN,cAA6B,oBAAoB;AAAA,EACpC,OAAO,GAAG,mBAAmB;AAAA,EAEvC;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,QAMT;AACD,UAAM;AACN,SAAK,aAAa,OAAO,cAAc;AACvC,SAAK,cACH,OAAO,eACP,IAAI,YAAY;AAAA,MACd,WAAW;AAAA,MACX,YAAY;AAAA,IACd,CAAC;AAEH,SAAK,SAAS,IAAI,OAAO;AAAA,MACvB,SAAS,OAAO;AAAA,MAChB,QAAQ,OAAO;AAAA,MACf,SAAS,OAAO;AAAA,MAChB,yBAAyB;AAAA,IAC3B,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,QAAQ,MASY;AACxB,QAAI,aAAa,KAAK;AACtB,WAAO,aAAa,GAAG;AACrB,UAAI,YAAkB,oBAAI,KAAK;AAE/B,UAAI;AACF,cAAM,WAAW,MAAM,KAAK,YAAY;AAAA,UACtC,YAAY;AAEV,wBAAY,oBAAI,KAAK;AACrB,mBAAO,MAAM,KAAK,OAAO,KAAK,YAAY;AAAA,cACxC;AAAA,gBACE,OAAO,KAAK;AAAA,gBACZ,UAAU,KAAK;AAAA,gBACf,aAAa,KAAK;AAAA,gBAClB,iBAAiB,KAAK;AAAA,cACxB;AAAA;AAAA,cAEA,EAAE,QAAQ,KAAK,YAAY;AAAA,YAC7B;AAAA,UACF;AAAA;AAAA,UAEA,EAAE,QAAQ,KAAK,YAAY;AAAA,QAC7B;AAEA,YAAI,WAAW,UAAU;AACvB,gBAAM,MAAM,SAAS;AACrB,gBAAM,IAAI;AAAA,YACR,GAAG,IAAI,OAAO,WAAW,IAAI,IAAI,MAAM,KAAK,UAAU,GAAG,CAAC;AAAA,UAC5D;AAAA,QACF;AAEA,YAAI,CAAC,UAAU,UAAU,CAAC,GAAG,SAAS,SAAS;AAC7C,gBAAM,IAAI,MAAM,oCAAoC;AAAA,QACtD;AAEA,eAAO;AAAA,UACL,MAAM,SAAS,QAAQ,CAAC,EAAE,QAAQ;AAAA,UAElC,iBAAiB,UAAU,OAAO;AAAA,UAClC,kBAAkB,UAAU,OAAO;AAAA,UAEnC,WAAW,UAAU,QAAQ;AAAA,UAC7B,aAAa,KAAK,IAAI;AAAA,QACxB;AAAA,MACF,SAAS,KAAK;AACZ,YAAI,eAAe,YAAY,IAAI,WAAW,KAAK;AACjD,gBAAM,IAAI,MAAM,gCAAgC,EAAE,OAAO,IAAI,CAAC;AAAA,QAChE;AAEA;AAOA,YAAI,eAAe,aAAa;AAC9B,kBAAQ,MAAM,GAAG;AACjB;AAAA,QACF;AAGA,YAAI,eAAe,GAAG;AACpB;AAAA,QACF;AAEA,cAAM,IAAI;AAAA,UACR,0CAA0C,eAAe,QAAQ,IAAI,UAAU,GAAG;AAAA,UAClF,EAAE,OAAO,IAAI;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAEA,UAAM,IAAI;AAAA,MACR;AAAA,MACA,EAAE,OAAO,IAAI,MAAM,qBAAqB,EAAE;AAAA,IAC5C;AAAA,EACF;AACF;;;AC1HA,OAAO,aAAa;AACpB,OAAO,WAAW;AAElB,IAAM,UAAU;AAChB,IAAM,mBAAmB,MAAO,KAAK,KAAK;AAEnC,IAAM,qBAAN,cAAiC,oBAAoB;AAAA,EACxC,OAAO,GAAG,mBAAmB;AAAA,EAEvC,SAAqC;AAAA,EACrC,qBACN,QAAQ,QAAQ,MAAS;AAAA,EACnB,kBAAkB;AAAA,EAClB;AAAA,EAER,YAAY,QAKT;AACD,UAAM;AACN,SAAK,iBAAiB,IAAI,eAAe;AAAA,MACvC;AAAA,MACA,QAAQ,OAAO;AAAA,MACf,YAAY,OAAO;AAAA,MACnB,SAAS,OAAO;AAAA,MAChB,aAAa,OAAO;AAAA,IACtB,CAAC;AAAA,EACH;AAAA,EAEA,MAAe,QAAQ,MAAqD;AAE1E,UAAM,CAAC,QAAQ,IAAI,MAAM,QAAQ,IAAI;AAAA,MACnC,KAAK,eAAe,QAAQ,IAAI;AAAA,MAChC,KAAK,kBAAkB,EAAE,MAAM,MAAM;AAAA,MAErC,CAAC;AAAA,IACH,CAAC;AAGD,UAAM,YAAY,KAAK,QAAQ,KAAK,KAAK,CAAC,MAAM,EAAE,OAAO,KAAK,KAAK;AACnE,QAAI,YAAgC;AACpC,QAAI,aAAiC;AAErC,QAAI,cAAc,QAAW;AAE3B,UAAI,SAAS,oBAAoB,QAAW;AAC1C,oBAAY,IAAI,QAAQ,UAAU,QAAQ,MAAM,EAC7C,IAAI,SAAS,eAAe,EAC5B,QAAQ,EAAE;AAAA,MACf;AACA,UAAI,
SAAS,qBAAqB,QAAW;AAC3C,qBAAa,IAAI,QAAQ,UAAU,QAAQ,UAAU,EAClD,IAAI,SAAS,gBAAgB,EAC7B,QAAQ,EAAE;AAAA,MACf;AAAA,IACF;AAEA,WAAO;AAAA,MACL,GAAG;AAAA,MACH;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,oBAAoB;AAMhC,SAAK,qBAAqB,KAAK,mBAC5B,KAAK,YAAY;AAChB;AAAA;AAAA,QAEE,KAAK,WAAW;AAAA,QAEhB,KAAK,IAAI,IAAI,KAAK,kBAAkB;AAAA,QACpC;AACA,eAAO,KAAK;AAAA,MACd;AAGA,aAAO,MACJ,IAAoB,GAAG,OAAO,SAAS,EACvC,KAAK,CAAC,QAAQ,IAAI,IAAI,EACtB,KAAK,CAAC,SAAS;AAEd,eAAO;AAAA,UACL,MAAM,KAAK,KAAK;AAAA,YACd,CAAC,MACC,EAAE,aAAa,iBAAiB,SAAS,MAAM,KAC/C,EAAE,aAAa,kBAAkB,SAAS,MAAM;AAAA,YAEhD,CAAC;AAAA,cACC;AAAA,cACA;AAAA,cACA;AAAA,YACF,EAAE,SAAS,EAAE,EAAE;AAAA,UACnB;AAAA,QACF;AAEA,aAAK,SAAS;AACd,aAAK,kBAAkB,KAAK,IAAI;AAEhC,eAAO;AAAA,MACT,CAAC;AAAA,IACL,CAAC,EACA,MAAM,MAAM,MAAS;AAGxB,UAAM,KAAK;AAAA,EACb;AACF;","names":[]}
package/dist/{chunk-ERALDEZY.js → chunk-JFLUJLGT.js}
CHANGED
@@ -4,6 +4,9 @@ import {
 
 // src/storages/abstract.ts
 var AbstractStorage = class {
+  async count() {
+    throw new Error("Method not implemented.");
+  }
 };
 
 // src/storages/file.ts
@@ -109,4 +112,4 @@ export {
   JSONFileStorage,
   JSONFileStorageCodec
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-JFLUJLGT.js.map
package/dist/chunk-JFLUJLGT.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../src/storages/abstract.ts","../src/storages/file.ts","../src/storages/json-file.ts"],"sourcesContent":["export abstract class AbstractStorage<TObject> {\n abstract init(params?: unknown): Promise<void>;\n abstract read(key: string, params?: unknown): Promise<TObject | null>;\n abstract readAll(params?: unknown): Promise<TObject[]>;\n abstract write(\n key: string,\n value: TObject,\n params?: unknown\n ): Promise<unknown>;\n\n async count(): Promise<number> {\n throw new Error(\"Method not implemented.\");\n }\n}\n","import { FileHandle, open } from \"node:fs/promises\";\nimport { AbstractStorage } from \"./abstract\";\n\nexport class FileStorage<TObject> extends AbstractStorage<TObject> {\n protected path: string;\n protected codec: AbstractFileStorageCodec<TObject>;\n protected fileHandle: FileHandle | undefined;\n\n constructor(config: {\n path: string;\n codec: AbstractFileStorageCodec<TObject>;\n }) {\n super();\n this.path = config.path;\n this.codec = config.codec;\n }\n\n async init(): Promise<void> {\n this.fileHandle = await open(this.path, \"r+\");\n }\n\n override async read(\n _key: string,\n _params?: unknown\n ): Promise<TObject | null> {\n throw new Error(\"Method not implemented.\");\n }\n\n override async readAll(_params?: unknown): Promise<TObject[]> {\n this.assertInitialized();\n\n return await this.codec.readAll({\n fileHandle: {\n readAt: this.readAt.bind(this),\n readChunks: this.readChunks.bind(this),\n size: this.size.bind(this),\n },\n });\n }\n\n override async write(\n _key: string,\n _value: TObject,\n _params?: unknown\n ): Promise<unknown> {\n throw new Error(\"Method not implemented.\");\n }\n\n protected async *readChunks(params: {\n startOffset?: number;\n chunkSize: number;\n }): AsyncIterable<{ offset: number; bytes: Uint8Array }> {\n this.assertInitialized();\n\n const fileSize = await this.size();\n let offset = params.startOffset ?? 
0;\n\n // NOTE: What if the file size changed during the iteration?\n while (offset < fileSize) {\n const length = Math.min(params.chunkSize, fileSize - offset);\n if (length === 0) break;\n\n const bytes = await this.readAt(offset, length);\n if (bytes.length === 0) break;\n yield { offset, bytes };\n offset += bytes.length;\n }\n }\n\n protected async size(): Promise<number> {\n this.assertInitialized();\n\n return await this.fileHandle.stat().then((stat) => stat.size);\n }\n\n protected async readAt(offset: number, length: number): Promise<Uint8Array> {\n this.assertInitialized();\n\n const buffer = new Uint8Array(length);\n const result = await this.fileHandle.read(buffer, 0, length, offset);\n\n return new Uint8Array(buffer.subarray(0, result.bytesRead));\n }\n\n protected assertInitialized(): asserts this is this & {\n fileHandle: FileHandle;\n } {\n if (!this.fileHandle) {\n throw new Error(\"File Storage is not initialized\");\n }\n }\n}\n\nexport type FileStorageFileHandle = {\n size(): Promise<number>;\n readAt(offset: number, length: number): Promise<Uint8Array>;\n readChunks(params: {\n startOffset?: number;\n chunkSize: number;\n }): AsyncIterable<{ offset: number; bytes: Uint8Array }>;\n};\n\nexport abstract class AbstractFileStorageCodec<TObject> {\n abstract readAll(params: {\n fileHandle: FileStorageFileHandle;\n }): Promise<TObject[]>;\n // TODO: Add other methods like write, append, read one etc.\n}\n","import { bufferToString } from \"@/utils\";\nimport {\n AbstractFileStorageCodec,\n FileStorage,\n FileStorageFileHandle,\n} from \"./file\";\nimport z from \"zod\";\n\nexport class JSONFileStorage<TObject> extends FileStorage<TObject> {\n declare codec: JSONFileStorageCodec<TObject>;\n\n constructor(config: {\n path: string;\n chunkSize?: number;\n schema: z.ZodType<TObject>;\n }) {\n super({\n path: config.path,\n codec: new JSONFileStorageCodec({\n chunkSize: config.chunkSize,\n schema: config.schema,\n }),\n });\n }\n}\n\nexport class JSONFileStorageCodec<\n TObject,\n> extends AbstractFileStorageCodec<TObject> {\n private chunkSize: number;\n private schema: z.ZodType<TObject>;\n\n constructor(config: { chunkSize?: number; schema: z.ZodType<TObject> }) {\n super();\n this.chunkSize = config?.chunkSize ?? 
64 * 1024;\n this.schema = config.schema;\n }\n\n async readAll(params: {\n fileHandle: FileStorageFileHandle;\n }): Promise<TObject[]> {\n const wholeFile: Uint8Array = new Uint8Array(\n await params.fileHandle.size()\n );\n for await (const chunk of params.fileHandle.readChunks({\n chunkSize: this.chunkSize,\n })) {\n wholeFile.set(chunk.bytes, chunk.offset);\n }\n\n return this.schema.array().parse(JSON.parse(bufferToString(wholeFile)));\n }\n}\n"],"mappings":";;;;;AAAO,IAAe,kBAAf,MAAwC;AAAA,EAU7C,MAAM,QAAyB;AAC7B,UAAM,IAAI,MAAM,yBAAyB;AAAA,EAC3C;AACF;;;ACbA,SAAqB,YAAY;AAG1B,IAAM,cAAN,cAAmC,gBAAyB;AAAA,EACvD;AAAA,EACA;AAAA,EACA;AAAA,EAEV,YAAY,QAGT;AACD,UAAM;AACN,SAAK,OAAO,OAAO;AACnB,SAAK,QAAQ,OAAO;AAAA,EACtB;AAAA,EAEA,MAAM,OAAsB;AAC1B,SAAK,aAAa,MAAM,KAAK,KAAK,MAAM,IAAI;AAAA,EAC9C;AAAA,EAEA,MAAe,KACb,MACA,SACyB;AACzB,UAAM,IAAI,MAAM,yBAAyB;AAAA,EAC3C;AAAA,EAEA,MAAe,QAAQ,SAAuC;AAC5D,SAAK,kBAAkB;AAEvB,WAAO,MAAM,KAAK,MAAM,QAAQ;AAAA,MAC9B,YAAY;AAAA,QACV,QAAQ,KAAK,OAAO,KAAK,IAAI;AAAA,QAC7B,YAAY,KAAK,WAAW,KAAK,IAAI;AAAA,QACrC,MAAM,KAAK,KAAK,KAAK,IAAI;AAAA,MAC3B;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAe,MACb,MACA,QACA,SACkB;AAClB,UAAM,IAAI,MAAM,yBAAyB;AAAA,EAC3C;AAAA,EAEA,OAAiB,WAAW,QAG6B;AACvD,SAAK,kBAAkB;AAEvB,UAAM,WAAW,MAAM,KAAK,KAAK;AACjC,QAAI,SAAS,OAAO,eAAe;AAGnC,WAAO,SAAS,UAAU;AACxB,YAAM,SAAS,KAAK,IAAI,OAAO,WAAW,WAAW,MAAM;AAC3D,UAAI,WAAW,EAAG;AAElB,YAAM,QAAQ,MAAM,KAAK,OAAO,QAAQ,MAAM;AAC9C,UAAI,MAAM,WAAW,EAAG;AACxB,YAAM,EAAE,QAAQ,MAAM;AACtB,gBAAU,MAAM;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAgB,OAAwB;AACtC,SAAK,kBAAkB;AAEvB,WAAO,MAAM,KAAK,WAAW,KAAK,EAAE,KAAK,CAAC,SAAS,KAAK,IAAI;AAAA,EAC9D;AAAA,EAEA,MAAgB,OAAO,QAAgB,QAAqC;AAC1E,SAAK,kBAAkB;AAEvB,UAAM,SAAS,IAAI,WAAW,MAAM;AACpC,UAAM,SAAS,MAAM,KAAK,WAAW,KAAK,QAAQ,GAAG,QAAQ,MAAM;AAEnE,WAAO,IAAI,WAAW,OAAO,SAAS,GAAG,OAAO,SAAS,CAAC;AAAA,EAC5D;AAAA,EAEU,oBAER;AACA,QAAI,CAAC,KAAK,YAAY;AACpB,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AAAA,EACF;AACF;AAWO,IAAe,2BAAf,MAAiD;AAAA;AAKxD;;;ACnGO,IAAM,kBAAN,cAAuC,YAAqB;AAAA,EAGjE,YAAY,QAIT;AACD,UAAM;AAAA,MACJ,MAAM,OAAO;AAAA,MACb,OAAO,IAAI,qBAAqB;AAAA,QAC9B,WAAW,OAAO;AAAA,QAClB,QAAQ,OAAO;AAAA,MACjB,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;AAEO,IAAM,uBAAN,cAEG,yBAAkC;AAAA,EAClC;AAAA,EACA;AAAA,EAER,YAAY,QAA4D;AACtE,UAAM;AACN,SAAK,YAAY,QAAQ,aAAa,KAAK;AAC3C,SAAK,SAAS,OAAO;AAAA,EACvB;AAAA,EAEA,MAAM,QAAQ,QAES;AACrB,UAAM,YAAwB,IAAI;AAAA,MAChC,MAAM,OAAO,WAAW,KAAK;AAAA,IAC/B;AACA,qBAAiB,SAAS,OAAO,WAAW,WAAW;AAAA,MACrD,WAAW,KAAK;AAAA,IAClB,CAAC,GAAG;AACF,gBAAU,IAAI,MAAM,OAAO,MAAM,MAAM;AAAA,IACzC;AAEA,WAAO,KAAK,OAAO,MAAM,EAAE,MAAM,KAAK,MAAM,eAAe,SAAS,CAAC,CAAC;AAAA,EACxE;AACF;","names":[]}
package/dist/{chunk-ZXTQJFGL.js → chunk-Q6GSOHOP.js}
CHANGED
@@ -1,10 +1,10 @@
+import {
+  buildSchemaDefiner
+} from "./chunk-OQE6TQXZ.js";
 import {
   CATEGORIES,
   PEERBENCH_NAMESPACE
 } from "./chunk-UHHHSYVE.js";
-import {
-  buildSchemaDefiner
-} from "./chunk-OQE6TQXZ.js";
 import {
   IdSchema
 } from "./chunk-NUEOE3K5.js";
@@ -41,4 +41,4 @@ export {
   defineSystemPromptSchema,
   SimpleSystemPromptSchemaV1
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-Q6GSOHOP.js.map
package/dist/index.js
CHANGED
@@ -1,6 +1,9 @@
 import {
   defineRunner
 } from "./chunk-QY5MPNNB.js";
+import {
+  ScoringMethod
+} from "./chunk-HMQYGCKI.js";
 import {
   CATEGORIES,
   PEERBENCH_NAMESPACE
@@ -13,9 +16,6 @@ import {
   sleep,
   stringToBuffer
 } from "./chunk-4UBK6452.js";
-import {
-  ScoringMethod
-} from "./chunk-HMQYGCKI.js";
 import "./chunk-PZ5AY32C.js";
 
 // src/errors/polyfill.ts
package/dist/{json-file-ZwzLUbje.d.ts → json-file-Bgv9TLcX.d.ts}
CHANGED
@@ -6,6 +6,7 @@ declare abstract class AbstractStorage<TObject> {
     abstract read(key: string, params?: unknown): Promise<TObject | null>;
    abstract readAll(params?: unknown): Promise<TObject[]>;
    abstract write(key: string, value: TObject, params?: unknown): Promise<unknown>;
+    count(): Promise<number>;
 }
 
 declare class FileStorage<TObject> extends AbstractStorage<TObject> {
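The `count(): Promise<number>` member added to the AbstractStorage declaration above pairs with the runtime change in chunk-JFLUJLGT.js earlier in this diff, where the default implementation simply throws "Method not implemented.". The sketch below is illustrative only: `InMemoryStorage` is not part of peerbench, and the `peerbench/storages` import path assumes the storages entry point re-exports `AbstractStorage`, which this diff does not show.

```ts
import { AbstractStorage } from "peerbench/storages"; // assumed export path

// Hypothetical storage subclass demonstrating the new count() hook.
class InMemoryStorage<T> extends AbstractStorage<T> {
  private items = new Map<string, T>();

  async init(): Promise<void> {}

  async read(key: string): Promise<T | null> {
    return this.items.get(key) ?? null;
  }

  async readAll(): Promise<T[]> {
    return [...this.items.values()];
  }

  async write(key: string, value: T): Promise<unknown> {
    this.items.set(key, value);
    return key;
  }

  // New in 0.0.5: overriding is optional; the inherited default rejects with
  // "Method not implemented." when called.
  override async count(): Promise<number> {
    return this.items.size;
  }
}
```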
package/dist/providers/index.d.ts
CHANGED
@@ -18,6 +18,10 @@ declare class MastraProvider extends AbstractLLMProvider {
     });
     forward(args: LLMProviderForwardArgs & {
         memory?: AgentMemoryOption;
+        /**
+         * The model that will be used as the brain for the agent.
+         */
+        modelName?: string;
     }): Promise<ChatResponse>;
     getAgentInfo(args: {
         agentId: string;
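0.0.5 adds the optional `modelName` field to `MastraProvider.forward()`, documented above as the model used as the brain for the agent. A minimal usage sketch follows; the endpoint URL, agent id, and model name are placeholders, and the `peerbench/providers` specifier assumes the providers entry point exports `MastraProvider`.

```ts
import { MastraProvider } from "peerbench/providers"; // assumed export path

const provider = new MastraProvider({
  endpoint: "http://localhost:4111",   // placeholder Mastra server URL
  authToken: process.env.MASTRA_TOKEN, // optional bearer token
});

const response = await provider.forward({
  model: "my-benchmark-agent",       // placeholder Mastra agent id
  modelName: "openai/gpt-4o-mini",   // new in 0.0.5: optional model hint for the agent
  messages: [{ role: "user", content: "Answer the MCQ prompt." }],
});

console.log(response.data); // agent output text
```

Per the provider source embedded in chunk-HPPCDSJ3.js.map above, `forward()` filters the messages to user/assistant roles and returns the agent's generated text in `response.data` along with `startedAt`/`completedAt` timestamps.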
package/dist/providers/index.js
CHANGED
@@ -2,9 +2,9 @@ import {
   BaseSystemPromptSchemaV1,
   SimpleSystemPromptSchemaV1,
   defineSystemPromptSchema
-} from "../../chunk-
-import "../../chunk-UHHHSYVE.js";
+} from "../../chunk-Q6GSOHOP.js";
 import "../../chunk-OQE6TQXZ.js";
+import "../../chunk-UHHHSYVE.js";
 import "../../chunk-NUEOE3K5.js";
 import "../../chunk-PZ5AY32C.js";
 export {
package/dist/storages/index.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import { A as AbstractStorage } from '../json-file-
-export { c as AbstractFileStorageCodec, F as FileStorage, b as FileStorageFileHandle, J as JSONFileStorage, a as JSONFileStorageCodec } from '../json-file-
+import { A as AbstractStorage } from '../json-file-Bgv9TLcX.js';
+export { c as AbstractFileStorageCodec, F as FileStorage, b as FileStorageFileHandle, J as JSONFileStorage, a as JSONFileStorageCodec } from '../json-file-Bgv9TLcX.js';
 import * as better_sqlite3 from 'better-sqlite3';
 import 'node:fs/promises';
 import 'zod';
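Both re-exports now resolve to the renamed json-file-Bgv9TLcX chunk. For orientation, here is a small usage sketch of `JSONFileStorage` based on the source embedded in chunk-JFLUJLGT.js.map above; the file path and schema are illustrative, and the `peerbench/storages` specifier assumes a storages subpath export.

```ts
import { JSONFileStorage } from "peerbench/storages"; // assumed export path
import { z } from "zod";

// Illustrative schema; readAll() validates the parsed JSON array against it.
const ScoreSchema = z.object({
  promptId: z.string(),
  score: z.number(),
});

const storage = new JSONFileStorage({
  path: "./scores.json", // must already exist: init() opens it with the "r+" flag
  schema: ScoreSchema,
});

await storage.init();
const scores = await storage.readAll();
console.log(`loaded ${scores.length} scores`);
```

Note that as of 0.0.5 the file-backed classes only implement `readAll()`; `read()`, `write()`, and the new `count()` default still throw "Method not implemented." per the sources above.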
package/dist/storages/index.js
CHANGED
package/package.json
CHANGED
package/dist/chunk-ERALDEZY.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/storages/abstract.ts","../src/storages/file.ts","../src/storages/json-file.ts"],"sourcesContent":["export abstract class AbstractStorage<TObject> {\n abstract init(params?: unknown): Promise<void>;\n abstract read(key: string, params?: unknown): Promise<TObject | null>;\n abstract readAll(params?: unknown): Promise<TObject[]>;\n abstract write(\n key: string,\n value: TObject,\n params?: unknown\n ): Promise<unknown>;\n}\n","import { FileHandle, open } from \"node:fs/promises\";\nimport { AbstractStorage } from \"./abstract\";\n\nexport class FileStorage<TObject> extends AbstractStorage<TObject> {\n protected path: string;\n protected codec: AbstractFileStorageCodec<TObject>;\n protected fileHandle: FileHandle | undefined;\n\n constructor(config: {\n path: string;\n codec: AbstractFileStorageCodec<TObject>;\n }) {\n super();\n this.path = config.path;\n this.codec = config.codec;\n }\n\n async init(): Promise<void> {\n this.fileHandle = await open(this.path, \"r+\");\n }\n\n override async read(\n _key: string,\n _params?: unknown\n ): Promise<TObject | null> {\n throw new Error(\"Method not implemented.\");\n }\n\n override async readAll(_params?: unknown): Promise<TObject[]> {\n this.assertInitialized();\n\n return await this.codec.readAll({\n fileHandle: {\n readAt: this.readAt.bind(this),\n readChunks: this.readChunks.bind(this),\n size: this.size.bind(this),\n },\n });\n }\n\n override async write(\n _key: string,\n _value: TObject,\n _params?: unknown\n ): Promise<unknown> {\n throw new Error(\"Method not implemented.\");\n }\n\n protected async *readChunks(params: {\n startOffset?: number;\n chunkSize: number;\n }): AsyncIterable<{ offset: number; bytes: Uint8Array }> {\n this.assertInitialized();\n\n const fileSize = await this.size();\n let offset = params.startOffset ?? 
0;\n\n // NOTE: What if the file size changed during the iteration?\n while (offset < fileSize) {\n const length = Math.min(params.chunkSize, fileSize - offset);\n if (length === 0) break;\n\n const bytes = await this.readAt(offset, length);\n if (bytes.length === 0) break;\n yield { offset, bytes };\n offset += bytes.length;\n }\n }\n\n protected async size(): Promise<number> {\n this.assertInitialized();\n\n return await this.fileHandle.stat().then((stat) => stat.size);\n }\n\n protected async readAt(offset: number, length: number): Promise<Uint8Array> {\n this.assertInitialized();\n\n const buffer = new Uint8Array(length);\n const result = await this.fileHandle.read(buffer, 0, length, offset);\n\n return new Uint8Array(buffer.subarray(0, result.bytesRead));\n }\n\n protected assertInitialized(): asserts this is this & {\n fileHandle: FileHandle;\n } {\n if (!this.fileHandle) {\n throw new Error(\"File Storage is not initialized\");\n }\n }\n}\n\nexport type FileStorageFileHandle = {\n size(): Promise<number>;\n readAt(offset: number, length: number): Promise<Uint8Array>;\n readChunks(params: {\n startOffset?: number;\n chunkSize: number;\n }): AsyncIterable<{ offset: number; bytes: Uint8Array }>;\n};\n\nexport abstract class AbstractFileStorageCodec<TObject> {\n abstract readAll(params: {\n fileHandle: FileStorageFileHandle;\n }): Promise<TObject[]>;\n // TODO: Add other methods like write, append, read one etc.\n}\n","import { bufferToString } from \"@/utils\";\nimport {\n AbstractFileStorageCodec,\n FileStorage,\n FileStorageFileHandle,\n} from \"./file\";\nimport z from \"zod\";\n\nexport class JSONFileStorage<TObject> extends FileStorage<TObject> {\n declare codec: JSONFileStorageCodec<TObject>;\n\n constructor(config: {\n path: string;\n chunkSize?: number;\n schema: z.ZodType<TObject>;\n }) {\n super({\n path: config.path,\n codec: new JSONFileStorageCodec({\n chunkSize: config.chunkSize,\n schema: config.schema,\n }),\n });\n }\n}\n\nexport class JSONFileStorageCodec<\n TObject,\n> extends AbstractFileStorageCodec<TObject> {\n private chunkSize: number;\n private schema: z.ZodType<TObject>;\n\n constructor(config: { chunkSize?: number; schema: z.ZodType<TObject> }) {\n super();\n this.chunkSize = config?.chunkSize ?? 
64 * 1024;\n this.schema = config.schema;\n }\n\n async readAll(params: {\n fileHandle: FileStorageFileHandle;\n }): Promise<TObject[]> {\n const wholeFile: Uint8Array = new Uint8Array(\n await params.fileHandle.size()\n );\n for await (const chunk of params.fileHandle.readChunks({\n chunkSize: this.chunkSize,\n })) {\n wholeFile.set(chunk.bytes, chunk.offset);\n }\n\n return this.schema.array().parse(JSON.parse(bufferToString(wholeFile)));\n }\n}\n"],"mappings":";;;;;AAAO,IAAe,kBAAf,MAAwC;AAS/C;;;ACTA,SAAqB,YAAY;AAG1B,IAAM,cAAN,cAAmC,gBAAyB;AAAA,EACvD;AAAA,EACA;AAAA,EACA;AAAA,EAEV,YAAY,QAGT;AACD,UAAM;AACN,SAAK,OAAO,OAAO;AACnB,SAAK,QAAQ,OAAO;AAAA,EACtB;AAAA,EAEA,MAAM,OAAsB;AAC1B,SAAK,aAAa,MAAM,KAAK,KAAK,MAAM,IAAI;AAAA,EAC9C;AAAA,EAEA,MAAe,KACb,MACA,SACyB;AACzB,UAAM,IAAI,MAAM,yBAAyB;AAAA,EAC3C;AAAA,EAEA,MAAe,QAAQ,SAAuC;AAC5D,SAAK,kBAAkB;AAEvB,WAAO,MAAM,KAAK,MAAM,QAAQ;AAAA,MAC9B,YAAY;AAAA,QACV,QAAQ,KAAK,OAAO,KAAK,IAAI;AAAA,QAC7B,YAAY,KAAK,WAAW,KAAK,IAAI;AAAA,QACrC,MAAM,KAAK,KAAK,KAAK,IAAI;AAAA,MAC3B;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAe,MACb,MACA,QACA,SACkB;AAClB,UAAM,IAAI,MAAM,yBAAyB;AAAA,EAC3C;AAAA,EAEA,OAAiB,WAAW,QAG6B;AACvD,SAAK,kBAAkB;AAEvB,UAAM,WAAW,MAAM,KAAK,KAAK;AACjC,QAAI,SAAS,OAAO,eAAe;AAGnC,WAAO,SAAS,UAAU;AACxB,YAAM,SAAS,KAAK,IAAI,OAAO,WAAW,WAAW,MAAM;AAC3D,UAAI,WAAW,EAAG;AAElB,YAAM,QAAQ,MAAM,KAAK,OAAO,QAAQ,MAAM;AAC9C,UAAI,MAAM,WAAW,EAAG;AACxB,YAAM,EAAE,QAAQ,MAAM;AACtB,gBAAU,MAAM;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAgB,OAAwB;AACtC,SAAK,kBAAkB;AAEvB,WAAO,MAAM,KAAK,WAAW,KAAK,EAAE,KAAK,CAAC,SAAS,KAAK,IAAI;AAAA,EAC9D;AAAA,EAEA,MAAgB,OAAO,QAAgB,QAAqC;AAC1E,SAAK,kBAAkB;AAEvB,UAAM,SAAS,IAAI,WAAW,MAAM;AACpC,UAAM,SAAS,MAAM,KAAK,WAAW,KAAK,QAAQ,GAAG,QAAQ,MAAM;AAEnE,WAAO,IAAI,WAAW,OAAO,SAAS,GAAG,OAAO,SAAS,CAAC;AAAA,EAC5D;AAAA,EAEU,oBAER;AACA,QAAI,CAAC,KAAK,YAAY;AACpB,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AAAA,EACF;AACF;AAWO,IAAe,2BAAf,MAAiD;AAAA;AAKxD;;;ACnGO,IAAM,kBAAN,cAAuC,YAAqB;AAAA,EAGjE,YAAY,QAIT;AACD,UAAM;AAAA,MACJ,MAAM,OAAO;AAAA,MACb,OAAO,IAAI,qBAAqB;AAAA,QAC9B,WAAW,OAAO;AAAA,QAClB,QAAQ,OAAO;AAAA,MACjB,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;AAEO,IAAM,uBAAN,cAEG,yBAAkC;AAAA,EAClC;AAAA,EACA;AAAA,EAER,YAAY,QAA4D;AACtE,UAAM;AACN,SAAK,YAAY,QAAQ,aAAa,KAAK;AAC3C,SAAK,SAAS,OAAO;AAAA,EACvB;AAAA,EAEA,MAAM,QAAQ,QAES;AACrB,UAAM,YAAwB,IAAI;AAAA,MAChC,MAAM,OAAO,WAAW,KAAK;AAAA,IAC/B;AACA,qBAAiB,SAAS,OAAO,WAAW,WAAW;AAAA,MACrD,WAAW,KAAK;AAAA,IAClB,CAAC,GAAG;AACF,gBAAU,IAAI,MAAM,OAAO,MAAM,MAAM;AAAA,IACzC;AAEA,WAAO,KAAK,OAAO,MAAM,EAAE,MAAM,KAAK,MAAM,eAAe,SAAS,CAAC,CAAC;AAAA,EACxE;AACF;","names":[]}
package/dist/chunk-XFXVAERY.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/providers/abstract/provider.ts","../src/providers/abstract/llm.ts","../src/providers/mastra.ts","../src/providers/openai.ts","../src/providers/openrouter.ts"],"sourcesContent":["export abstract class AbstractProvider {\n abstract readonly kind: string;\n}\n\nexport type ProviderResponse<TData = unknown> = {\n startedAt: number;\n completedAt: number;\n data: TData;\n};\n","import { AbstractProvider, ProviderResponse } from \"./provider\";\nimport {\n ResponseFormatJSONObject,\n ResponseFormatJSONSchema,\n ResponseFormatText,\n} from \"openai/resources/shared\";\nimport { ChatCompletionMessageParam } from \"openai/resources/chat/completions\";\n\nexport abstract class AbstractLLMProvider extends AbstractProvider {\n abstract forward(args: LLMProviderForwardArgs): Promise<ChatResponse>;\n}\n\nexport type LLMProviderForwardArgs = {\n messages: ChatCompletionMessageParam[];\n model: string;\n abortSignal?: AbortSignal;\n temperature?: number;\n responseFormat?:\n | ResponseFormatText\n | ResponseFormatJSONSchema\n | ResponseFormatJSONObject;\n};\n\nexport type ChatResponse = ProviderResponse<string> & {\n inputTokensUsed?: number;\n outputTokensUsed?: number;\n inputCost?: string;\n outputCost?: string;\n\n metadata?: Record<string, unknown>;\n};\n","import {\n AbstractLLMProvider,\n type ChatResponse,\n type LLMProviderForwardArgs,\n} from \"./abstract/llm\";\nimport { MastraClient, type GetAgentResponse } from \"@mastra/client-js\";\n\nexport class MastraProvider extends AbstractLLMProvider {\n override readonly kind = \"mastra\";\n\n private readonly endpoint: string;\n private readonly authToken?: string;\n private client: MastraClient;\n private memory?: AgentMemoryOption;\n\n constructor(params: {\n endpoint: string;\n authToken?: string;\n memory?: AgentMemoryOption;\n }) {\n super();\n this.endpoint = params.endpoint;\n this.authToken = params.authToken;\n this.client = new MastraClient({\n baseUrl: this.endpoint,\n headers: this.authToken\n ? {\n Authorization: `Bearer ${this.authToken}`,\n }\n : undefined,\n });\n this.memory = params.memory;\n }\n\n override async forward(\n args: LLMProviderForwardArgs & {\n memory?: AgentMemoryOption;\n }\n ): Promise<ChatResponse> {\n const apiMessages = args.messages\n .filter((m) => m.role === \"user\" || m.role === \"assistant\")\n .map((m) => ({\n role: m.role as \"user\" | \"assistant\",\n content: String((m as any).content ?? \"\"),\n }));\n\n const agent = this.client.getAgent(args.model);\n\n const startedAt = Date.now();\n const response = await agent.generate(\n {\n messages: apiMessages,\n runtimeContext: {\n \"model-id\": args.model,\n },\n },\n { memory: args.memory ?? this.memory }\n );\n\n return {\n data: response.text,\n startedAt,\n completedAt: Date.now(),\n };\n }\n\n async getAgentInfo(args: {\n agentId: string;\n runtimeContext?: MastraRuntimeContext;\n }) {\n return await this.client\n .getAgent(args.agentId)\n .details(args.runtimeContext);\n }\n\n async getAgents(args?: {\n runtimeContext?: MastraRuntimeContext;\n partial?: boolean;\n }): Promise<Record<string, GetAgentResponse>> {\n return this.client.getAgents(args?.runtimeContext, args?.partial);\n }\n}\n\n// NOTE: Mastra client does not export these types\nexport type AgentMemoryOption = Parameters<\n Parameters<MastraClient[\"getAgent\"]>[\"0\"] extends string\n ? ReturnType<MastraClient[\"getAgent\"]>[\"generate\"]\n : never\n>[0] extends { memory?: infer M }\n ? 
M\n : never;\n\ntype MastraRuntimeContext = Parameters<\n Parameters<MastraClient[\"getAgent\"]>[\"0\"] extends string\n ? ReturnType<MastraClient[\"getAgent\"]>[\"generate\"]\n : never\n>[0] extends { runtimeContext?: infer R }\n ? R\n : never;\n","import { RateLimiter } from \"@/utils\";\nimport { ChatCompletionMessageParam } from \"openai/resources/chat/completions\";\nimport {\n ResponseFormatJSONObject,\n ResponseFormatJSONSchema,\n ResponseFormatText,\n} from \"openai/resources/shared\";\nimport OpenAI, { APIError } from \"openai\";\nimport { AbstractLLMProvider, ChatResponse } from \"./abstract/llm\";\nimport { PEERBENCH_NAMESPACE } from \"@/constants\";\n\nexport class OpenAIProvider extends AbstractLLMProvider {\n override readonly kind = `${PEERBENCH_NAMESPACE}/llm/openai` as const;\n\n private client: OpenAI;\n private rateLimiter: RateLimiter;\n private maxRetries: number;\n\n constructor(config: {\n apiKey: string;\n baseURL: string;\n maxRetries?: number;\n timeout?: number;\n rateLimiter?: RateLimiter;\n }) {\n super();\n this.maxRetries = config.maxRetries ?? 3;\n this.rateLimiter =\n config.rateLimiter ??\n new RateLimiter({\n maxWeight: 20,\n timeWindow: 3_000,\n });\n\n this.client = new OpenAI({\n baseURL: config.baseURL,\n apiKey: config.apiKey,\n timeout: config.timeout,\n dangerouslyAllowBrowser: true,\n });\n }\n\n async forward(args: {\n messages: ChatCompletionMessageParam[];\n model: string;\n abortSignal?: AbortSignal;\n temperature?: number;\n responseFormat?:\n | ResponseFormatText\n | ResponseFormatJSONSchema\n | ResponseFormatJSONObject;\n }): Promise<ChatResponse> {\n let retryCount = this.maxRetries;\n while (retryCount > 0) {\n let startedAt: Date = new Date();\n\n try {\n const response = await this.rateLimiter.execute(\n async () => {\n // Capture the start time of the request\n startedAt = new Date();\n return await this.client.chat.completions.create(\n {\n model: args.model,\n messages: args.messages,\n temperature: args.temperature,\n response_format: args.responseFormat,\n },\n // Signal for request\n { signal: args.abortSignal }\n );\n },\n // Signal for rate limiting\n { signal: args.abortSignal }\n );\n\n if (\"error\" in response) {\n const err = response.error as any;\n throw new Error(\n `${err.message} - Code ${err.code} - ${JSON.stringify(err)}`\n );\n }\n\n if (!response?.choices?.[0]?.message?.content) {\n throw new Error(\"No content returned from the model\");\n }\n\n return {\n data: response.choices[0].message.content,\n\n inputTokensUsed: response?.usage?.prompt_tokens,\n outputTokensUsed: response?.usage?.completion_tokens,\n\n startedAt: startedAt.getTime(),\n completedAt: Date.now(),\n };\n } catch (err) {\n if (err instanceof APIError && err.status === 401) {\n throw new Error(`Invalid credentials provided`, { cause: err });\n }\n\n retryCount--;\n\n // More likely an empty HTTP response returned by the Provider\n // and it couldn't be parsed as JSON by the OpenAI SDK. We need to retry the request\n // More info can be found in the following links:\n // https://www.reddit.com/r/SillyTavernAI/comments/1ik95vr/deepseek_r1_on_openrouter_returning_blank_messages/\n // https://github.com/cline/cline/issues/60\n if (err instanceof SyntaxError) {\n console.debug(err);\n continue;\n }\n\n // If it was another error, just continue until we run out of retries\n if (retryCount !== 0) {\n continue;\n }\n\n throw new Error(\n `Failed to forward prompt to the model: ${err instanceof Error ? 
err.message : err}`,\n { cause: err }\n );\n }\n }\n\n throw new Error(\n `Failed to forward prompt to the model: Max retries reached`,\n { cause: new Error(\"Max retries reached\") }\n );\n }\n}\n","import {\n AbstractLLMProvider,\n ChatResponse,\n LLMProviderForwardArgs,\n} from \"./abstract/llm\";\nimport { RateLimiter } from \"@/utils\";\nimport { OpenAIProvider } from \"./openai\";\nimport { PEERBENCH_NAMESPACE } from \"@/constants\";\nimport Decimal from \"decimal.js\";\nimport axios from \"axios\";\n\nconst baseURL = \"https://openrouter.ai/api/v1\";\nconst MODELS_CACHE_TTL = 1000 * 60 * 60 * 24; // 24 hours\n\nexport class OpenRouterProvider extends AbstractLLMProvider {\n override readonly kind = `${PEERBENCH_NAMESPACE}/llm/openrouter.ai` as const;\n\n private models: ModelsResponse | undefined = undefined;\n private modelsCachePromise: Promise<ModelsResponse | undefined> =\n Promise.resolve(undefined);\n private modelsUpdatedAt = 0;\n private openAIProvider: OpenAIProvider;\n\n constructor(config: {\n apiKey: string;\n maxRetries?: number;\n timeout?: number;\n rateLimiter?: RateLimiter;\n }) {\n super();\n this.openAIProvider = new OpenAIProvider({\n baseURL,\n apiKey: config.apiKey,\n maxRetries: config.maxRetries,\n timeout: config.timeout,\n rateLimiter: config.rateLimiter,\n });\n }\n\n override async forward(args: LLMProviderForwardArgs): Promise<ChatResponse> {\n // Update models cache concurrently (non-blocking)\n const [response] = await Promise.all([\n this.openAIProvider.forward(args),\n this.updateModelsCache().catch(() => {\n // Silently fail if cache update fails so we won't have cost info in the result\n }),\n ]);\n\n // Get the model info from the cache\n const modelInfo = this.models?.data.find((m) => m.id === args.model);\n let inputCost: string | undefined = undefined;\n let outputCost: string | undefined = undefined;\n\n if (modelInfo !== undefined) {\n // Use Decimal.js for more accurate calculation\n if (response.inputTokensUsed !== undefined) {\n inputCost = new Decimal(modelInfo.pricing.prompt)\n .mul(response.inputTokensUsed)\n .toFixed(10);\n }\n if (response.outputTokensUsed !== undefined) {\n outputCost = new Decimal(modelInfo.pricing.completion)\n .mul(response.outputTokensUsed)\n .toFixed(10);\n }\n }\n\n return {\n ...response,\n inputCost,\n outputCost,\n };\n }\n\n /**\n * Updates the cache that holds information about OpenRouter models\n * including pricing information. It will be valid for 24 hours as\n * long as the instance of this Provider object is alive.\n */\n private async updateModelsCache() {\n // Chain each update method call to the promise.\n // This approach prevents race conditions between multiple calls.\n // Since each call is chained to the end of the previous one,\n // each promise makes a request only if the models cache is not updated\n // in the last call. 
Otherwise it simply resolves to the cached value.\n this.modelsCachePromise = this.modelsCachePromise\n .then(async () => {\n if (\n // The data presented in the cache\n this.models !== undefined &&\n // The cache is still valid\n Date.now() - this.modelsUpdatedAt < MODELS_CACHE_TTL\n ) {\n return this.models;\n }\n\n // If the cache is not valid, update it\n return axios\n .get<ModelsResponse>(`${baseURL}/models`)\n .then((res) => res.data)\n .then((data) => {\n // Only get the models that supports text input and output\n data = {\n data: data.data.filter(\n (m) =>\n m.architecture.input_modalities.includes(\"text\") &&\n m.architecture.output_modalities.includes(\"text\") &&\n // These models are \"fast apply model\" and don't support multi turn conversations so don't include them\n ![\n \"morph/morph-v3-large\",\n \"morph/morph-v3-fast\",\n \"relace/relace-apply-3\",\n ].includes(m.id)\n ),\n };\n\n this.models = data;\n this.modelsUpdatedAt = Date.now();\n\n return data;\n });\n })\n .catch(() => undefined);\n\n // Wait for the promise chain to resolve\n await this.modelsCachePromise;\n }\n}\n\ntype PutModality = \"text\" | \"image\" | \"file\" | \"audio\";\ntype Modality = \"text->text\" | \"text+image->text\" | \"text+image->text+image\";\ntype ModelsResponse = {\n data: {\n readonly id: string;\n readonly canonical_slug: string;\n readonly hugging_face_id: null | string;\n readonly name: string;\n readonly created: number;\n readonly description: string;\n readonly context_length: number;\n readonly architecture: {\n readonly modality: Modality;\n readonly input_modalities: PutModality[];\n readonly output_modalities: PutModality[];\n readonly instruct_type: null | string;\n };\n readonly pricing: {\n readonly prompt: string;\n readonly completion: string;\n readonly request?: string;\n readonly image?: string;\n readonly web_search?: string;\n readonly internal_reasoning?: string;\n readonly input_cache_read?: string;\n readonly input_cache_write?: string;\n readonly audio?: string;\n };\n 
}[];\n};\n"],"mappings":";;;;;;;;AAAO,IAAe,mBAAf,MAAgC;AAEvC;;;ACMO,IAAe,sBAAf,cAA2C,iBAAiB;AAEnE;;;ACLA,SAAS,oBAA2C;AAE7C,IAAM,iBAAN,cAA6B,oBAAoB;AAAA,EACpC,OAAO;AAAA,EAER;AAAA,EACA;AAAA,EACT;AAAA,EACA;AAAA,EAER,YAAY,QAIT;AACD,UAAM;AACN,SAAK,WAAW,OAAO;AACvB,SAAK,YAAY,OAAO;AACxB,SAAK,SAAS,IAAI,aAAa;AAAA,MAC7B,SAAS,KAAK;AAAA,MACd,SAAS,KAAK,YACV;AAAA,QACE,eAAe,UAAU,KAAK,SAAS;AAAA,MACzC,IACA;AAAA,IACN,CAAC;AACD,SAAK,SAAS,OAAO;AAAA,EACvB;AAAA,EAEA,MAAe,QACb,MAGuB;AACvB,UAAM,cAAc,KAAK,SACtB,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE,SAAS,WAAW,EACzD,IAAI,CAAC,OAAO;AAAA,MACX,MAAM,EAAE;AAAA,MACR,SAAS,OAAQ,EAAU,WAAW,EAAE;AAAA,IAC1C,EAAE;AAEJ,UAAM,QAAQ,KAAK,OAAO,SAAS,KAAK,KAAK;AAE7C,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,WAAW,MAAM,MAAM;AAAA,MAC3B;AAAA,QACE,UAAU;AAAA,QACV,gBAAgB;AAAA,UACd,YAAY,KAAK;AAAA,QACnB;AAAA,MACF;AAAA,MACA,EAAE,QAAQ,KAAK,UAAU,KAAK,OAAO;AAAA,IACvC;AAEA,WAAO;AAAA,MACL,MAAM,SAAS;AAAA,MACf;AAAA,MACA,aAAa,KAAK,IAAI;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,MAAM,aAAa,MAGhB;AACD,WAAO,MAAM,KAAK,OACf,SAAS,KAAK,OAAO,EACrB,QAAQ,KAAK,cAAc;AAAA,EAChC;AAAA,EAEA,MAAM,UAAU,MAG8B;AAC5C,WAAO,KAAK,OAAO,UAAU,MAAM,gBAAgB,MAAM,OAAO;AAAA,EAClE;AACF;;;AC1EA,OAAO,UAAU,gBAAgB;AAI1B,IAAM,iBAAN,cAA6B,oBAAoB;AAAA,EACpC,OAAO,GAAG,mBAAmB;AAAA,EAEvC;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,QAMT;AACD,UAAM;AACN,SAAK,aAAa,OAAO,cAAc;AACvC,SAAK,cACH,OAAO,eACP,IAAI,YAAY;AAAA,MACd,WAAW;AAAA,MACX,YAAY;AAAA,IACd,CAAC;AAEH,SAAK,SAAS,IAAI,OAAO;AAAA,MACvB,SAAS,OAAO;AAAA,MAChB,QAAQ,OAAO;AAAA,MACf,SAAS,OAAO;AAAA,MAChB,yBAAyB;AAAA,IAC3B,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,QAAQ,MASY;AACxB,QAAI,aAAa,KAAK;AACtB,WAAO,aAAa,GAAG;AACrB,UAAI,YAAkB,oBAAI,KAAK;AAE/B,UAAI;AACF,cAAM,WAAW,MAAM,KAAK,YAAY;AAAA,UACtC,YAAY;AAEV,wBAAY,oBAAI,KAAK;AACrB,mBAAO,MAAM,KAAK,OAAO,KAAK,YAAY;AAAA,cACxC;AAAA,gBACE,OAAO,KAAK;AAAA,gBACZ,UAAU,KAAK;AAAA,gBACf,aAAa,KAAK;AAAA,gBAClB,iBAAiB,KAAK;AAAA,cACxB;AAAA;AAAA,cAEA,EAAE,QAAQ,KAAK,YAAY;AAAA,YAC7B;AAAA,UACF;AAAA;AAAA,UAEA,EAAE,QAAQ,KAAK,YAAY;AAAA,QAC7B;AAEA,YAAI,WAAW,UAAU;AACvB,gBAAM,MAAM,SAAS;AACrB,gBAAM,IAAI;AAAA,YACR,GAAG,IAAI,OAAO,WAAW,IAAI,IAAI,MAAM,KAAK,UAAU,GAAG,CAAC;AAAA,UAC5D;AAAA,QACF;AAEA,YAAI,CAAC,UAAU,UAAU,CAAC,GAAG,SAAS,SAAS;AAC7C,gBAAM,IAAI,MAAM,oCAAoC;AAAA,QACtD;AAEA,eAAO;AAAA,UACL,MAAM,SAAS,QAAQ,CAAC,EAAE,QAAQ;AAAA,UAElC,iBAAiB,UAAU,OAAO;AAAA,UAClC,kBAAkB,UAAU,OAAO;AAAA,UAEnC,WAAW,UAAU,QAAQ;AAAA,UAC7B,aAAa,KAAK,IAAI;AAAA,QACxB;AAAA,MACF,SAAS,KAAK;AACZ,YAAI,eAAe,YAAY,IAAI,WAAW,KAAK;AACjD,gBAAM,IAAI,MAAM,gCAAgC,EAAE,OAAO,IAAI,CAAC;AAAA,QAChE;AAEA;AAOA,YAAI,eAAe,aAAa;AAC9B,kBAAQ,MAAM,GAAG;AACjB;AAAA,QACF;AAGA,YAAI,eAAe,GAAG;AACpB;AAAA,QACF;AAEA,cAAM,IAAI;AAAA,UACR,0CAA0C,eAAe,QAAQ,IAAI,UAAU,GAAG;AAAA,UAClF,EAAE,OAAO,IAAI;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAEA,UAAM,IAAI;AAAA,MACR;AAAA,MACA,EAAE,OAAO,IAAI,MAAM,qBAAqB,EAAE;AAAA,IAC5C;AAAA,EACF;AACF;;;AC1HA,OAAO,aAAa;AACpB,OAAO,WAAW;AAElB,IAAM,UAAU;AAChB,IAAM,mBAAmB,MAAO,KAAK,KAAK;AAEnC,IAAM,qBAAN,cAAiC,oBAAoB;AAAA,EACxC,OAAO,GAAG,mBAAmB;AAAA,EAEvC,SAAqC;AAAA,EACrC,qBACN,QAAQ,QAAQ,MAAS;AAAA,EACnB,kBAAkB;AAAA,EAClB;AAAA,EAER,YAAY,QAKT;AACD,UAAM;AACN,SAAK,iBAAiB,IAAI,eAAe;AAAA,MACvC;AAAA,MACA,QAAQ,OAAO;AAAA,MACf,YAAY,OAAO;AAAA,MACnB,SAAS,OAAO;AAAA,MAChB,aAAa,OAAO;AAAA,IACtB,CAAC;AAAA,EACH;AAAA,EAEA,MAAe,QAAQ,MAAqD;AAE1E,UAAM,CAAC,QAAQ,IAAI,MAAM,QAAQ,IAAI;AAAA,MACnC,KAAK,eAAe,QAAQ,IAAI;AAAA,MAChC,KAAK,kBAAkB,EAAE,MAAM,MAAM;AAAA,MAErC,CAAC;AAAA,IACH,CAAC;AAGD,UAAM,YAAY,KAAK,QAAQ,KAAK,KAAK,CAAC,MAAM,EAAE,OAAO,KAAK,KAAK;AACnE,QAAI,YAAgC;AACpC,QAAI,aAAiC;AAErC,QAAI,cAAc,QAAW;AAE3B,UAAI,SAAS,oBAAoB,QAAW;AAC1C,oBAAY,IAAI,QAAQ,UAAU,QAAQ,MAAM,EAC7C,IAAI,SAAS,eAAe,EAC5B,QAAQ,EAAE;AAAA,MACf;AACA,UAAI,
SAAS,qBAAqB,QAAW;AAC3C,qBAAa,IAAI,QAAQ,UAAU,QAAQ,UAAU,EAClD,IAAI,SAAS,gBAAgB,EAC7B,QAAQ,EAAE;AAAA,MACf;AAAA,IACF;AAEA,WAAO;AAAA,MACL,GAAG;AAAA,MACH;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,oBAAoB;AAMhC,SAAK,qBAAqB,KAAK,mBAC5B,KAAK,YAAY;AAChB;AAAA;AAAA,QAEE,KAAK,WAAW;AAAA,QAEhB,KAAK,IAAI,IAAI,KAAK,kBAAkB;AAAA,QACpC;AACA,eAAO,KAAK;AAAA,MACd;AAGA,aAAO,MACJ,IAAoB,GAAG,OAAO,SAAS,EACvC,KAAK,CAAC,QAAQ,IAAI,IAAI,EACtB,KAAK,CAAC,SAAS;AAEd,eAAO;AAAA,UACL,MAAM,KAAK,KAAK;AAAA,YACd,CAAC,MACC,EAAE,aAAa,iBAAiB,SAAS,MAAM,KAC/C,EAAE,aAAa,kBAAkB,SAAS,MAAM;AAAA,YAEhD,CAAC;AAAA,cACC;AAAA,cACA;AAAA,cACA;AAAA,YACF,EAAE,SAAS,EAAE,EAAE;AAAA,UACnB;AAAA,QACF;AAEA,aAAK,SAAS;AACd,aAAK,kBAAkB,KAAK,IAAI;AAEhC,eAAO;AAAA,MACT,CAAC;AAAA,IACL,CAAC,EACA,MAAM,MAAM,MAAS;AAGxB,UAAM,KAAK;AAAA,EACb;AACF;","names":[]}
package/dist/{chunk-ZXTQJFGL.js.map → chunk-Q6GSOHOP.js.map}
RENAMED
File without changes