llmist 0.7.0 → 1.0.0

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
package/dist/index.d.cts CHANGED
@@ -1,7 +1,7 @@
  import { ZodType, ZodTypeAny } from 'zod';
  export { z } from 'zod';
- import { A as AgentHooks, o as ModelRegistry, L as LLMMessage, P as ParameterFormat, S as StreamEvent, G as GadgetRegistry, p as LLMStreamChunk, q as GadgetExample, B as BaseGadget, r as ParsedGadgetCall, s as GadgetExecutionResult, t as ProviderAdapter, u as ModelDescriptor, v as ModelSpec, w as LLMGenerationOptions, x as LLMStream } from './mock-stream-B2qwECvd.cjs';
- export { F as AfterGadgetExecutionAction, I as AfterGadgetExecutionControllerContext, J as AfterLLMCallAction, K as AfterLLMCallControllerContext, N as AfterLLMErrorAction, y as AgentBuilder, O as AgentOptions, Q as BeforeGadgetExecutionAction, R as BeforeLLMCallAction, T as ChunkInterceptorContext, U as Controllers, ab as CostEstimate, al as DEFAULT_PROMPTS, E as EventHandlers, as as GadgetClass, V as GadgetExecutionControllerContext, at as GadgetOrClass, W as GadgetParameterInterceptorContext, X as GadgetResultInterceptorContext, H as HistoryMessage, Y as Interceptors, Z as LLMCallControllerContext, _ as LLMErrorControllerContext, aa as LLMMessageBuilder, a9 as LLMRole, a8 as LLMist, a7 as LLMistOptions, $ as MessageInterceptorContext, a as MockBuilder, d as MockManager, h as MockMatcher, i as MockMatcherContext, j as MockOptions, M as MockProviderAdapter, k as MockRegistration, l as MockResponse, n as MockStats, ac as ModelFeatures, ah as ModelIdentifierParser, ad as ModelLimits, ae as ModelPricing, a0 as ObserveChunkContext, a1 as ObserveGadgetCompleteContext, a2 as ObserveGadgetStartContext, a3 as ObserveLLMCallContext, a4 as ObserveLLMCompleteContext, a5 as ObserveLLMErrorContext, a6 as Observers, ai as PromptConfig, aj as PromptContext, ak as PromptTemplate, af as ProviderIdentifier, ao as QuickOptions, ar as StreamParser, au as TextOnlyAction, av as TextOnlyContext, aw as TextOnlyCustomHandler, ax as TextOnlyGadgetConfig, ay as TextOnlyHandler, az as TextOnlyStrategy, ag as TokenUsage, z as collectEvents, C as collectText, ap as complete, c as createMockAdapter, b as createMockClient, e as createMockStream, f as createTextMockStream, g as getMockManager, m as mockLLM, am as resolvePromptTemplate, an as resolveRulesTemplate, D as runWithHandlers, aq as stream } from './mock-stream-B2qwECvd.cjs';
+ import { A as AgentHooks, o as ModelRegistry, L as LLMMessage, S as StreamEvent, T as TokenUsage, G as GadgetRegistry, p as LLMStreamChunk, q as GadgetExample, B as BaseGadget, P as ParsedGadgetCall, r as GadgetExecutionResult, s as ProviderAdapter, t as ModelDescriptor, u as ModelSpec, v as LLMGenerationOptions, w as LLMStream } from './mock-stream-DKF5yatf.cjs';
+ export { D as AfterGadgetExecutionAction, F as AfterGadgetExecutionControllerContext, I as AfterLLMCallAction, J as AfterLLMCallControllerContext, K as AfterLLMErrorAction, x as AgentBuilder, N as AgentOptions, O as BeforeGadgetExecutionAction, Q as BeforeLLMCallAction, R as ChunkInterceptorContext, U as Controllers, ab as CostEstimate, ak as DEFAULT_PROMPTS, E as EventHandlers, aq as GadgetClass, V as GadgetExecutionControllerContext, ar as GadgetOrClass, W as GadgetParameterInterceptorContext, X as GadgetResultInterceptorContext, H as HistoryMessage, Y as Interceptors, Z as LLMCallControllerContext, _ as LLMErrorControllerContext, aa as LLMMessageBuilder, a9 as LLMRole, a8 as LLMist, a7 as LLMistOptions, $ as MessageInterceptorContext, a as MockBuilder, d as MockManager, h as MockMatcher, i as MockMatcherContext, j as MockOptions, M as MockProviderAdapter, k as MockRegistration, l as MockResponse, n as MockStats, ac as ModelFeatures, ag as ModelIdentifierParser, ad as ModelLimits, ae as ModelPricing, a0 as ObserveChunkContext, a1 as ObserveGadgetCompleteContext, a2 as ObserveGadgetStartContext, a3 as ObserveLLMCallContext, a4 as ObserveLLMCompleteContext, a5 as ObserveLLMErrorContext, a6 as Observers, ah as PromptConfig, ai as PromptContext, aj as PromptTemplate, af as ProviderIdentifier, an as QuickOptions, as as TextOnlyAction, at as TextOnlyContext, au as TextOnlyCustomHandler, av as TextOnlyGadgetConfig, aw as TextOnlyHandler, ax as TextOnlyStrategy, y as collectEvents, z as collectText, ao as complete, c as createMockAdapter, b as createMockClient, e as createMockStream, f as createTextMockStream, g as getMockManager, m as mockLLM, al as resolvePromptTemplate, am as resolveRulesTemplate, C as runWithHandlers, ap as stream } from './mock-stream-DKF5yatf.cjs';
  import { Logger, ILogObj } from 'tslog';
  import { MessageCreateParamsStreaming, MessageStreamEvent } from '@anthropic-ai/sdk/resources/messages';
  import OpenAI from 'openai';
@@ -724,11 +724,12 @@ interface IConversationManager {
  * Options for ConversationManager constructor.
  */
  interface ConversationManagerOptions {
- parameterFormat?: ParameterFormat;
  /** Custom gadget start marker prefix */
  startPrefix?: string;
  /** Custom gadget end marker prefix */
  endPrefix?: string;
+ /** Custom argument prefix for block format */
+ argPrefix?: string;
  }
  /**
  * Default implementation of IConversationManager.
@@ -738,7 +739,6 @@ declare class ConversationManager implements IConversationManager {
  private readonly baseMessages;
  private readonly initialMessages;
  private readonly historyBuilder;
- private readonly parameterFormat;
  constructor(baseMessages: LLMMessage[], initialMessages: LLMMessage[], options?: ConversationManagerOptions);
  addUserMessage(content: string): void;
  addAssistantMessage(content: string): void;
@@ -761,12 +761,12 @@ interface StreamProcessorOptions {
  iteration: number;
  /** Gadget registry for execution */
  registry: GadgetRegistry;
- /** Parameter format for parsing */
- parameterFormat: ParameterFormat;
  /** Custom gadget start prefix */
  gadgetStartPrefix?: string;
  /** Custom gadget end prefix */
  gadgetEndPrefix?: string;
+ /** Custom argument prefix for block format */
+ gadgetArgPrefix?: string;
  /** Hooks for lifecycle events */
  hooks?: AgentHooks;
  /** Logger instance */
@@ -797,12 +797,8 @@ interface StreamProcessingResult {
  didExecuteGadgets: boolean;
  /** LLM finish reason */
  finishReason: string | null;
- /** Token usage */
- usage?: {
- inputTokens: number;
- outputTokens: number;
- totalTokens: number;
- };
+ /** Token usage (including cached token counts when available) */
+ usage?: TokenUsage;
  /** The raw accumulated response text */
  rawResponse: string;
  /** The final message (after interceptors) */
@@ -1287,6 +1283,64 @@ declare class GadgetExecutor {
  executeAll(calls: ParsedGadgetCall[]): Promise<GadgetExecutionResult[]>;
  }

+ interface StreamParserOptions {
+ startPrefix?: string;
+ endPrefix?: string;
+ /** Prefix for block format arguments. Default: "!!!ARG:" */
+ argPrefix?: string;
+ }
+ declare class StreamParser {
+ private buffer;
+ private lastReportedTextLength;
+ private readonly startPrefix;
+ private readonly endPrefix;
+ private readonly argPrefix;
+ constructor(options?: StreamParserOptions);
+ private takeTextUntil;
+ /**
+ * Parse gadget name, handling both old format (name:invocationId) and new format (just name).
+ * For new format, generates a unique invocation ID.
+ */
+ private parseGadgetName;
+ /**
+ * Truncate verbose parse errors to avoid context overflow.
+ * Keeps first meaningful line and limits total length.
+ */
+ private truncateParseError;
+ /**
+ * Parse parameter string using block format
+ */
+ private parseParameters;
+ feed(chunk: string): Generator<StreamEvent>;
+ finalize(): Generator<StreamEvent>;
+ reset(): void;
+ }
+
+ /**
+ * Type-safe gadget factory with automatic parameter inference.
+ *
+ * Gadget eliminates the need for manual type assertions
+ * by automatically inferring parameter types from the Zod schema.
+ *
+ * @example
+ * ```typescript
+ * class Calculator extends Gadget({
+ * description: "Performs arithmetic operations",
+ * schema: z.object({
+ * operation: z.enum(["add", "subtract"]),
+ * a: z.number(),
+ * b: z.number(),
+ * }),
+ * }) {
+ * // ✨ params is automatically typed!
+ * execute(params: this['params']): string {
+ * const { operation, a, b } = params; // All typed!
+ * return operation === "add" ? String(a + b) : String(a - b);
+ * }
+ * }
+ * ```
+ */
+
  /**
  * Infer the TypeScript type from a Zod schema.
  */
@@ -1393,7 +1447,7 @@ declare function Gadget<TSchema extends ZodType>(config: GadgetConfig<TSchema>):
  */
  execute(params: Record<string, unknown>): string | Promise<string>;
  get instruction(): string;
- getInstruction(format?: ParameterFormat): string;
+ getInstruction(argPrefix?: string): string;
  } & {
  params: InferSchema<TSchema>;
  };
@@ -1660,6 +1714,7 @@ type GeminiChunk = {
  promptTokenCount?: number;
  candidatesTokenCount?: number;
  totalTokenCount?: number;
+ cachedContentTokenCount?: number;
  };
  };
  declare class GeminiGenerativeProvider extends BaseProviderAdapter {
@@ -1769,4 +1824,4 @@ declare class OpenAIChatProvider extends BaseProviderAdapter {
  }
  declare function createOpenAIProviderFromEnv(): OpenAIChatProvider | null;
 
- export { AgentHooks, AnthropicMessagesProvider, BaseGadget, BreakLoopException, ConversationManager, type CreateGadgetConfig, Gadget, type GadgetConfig, GadgetExample, GadgetExecutionResult, GadgetExecutor, GadgetOutputStore, GadgetRegistry, GeminiGenerativeProvider, HookPresets, HumanInputException, type IConversationManager, LLMGenerationOptions, LLMMessage, LLMStream, LLMStreamChunk, type LoggerOptions, type LoggingOptions, MODEL_ALIASES, ModelDescriptor, ModelRegistry, ModelSpec, OpenAIChatProvider, ParsedGadgetCall, ProviderAdapter, type StoredOutput, StreamEvent, type StreamProcessingResult, StreamProcessor, type StreamProcessorOptions, type ValidationIssue, type ValidationResult, createAnthropicProviderFromEnv, createGadget, createGadgetOutputViewer, createGeminiProviderFromEnv, createLogger, createOpenAIProviderFromEnv, defaultLogger, discoverProviderAdapters, getModelId, getProvider, hasProviderPrefix, resolveModel, validateAndApplyDefaults, validateGadgetParams };
+ export { AgentHooks, AnthropicMessagesProvider, BaseGadget, BreakLoopException, ConversationManager, type CreateGadgetConfig, Gadget, type GadgetConfig, GadgetExample, GadgetExecutionResult, GadgetExecutor, GadgetOutputStore, GadgetRegistry, GeminiGenerativeProvider, HookPresets, HumanInputException, type IConversationManager, LLMGenerationOptions, LLMMessage, LLMStream, LLMStreamChunk, type LoggerOptions, type LoggingOptions, MODEL_ALIASES, ModelDescriptor, ModelRegistry, ModelSpec, OpenAIChatProvider, ParsedGadgetCall, ProviderAdapter, type StoredOutput, StreamEvent, StreamParser, type StreamProcessingResult, StreamProcessor, type StreamProcessorOptions, TokenUsage, type ValidationIssue, type ValidationResult, createAnthropicProviderFromEnv, createGadget, createGadgetOutputViewer, createGeminiProviderFromEnv, createLogger, createOpenAIProviderFromEnv, defaultLogger, discoverProviderAdapters, getModelId, getProvider, hasProviderPrefix, resolveModel, validateAndApplyDefaults, validateGadgetParams };
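The most visible addition above is that StreamParser (and its StreamParserOptions) is now part of the public type surface. As a rough illustration only, the following sketch drives the parser using just the declarations shown; the concrete StreamEvent payloads and the default start/end prefixes are not part of this diff, so the handling below simply logs whatever events come out, and the sample input text is invented.

```typescript
import { StreamParser } from "llmist";

// Sketch based solely on the declarations above:
// constructor(options?), feed(chunk), finalize(), reset().
const parser = new StreamParser({
  // The declaration documents "!!!ARG:" as the default argPrefix;
  // passing it explicitly here is purely illustrative.
  argPrefix: "!!!ARG:",
});

// feed() and finalize() return Generator<StreamEvent>; the event
// variants are not shown in this diff, so we just log them.
for (const event of parser.feed("...streamed LLM output chunk...")) {
  console.log(event);
}
for (const event of parser.finalize()) {
  console.log(event);
}

parser.reset(); // ready for the next response
```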
package/dist/index.d.ts CHANGED
@@ -1,7 +1,7 @@
  import { ZodType, ZodTypeAny } from 'zod';
  export { z } from 'zod';
- import { A as AgentHooks, o as ModelRegistry, L as LLMMessage, P as ParameterFormat, S as StreamEvent, G as GadgetRegistry, p as LLMStreamChunk, q as GadgetExample, B as BaseGadget, r as ParsedGadgetCall, s as GadgetExecutionResult, t as ProviderAdapter, u as ModelDescriptor, v as ModelSpec, w as LLMGenerationOptions, x as LLMStream } from './mock-stream-B2qwECvd.js';
- export { F as AfterGadgetExecutionAction, I as AfterGadgetExecutionControllerContext, J as AfterLLMCallAction, K as AfterLLMCallControllerContext, N as AfterLLMErrorAction, y as AgentBuilder, O as AgentOptions, Q as BeforeGadgetExecutionAction, R as BeforeLLMCallAction, T as ChunkInterceptorContext, U as Controllers, ab as CostEstimate, al as DEFAULT_PROMPTS, E as EventHandlers, as as GadgetClass, V as GadgetExecutionControllerContext, at as GadgetOrClass, W as GadgetParameterInterceptorContext, X as GadgetResultInterceptorContext, H as HistoryMessage, Y as Interceptors, Z as LLMCallControllerContext, _ as LLMErrorControllerContext, aa as LLMMessageBuilder, a9 as LLMRole, a8 as LLMist, a7 as LLMistOptions, $ as MessageInterceptorContext, a as MockBuilder, d as MockManager, h as MockMatcher, i as MockMatcherContext, j as MockOptions, M as MockProviderAdapter, k as MockRegistration, l as MockResponse, n as MockStats, ac as ModelFeatures, ah as ModelIdentifierParser, ad as ModelLimits, ae as ModelPricing, a0 as ObserveChunkContext, a1 as ObserveGadgetCompleteContext, a2 as ObserveGadgetStartContext, a3 as ObserveLLMCallContext, a4 as ObserveLLMCompleteContext, a5 as ObserveLLMErrorContext, a6 as Observers, ai as PromptConfig, aj as PromptContext, ak as PromptTemplate, af as ProviderIdentifier, ao as QuickOptions, ar as StreamParser, au as TextOnlyAction, av as TextOnlyContext, aw as TextOnlyCustomHandler, ax as TextOnlyGadgetConfig, ay as TextOnlyHandler, az as TextOnlyStrategy, ag as TokenUsage, z as collectEvents, C as collectText, ap as complete, c as createMockAdapter, b as createMockClient, e as createMockStream, f as createTextMockStream, g as getMockManager, m as mockLLM, am as resolvePromptTemplate, an as resolveRulesTemplate, D as runWithHandlers, aq as stream } from './mock-stream-B2qwECvd.js';
+ import { A as AgentHooks, o as ModelRegistry, L as LLMMessage, S as StreamEvent, T as TokenUsage, G as GadgetRegistry, p as LLMStreamChunk, q as GadgetExample, B as BaseGadget, P as ParsedGadgetCall, r as GadgetExecutionResult, s as ProviderAdapter, t as ModelDescriptor, u as ModelSpec, v as LLMGenerationOptions, w as LLMStream } from './mock-stream-DKF5yatf.js';
+ export { D as AfterGadgetExecutionAction, F as AfterGadgetExecutionControllerContext, I as AfterLLMCallAction, J as AfterLLMCallControllerContext, K as AfterLLMErrorAction, x as AgentBuilder, N as AgentOptions, O as BeforeGadgetExecutionAction, Q as BeforeLLMCallAction, R as ChunkInterceptorContext, U as Controllers, ab as CostEstimate, ak as DEFAULT_PROMPTS, E as EventHandlers, aq as GadgetClass, V as GadgetExecutionControllerContext, ar as GadgetOrClass, W as GadgetParameterInterceptorContext, X as GadgetResultInterceptorContext, H as HistoryMessage, Y as Interceptors, Z as LLMCallControllerContext, _ as LLMErrorControllerContext, aa as LLMMessageBuilder, a9 as LLMRole, a8 as LLMist, a7 as LLMistOptions, $ as MessageInterceptorContext, a as MockBuilder, d as MockManager, h as MockMatcher, i as MockMatcherContext, j as MockOptions, M as MockProviderAdapter, k as MockRegistration, l as MockResponse, n as MockStats, ac as ModelFeatures, ag as ModelIdentifierParser, ad as ModelLimits, ae as ModelPricing, a0 as ObserveChunkContext, a1 as ObserveGadgetCompleteContext, a2 as ObserveGadgetStartContext, a3 as ObserveLLMCallContext, a4 as ObserveLLMCompleteContext, a5 as ObserveLLMErrorContext, a6 as Observers, ah as PromptConfig, ai as PromptContext, aj as PromptTemplate, af as ProviderIdentifier, an as QuickOptions, as as TextOnlyAction, at as TextOnlyContext, au as TextOnlyCustomHandler, av as TextOnlyGadgetConfig, aw as TextOnlyHandler, ax as TextOnlyStrategy, y as collectEvents, z as collectText, ao as complete, c as createMockAdapter, b as createMockClient, e as createMockStream, f as createTextMockStream, g as getMockManager, m as mockLLM, al as resolvePromptTemplate, am as resolveRulesTemplate, C as runWithHandlers, ap as stream } from './mock-stream-DKF5yatf.js';
  import { Logger, ILogObj } from 'tslog';
  import { MessageCreateParamsStreaming, MessageStreamEvent } from '@anthropic-ai/sdk/resources/messages';
  import OpenAI from 'openai';
@@ -724,11 +724,12 @@ interface IConversationManager {
  * Options for ConversationManager constructor.
  */
  interface ConversationManagerOptions {
- parameterFormat?: ParameterFormat;
  /** Custom gadget start marker prefix */
  startPrefix?: string;
  /** Custom gadget end marker prefix */
  endPrefix?: string;
+ /** Custom argument prefix for block format */
+ argPrefix?: string;
  }
  /**
  * Default implementation of IConversationManager.
@@ -738,7 +739,6 @@ declare class ConversationManager implements IConversationManager {
  private readonly baseMessages;
  private readonly initialMessages;
  private readonly historyBuilder;
- private readonly parameterFormat;
  constructor(baseMessages: LLMMessage[], initialMessages: LLMMessage[], options?: ConversationManagerOptions);
  addUserMessage(content: string): void;
  addAssistantMessage(content: string): void;
@@ -761,12 +761,12 @@ interface StreamProcessorOptions {
  iteration: number;
  /** Gadget registry for execution */
  registry: GadgetRegistry;
- /** Parameter format for parsing */
- parameterFormat: ParameterFormat;
  /** Custom gadget start prefix */
  gadgetStartPrefix?: string;
  /** Custom gadget end prefix */
  gadgetEndPrefix?: string;
+ /** Custom argument prefix for block format */
+ gadgetArgPrefix?: string;
  /** Hooks for lifecycle events */
  hooks?: AgentHooks;
  /** Logger instance */
@@ -797,12 +797,8 @@ interface StreamProcessingResult {
  didExecuteGadgets: boolean;
  /** LLM finish reason */
  finishReason: string | null;
- /** Token usage */
- usage?: {
- inputTokens: number;
- outputTokens: number;
- totalTokens: number;
- };
+ /** Token usage (including cached token counts when available) */
+ usage?: TokenUsage;
  /** The raw accumulated response text */
  rawResponse: string;
  /** The final message (after interceptors) */
@@ -1287,6 +1283,64 @@ declare class GadgetExecutor {
  executeAll(calls: ParsedGadgetCall[]): Promise<GadgetExecutionResult[]>;
  }

+ interface StreamParserOptions {
+ startPrefix?: string;
+ endPrefix?: string;
+ /** Prefix for block format arguments. Default: "!!!ARG:" */
+ argPrefix?: string;
+ }
+ declare class StreamParser {
+ private buffer;
+ private lastReportedTextLength;
+ private readonly startPrefix;
+ private readonly endPrefix;
+ private readonly argPrefix;
+ constructor(options?: StreamParserOptions);
+ private takeTextUntil;
+ /**
+ * Parse gadget name, handling both old format (name:invocationId) and new format (just name).
+ * For new format, generates a unique invocation ID.
+ */
+ private parseGadgetName;
+ /**
+ * Truncate verbose parse errors to avoid context overflow.
+ * Keeps first meaningful line and limits total length.
+ */
+ private truncateParseError;
+ /**
+ * Parse parameter string using block format
+ */
+ private parseParameters;
+ feed(chunk: string): Generator<StreamEvent>;
+ finalize(): Generator<StreamEvent>;
+ reset(): void;
+ }
+
+ /**
+ * Type-safe gadget factory with automatic parameter inference.
+ *
+ * Gadget eliminates the need for manual type assertions
+ * by automatically inferring parameter types from the Zod schema.
+ *
+ * @example
+ * ```typescript
+ * class Calculator extends Gadget({
+ * description: "Performs arithmetic operations",
+ * schema: z.object({
+ * operation: z.enum(["add", "subtract"]),
+ * a: z.number(),
+ * b: z.number(),
+ * }),
+ * }) {
+ * // ✨ params is automatically typed!
+ * execute(params: this['params']): string {
+ * const { operation, a, b } = params; // All typed!
+ * return operation === "add" ? String(a + b) : String(a - b);
+ * }
+ * }
+ * ```
+ */
+
  /**
  * Infer the TypeScript type from a Zod schema.
  */
@@ -1393,7 +1447,7 @@ declare function Gadget<TSchema extends ZodType>(config: GadgetConfig<TSchema>):
  */
  execute(params: Record<string, unknown>): string | Promise<string>;
  get instruction(): string;
- getInstruction(format?: ParameterFormat): string;
+ getInstruction(argPrefix?: string): string;
  } & {
  params: InferSchema<TSchema>;
  };
@@ -1660,6 +1714,7 @@ type GeminiChunk = {
  promptTokenCount?: number;
  candidatesTokenCount?: number;
  totalTokenCount?: number;
+ cachedContentTokenCount?: number;
  };
  };
  declare class GeminiGenerativeProvider extends BaseProviderAdapter {
@@ -1769,4 +1824,4 @@ declare class OpenAIChatProvider extends BaseProviderAdapter {
  }
  declare function createOpenAIProviderFromEnv(): OpenAIChatProvider | null;
 
- export { AgentHooks, AnthropicMessagesProvider, BaseGadget, BreakLoopException, ConversationManager, type CreateGadgetConfig, Gadget, type GadgetConfig, GadgetExample, GadgetExecutionResult, GadgetExecutor, GadgetOutputStore, GadgetRegistry, GeminiGenerativeProvider, HookPresets, HumanInputException, type IConversationManager, LLMGenerationOptions, LLMMessage, LLMStream, LLMStreamChunk, type LoggerOptions, type LoggingOptions, MODEL_ALIASES, ModelDescriptor, ModelRegistry, ModelSpec, OpenAIChatProvider, ParsedGadgetCall, ProviderAdapter, type StoredOutput, StreamEvent, type StreamProcessingResult, StreamProcessor, type StreamProcessorOptions, type ValidationIssue, type ValidationResult, createAnthropicProviderFromEnv, createGadget, createGadgetOutputViewer, createGeminiProviderFromEnv, createLogger, createOpenAIProviderFromEnv, defaultLogger, discoverProviderAdapters, getModelId, getProvider, hasProviderPrefix, resolveModel, validateAndApplyDefaults, validateGadgetParams };
+ export { AgentHooks, AnthropicMessagesProvider, BaseGadget, BreakLoopException, ConversationManager, type CreateGadgetConfig, Gadget, type GadgetConfig, GadgetExample, GadgetExecutionResult, GadgetExecutor, GadgetOutputStore, GadgetRegistry, GeminiGenerativeProvider, HookPresets, HumanInputException, type IConversationManager, LLMGenerationOptions, LLMMessage, LLMStream, LLMStreamChunk, type LoggerOptions, type LoggingOptions, MODEL_ALIASES, ModelDescriptor, ModelRegistry, ModelSpec, OpenAIChatProvider, ParsedGadgetCall, ProviderAdapter, type StoredOutput, StreamEvent, StreamParser, type StreamProcessingResult, StreamProcessor, type StreamProcessorOptions, TokenUsage, type ValidationIssue, type ValidationResult, createAnthropicProviderFromEnv, createGadget, createGadgetOutputViewer, createGeminiProviderFromEnv, createLogger, createOpenAIProviderFromEnv, defaultLogger, discoverProviderAdapters, getModelId, getProvider, hasProviderPrefix, resolveModel, validateAndApplyDefaults, validateGadgetParams };
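The ESM typings mirror the same changes, including the getInstruction() signature: the removed ParameterFormat argument becomes an optional argPrefix string. A hedged sketch follows, reusing the Calculator gadget from the JSDoc example in the diff; whether getInstruction() defaults to the "!!!ARG:" prefix documented on StreamParserOptions is an assumption, as is the zero-argument constructor.

```typescript
import { Gadget, z } from "llmist";

class Calculator extends Gadget({
  description: "Performs arithmetic operations",
  schema: z.object({
    operation: z.enum(["add", "subtract"]),
    a: z.number(),
    b: z.number(),
  }),
}) {
  execute(params: this["params"]): string {
    const { operation, a, b } = params;
    return operation === "add" ? String(a + b) : String(a - b);
  }
}

const calc = new Calculator(); // assumes a zero-argument constructor
// 0.7.0: calc.getInstruction(someParameterFormat)
// 1.0.0: the optional argument is now the argument prefix string.
console.log(calc.getInstruction());          // built-in default prefix
console.log(calc.getInstruction("!!!ARG:")); // explicit prefix (illustrative)
```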
package/dist/index.js CHANGED
@@ -10,7 +10,7 @@ import {
  mockLLM,
  validateAndApplyDefaults,
  validateGadgetParams
- } from "./chunk-CTC2WJZA.js";
+ } from "./chunk-53MM55JS.js";
  import {
  AgentBuilder,
  AnthropicMessagesProvider,
@@ -74,7 +74,7 @@ import {
  resolveRulesTemplate,
  runWithHandlers,
  stream
- } from "./chunk-ZFHFBEQ5.js";
+ } from "./chunk-T24KLXY4.js";

  // src/index.ts
  init_builder();
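Finally, the usage field on StreamProcessingResult is now typed as the exported TokenUsage instead of an inline object, which per its doc comment can include cached token counts (compare the new Gemini cachedContentTokenCount field). Below is a defensive consumption sketch; the inputTokens/outputTokens/totalTokens names come from the old inline type, and cachedInputTokens is a hypothetical field name not confirmed by this diff.

```typescript
import type { StreamProcessingResult } from "llmist";

// Treat usage structurally, since the concrete TokenUsage fields are not
// visible in this diff. cachedInputTokens is a hypothetical name.
function summarizeUsage(result: StreamProcessingResult): string {
  const usage = result.usage as unknown as
    | {
        inputTokens?: number;
        outputTokens?: number;
        totalTokens?: number;
        cachedInputTokens?: number;
      }
    | undefined;
  if (!usage) return "no usage reported";
  return (
    `in=${usage.inputTokens ?? 0} out=${usage.outputTokens ?? 0} ` +
    `total=${usage.totalTokens ?? 0} cached=${usage.cachedInputTokens ?? 0}`
  );
}
```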