llmist 1.7.0 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.cts CHANGED
@@ -1,7 +1,7 @@
  import { ZodType, ZodTypeAny } from 'zod';
  export { z } from 'zod';
- import { A as AgentHooks, q as ModelRegistry, I as IConversationManager, b as LLMMessage, S as StreamEvent, T as TokenUsage, G as GadgetRegistry, a as LLMStreamChunk, C as CompactionStrategy, R as ResolvedCompactionConfig, r as CompactionContext, s as CompactionResult, t as LLMist, u as CompactionConfig, v as CompactionEvent, w as CompactionStats, H as HintTemplate, x as GadgetExample, B as BaseGadget, P as ParsedGadgetCall, y as GadgetExecutionResult, z as ProviderAdapter, D as ModelDescriptor, E as ModelSpec, F as LLMGenerationOptions, L as LLMStream } from './mock-stream-BMuFlQI1.cjs';
- export { X as AfterGadgetExecutionAction, Y as AfterGadgetExecutionControllerContext, Z as AfterLLMCallAction, _ as AfterLLMCallControllerContext, $ as AfterLLMErrorAction, O as AgentBuilder, a0 as AgentOptions, a1 as BeforeGadgetExecutionAction, a2 as BeforeLLMCallAction, a3 as ChunkInterceptorContext, a4 as Controllers, aq as CostEstimate, al as DEFAULT_COMPACTION_CONFIG, aA as DEFAULT_HINTS, aB as DEFAULT_PROMPTS, am as DEFAULT_SUMMARIZATION_PROMPT, Q as EventHandlers, aI as GadgetClass, a5 as GadgetExecutionControllerContext, aJ as GadgetOrClass, a6 as GadgetParameterInterceptorContext, a7 as GadgetResultInterceptorContext, aw as HintContext, J as HistoryMessage, a8 as Interceptors, a9 as LLMCallControllerContext, aa as LLMErrorControllerContext, ap as LLMMessageBuilder, ao as LLMRole, an as LLMistOptions, ab as MessageInterceptorContext, aj as MessageTurn, d as MockBuilder, f as MockManager, j as MockMatcher, k as MockMatcherContext, l as MockOptions, M as MockProviderAdapter, n as MockRegistration, o as MockResponse, p as MockStats, ar as ModelFeatures, av as ModelIdentifierParser, as as ModelLimits, at as ModelPricing, ac as ObserveChunkContext, ak as ObserveCompactionContext, ad as ObserveGadgetCompleteContext, ae as ObserveGadgetStartContext, af as ObserveLLMCallContext, ag as ObserveLLMCompleteContext, ah as ObserveLLMErrorContext, ai as Observers, ax as PromptConfig, ay as PromptContext, az as PromptTemplate, au as ProviderIdentifier, aF as QuickOptions, aK as TextOnlyAction, aL as TextOnlyContext, aM as TextOnlyCustomHandler, aN as TextOnlyGadgetConfig, aO as TextOnlyHandler, aP as TextOnlyStrategy, K as TrailingMessage, N as TrailingMessageContext, U as collectEvents, V as collectText, aG as complete, c as createMockAdapter, e as createMockClient, h as createMockStream, i as createTextMockStream, g as getMockManager, m as mockLLM, aC as resolveHintTemplate, aD as resolvePromptTemplate, aE as resolveRulesTemplate, W as runWithHandlers, aH as stream } from './mock-stream-BMuFlQI1.cjs';
+ import { A as AgentHooks, q as ModelRegistry, I as IConversationManager, b as LLMMessage, S as StreamEvent, T as TokenUsage, G as GadgetRegistry, r as LLMist, a as LLMStreamChunk, C as CompactionStrategy, R as ResolvedCompactionConfig, s as CompactionContext, t as CompactionResult, u as CompactionConfig, v as CompactionEvent, w as CompactionStats, H as HintTemplate, E as ExecutionContext, x as GadgetExecuteReturn, y as GadgetExample, B as BaseGadget, P as ParsedGadgetCall, z as GadgetExecutionResult, D as ProviderAdapter, F as ModelDescriptor, J as ModelSpec, K as LLMGenerationOptions, L as LLMStream } from './mock-stream-BQHut0lQ.cjs';
+ export { Z as AfterGadgetExecutionAction, _ as AfterGadgetExecutionControllerContext, $ as AfterLLMCallAction, a0 as AfterLLMCallControllerContext, a1 as AfterLLMErrorAction, U as AgentBuilder, a2 as AgentOptions, a3 as BeforeGadgetExecutionAction, a4 as BeforeLLMCallAction, a5 as ChunkInterceptorContext, a6 as Controllers, as as CostEstimate, aM as CostReportingLLMist, an as DEFAULT_COMPACTION_CONFIG, aC as DEFAULT_HINTS, aD as DEFAULT_PROMPTS, ao as DEFAULT_SUMMARIZATION_PROMPT, V as EventHandlers, aK as GadgetClass, aN as GadgetExecuteResult, a7 as GadgetExecutionControllerContext, aL as GadgetOrClass, a8 as GadgetParameterInterceptorContext, a9 as GadgetResultInterceptorContext, ay as HintContext, N as HistoryMessage, aa as Interceptors, ab as LLMCallControllerContext, ac as LLMErrorControllerContext, ar as LLMMessageBuilder, aq as LLMRole, ap as LLMistOptions, ad as MessageInterceptorContext, al as MessageTurn, d as MockBuilder, f as MockManager, j as MockMatcher, k as MockMatcherContext, l as MockOptions, M as MockProviderAdapter, n as MockRegistration, o as MockResponse, p as MockStats, at as ModelFeatures, ax as ModelIdentifierParser, au as ModelLimits, av as ModelPricing, ae as ObserveChunkContext, am as ObserveCompactionContext, af as ObserveGadgetCompleteContext, ag as ObserveGadgetStartContext, ah as ObserveLLMCallContext, ai as ObserveLLMCompleteContext, aj as ObserveLLMErrorContext, ak as Observers, az as PromptConfig, aA as PromptContext, aB as PromptTemplate, aw as ProviderIdentifier, aH as QuickOptions, aO as TextOnlyAction, aP as TextOnlyContext, aQ as TextOnlyCustomHandler, aR as TextOnlyGadgetConfig, aS as TextOnlyHandler, aT as TextOnlyStrategy, O as TrailingMessage, Q as TrailingMessageContext, W as collectEvents, X as collectText, aI as complete, c as createMockAdapter, e as createMockClient, h as createMockStream, i as createTextMockStream, g as getMockManager, m as mockLLM, aE as resolveHintTemplate, aF as resolvePromptTemplate, aG as resolveRulesTemplate, Y as runWithHandlers, aJ as stream } from './mock-stream-BQHut0lQ.cjs';
  import { Logger, ILogObj } from 'tslog';
  import { MessageCreateParamsStreaming, MessageStreamEvent } from '@anthropic-ai/sdk/resources/messages';
  import OpenAI from 'openai';
@@ -85,7 +85,7 @@ interface ProgressStats {
  totalOutputTokens: number;
  /** Total tokens (input + output) */
  totalTokens: number;
- /** Cumulative cost in USD (requires modelRegistry) */
+ /** Cumulative cost in USD (includes LLM and gadget costs; requires modelRegistry for LLM cost estimation) */
  totalCost: number;
  /** Elapsed time in seconds since first call */
  elapsedSeconds: number;
@@ -787,6 +787,8 @@ interface StreamProcessorOptions {
  }) => boolean | Promise<boolean>;
  /** Default gadget timeout */
  defaultGadgetTimeoutMs?: number;
+ /** LLMist client for ExecutionContext.llmist */
+ client?: LLMist;
  }
  /**
  * Result of stream processing.
@@ -1441,8 +1443,12 @@ interface CreateGadgetConfig<TSchema extends ZodType> {
  description: string;
  /** Zod schema for parameter validation */
  schema: TSchema;
- /** Execution function with typed parameters */
- execute: (params: InferSchema$1<TSchema>) => string | Promise<string>;
+ /**
+ * Execution function with typed parameters.
+ * Can return string or { result, cost? }.
+ * Optionally receives ExecutionContext for callback-based cost reporting.
+ */
+ execute: (params: InferSchema$1<TSchema>, ctx?: ExecutionContext) => GadgetExecuteReturn | Promise<GadgetExecuteReturn>;
  /** Optional timeout in milliseconds */
  timeoutMs?: number;
  /** Optional usage examples to help LLMs understand proper invocation */
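The widened `execute` signature above is the core of the 2.0.0 gadget cost-tracking change: a gadget may still return a plain string, or it may return `{ result, cost? }` and/or report cost through the optional `ExecutionContext`. A minimal sketch of how `createGadget` could use this — the gadget names, the `name` field (which sits above this hunk), and the cost figure are illustrative assumptions, not values taken from this package:

```typescript
import { createGadget, z } from 'llmist';

// Returning a plain string keeps the 1.x behaviour (a "free" gadget).
const wordCount = createGadget({
  name: 'WordCount',
  description: 'Counts the words in a piece of text',
  schema: z.object({ text: z.string() }),
  execute: ({ text }) => String(text.trim().split(/\s+/).length),
});

// Returning { result, cost? } attaches a cost to the result; the optional
// second argument also offers ctx.reportCost(...) for callback-based reporting.
const meteredLookup = createGadget({
  name: 'MeteredLookup',
  description: 'Calls a metered external API and reports its cost',
  schema: z.object({ query: z.string() }),
  execute: async ({ query }) => ({
    result: `looked up: ${query}`,
    cost: 0.001, // USD, illustrative
  }),
});
```

Either form satisfies the declared `GadgetExecuteReturn | Promise<GadgetExecuteReturn>` return type, and gadget costs flow into the `totalCost` field documented in `ProgressStats` above.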
@@ -1591,6 +1597,38 @@ declare class HumanInputException extends Error {
  readonly question: string;
  constructor(question: string);
  }
+ /**
+ * Exception thrown when gadget execution is aborted.
+ *
+ * Gadgets can throw this exception when they detect the abort signal has been
+ * triggered. This is typically used via the `throwIfAborted()` helper method
+ * on the Gadget base class.
+ *
+ * @example
+ * ```typescript
+ * class LongRunningGadget extends Gadget({
+ * name: 'LongRunning',
+ * description: 'Performs a long operation with checkpoints',
+ * schema: z.object({ data: z.string() }),
+ * }) {
+ * async execute(params: this['params'], ctx: ExecutionContext): Promise<string> {
+ * // Check at key points - throws AbortError if aborted
+ * this.throwIfAborted(ctx);
+ *
+ * await this.doPartOne(params.data);
+ *
+ * this.throwIfAborted(ctx);
+ *
+ * await this.doPartTwo(params.data);
+ *
+ * return 'completed';
+ * }
+ * }
+ * ```
+ */
+ declare class AbortError extends Error {
+ constructor(message?: string);
+ }
 
  interface ErrorFormatterOptions {
  /** Custom argument prefix for block format examples. Default: "!!!ARG:" */
@@ -1605,14 +1643,21 @@ declare class GadgetExecutor {
  private readonly registry;
  private readonly onHumanInputRequired?;
  private readonly defaultGadgetTimeoutMs?;
+ private readonly client?;
  private readonly logger;
  private readonly errorFormatter;
  private readonly argPrefix;
- constructor(registry: GadgetRegistry, onHumanInputRequired?: ((question: string) => Promise<string>) | undefined, logger?: Logger<ILogObj>, defaultGadgetTimeoutMs?: number | undefined, errorFormatterOptions?: ErrorFormatterOptions);
+ constructor(registry: GadgetRegistry, onHumanInputRequired?: ((question: string) => Promise<string>) | undefined, logger?: Logger<ILogObj>, defaultGadgetTimeoutMs?: number | undefined, errorFormatterOptions?: ErrorFormatterOptions, client?: LLMist | undefined);
  /**
  * Creates a promise that rejects with a TimeoutException after the specified timeout.
+ * Aborts the provided AbortController before rejecting, allowing gadgets to clean up.
  */
  private createTimeoutPromise;
+ /**
+ * Normalizes gadget execute result to consistent format.
+ * Handles both string returns (backwards compat) and object returns with cost.
+ */
+ private normalizeExecuteResult;
  execute(call: ParsedGadgetCall): Promise<GadgetExecutionResult>;
  executeAll(calls: ParsedGadgetCall[]): Promise<GadgetExecutionResult[]>;
  /**
@@ -1774,18 +1819,30 @@ declare function Gadget<TSchema extends ZodType>(config: GadgetConfig<TSchema>):
  * Execute the gadget. Subclasses should cast params to this['params'].
  *
  * @param params - Validated parameters from the LLM
- * @returns Result as a string (or Promise<string> for async gadgets)
+ * @param ctx - Optional execution context for cost reporting and LLM access
+ * @returns Result as a string, or an object with result and optional cost
  *
  * @example
  * ```typescript
- * execute(params: Record<string, unknown>): string {
- * const typed = params as this['params'];
- * // Now 'typed' is fully typed!
- * return String(typed.a + typed.b);
+ * // Simple string return (free gadget)
+ * execute(params: this['params']) {
+ * return String(params.a + params.b);
+ * }
+ *
+ * // Using context for callback-based cost reporting
+ * execute(params: this['params'], ctx) {
+ * ctx.reportCost(0.001);
+ * return "result";
+ * }
+ *
+ * // Using wrapped LLMist for automatic cost tracking
+ * async execute(params: this['params'], ctx) {
+ * return ctx.llmist.complete('Summarize: ' + params.text);
  * }
  * ```
  */
- execute(params: Record<string, unknown>): string | Promise<string>;
+ execute(params: Record<string, unknown>, ctx?: ExecutionContext): GadgetExecuteReturn | Promise<GadgetExecuteReturn>;
+ throwIfAborted(ctx?: ExecutionContext): void;
  get instruction(): string;
  getInstruction(optionsOrArgPrefix?: string | {
  argPrefix?: string;
@@ -2169,4 +2226,4 @@ declare class OpenAIChatProvider extends BaseProviderAdapter {
  }
  declare function createOpenAIProviderFromEnv(): OpenAIChatProvider | null;
 
- export { AgentHooks, AnthropicMessagesProvider, BaseGadget, BreakLoopException, CompactionConfig, CompactionContext, CompactionEvent, CompactionManager, CompactionResult, CompactionStats, CompactionStrategy, ConversationManager, type CreateGadgetConfig, Gadget, type GadgetConfig, GadgetExample, GadgetExecutionResult, GadgetExecutor, GadgetOutputStore, GadgetRegistry, GeminiGenerativeProvider, HintTemplate, type HintsConfig, HookPresets, HumanInputException, HybridStrategy, IConversationManager, type IterationHintOptions, LLMGenerationOptions, LLMMessage, LLMStream, LLMStreamChunk, LLMist, type LoggerOptions, type LoggingOptions, MODEL_ALIASES, ModelDescriptor, ModelRegistry, ModelSpec, OpenAIChatProvider, type ParallelGadgetHintOptions, ParsedGadgetCall, ProviderAdapter, ResolvedCompactionConfig, SlidingWindowStrategy, type StoredOutput, StreamEvent, StreamParser, type StreamProcessingResult, StreamProcessor, type StreamProcessorOptions, SummarizationStrategy, TokenUsage, type ValidationIssue, type ValidationResult, createAnthropicProviderFromEnv, createGadget, createGadgetOutputViewer, createGeminiProviderFromEnv, createHints, createLogger, createOpenAIProviderFromEnv, defaultLogger, discoverProviderAdapters, getModelId, getProvider, hasProviderPrefix, iterationProgressHint, parallelGadgetHint, resolveModel, validateAndApplyDefaults, validateGadgetParams };
+ export { AbortError, AgentHooks, AnthropicMessagesProvider, BaseGadget, BreakLoopException, CompactionConfig, CompactionContext, CompactionEvent, CompactionManager, CompactionResult, CompactionStats, CompactionStrategy, ConversationManager, type CreateGadgetConfig, ExecutionContext, Gadget, type GadgetConfig, GadgetExample, GadgetExecuteReturn, GadgetExecutionResult, GadgetExecutor, GadgetOutputStore, GadgetRegistry, GeminiGenerativeProvider, HintTemplate, type HintsConfig, HookPresets, HumanInputException, HybridStrategy, IConversationManager, type IterationHintOptions, LLMGenerationOptions, LLMMessage, LLMStream, LLMStreamChunk, LLMist, type LoggerOptions, type LoggingOptions, MODEL_ALIASES, ModelDescriptor, ModelRegistry, ModelSpec, OpenAIChatProvider, type ParallelGadgetHintOptions, ParsedGadgetCall, ProviderAdapter, ResolvedCompactionConfig, SlidingWindowStrategy, type StoredOutput, StreamEvent, StreamParser, type StreamProcessingResult, StreamProcessor, type StreamProcessorOptions, SummarizationStrategy, TokenUsage, type ValidationIssue, type ValidationResult, createAnthropicProviderFromEnv, createGadget, createGadgetOutputViewer, createGeminiProviderFromEnv, createHints, createLogger, createOpenAIProviderFromEnv, defaultLogger, discoverProviderAdapters, getModelId, getProvider, hasProviderPrefix, iterationProgressHint, parallelGadgetHint, resolveModel, validateAndApplyDefaults, validateGadgetParams };
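For quick orientation, a sketch of the names this release adds to the package's public entry point (the same additions appear in the ESM typings below). The inline notes are inferred from the declarations above; `CostReportingLLMist` is presumably the wrapper behind `ctx.llmist`, and the shape of `GadgetExecuteResult` is not shown in this diff:

```typescript
import {
  AbortError,               // runtime class thrown when gadget execution is aborted
  type ExecutionContext,    // optional second argument to gadget execute()
  type GadgetExecuteReturn, // string or { result, cost? } - the widened execute() return type
  type CostReportingLLMist, // presumably the cost-tracking LLMist wrapper exposed as ctx.llmist
  type GadgetExecuteResult, // also new in the re-export list; shape not shown in this diff
} from 'llmist';
```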
package/dist/index.d.ts CHANGED
@@ -1,7 +1,7 @@
  import { ZodType, ZodTypeAny } from 'zod';
  export { z } from 'zod';
- import { A as AgentHooks, q as ModelRegistry, I as IConversationManager, b as LLMMessage, S as StreamEvent, T as TokenUsage, G as GadgetRegistry, a as LLMStreamChunk, C as CompactionStrategy, R as ResolvedCompactionConfig, r as CompactionContext, s as CompactionResult, t as LLMist, u as CompactionConfig, v as CompactionEvent, w as CompactionStats, H as HintTemplate, x as GadgetExample, B as BaseGadget, P as ParsedGadgetCall, y as GadgetExecutionResult, z as ProviderAdapter, D as ModelDescriptor, E as ModelSpec, F as LLMGenerationOptions, L as LLMStream } from './mock-stream-BMuFlQI1.js';
- export { X as AfterGadgetExecutionAction, Y as AfterGadgetExecutionControllerContext, Z as AfterLLMCallAction, _ as AfterLLMCallControllerContext, $ as AfterLLMErrorAction, O as AgentBuilder, a0 as AgentOptions, a1 as BeforeGadgetExecutionAction, a2 as BeforeLLMCallAction, a3 as ChunkInterceptorContext, a4 as Controllers, aq as CostEstimate, al as DEFAULT_COMPACTION_CONFIG, aA as DEFAULT_HINTS, aB as DEFAULT_PROMPTS, am as DEFAULT_SUMMARIZATION_PROMPT, Q as EventHandlers, aI as GadgetClass, a5 as GadgetExecutionControllerContext, aJ as GadgetOrClass, a6 as GadgetParameterInterceptorContext, a7 as GadgetResultInterceptorContext, aw as HintContext, J as HistoryMessage, a8 as Interceptors, a9 as LLMCallControllerContext, aa as LLMErrorControllerContext, ap as LLMMessageBuilder, ao as LLMRole, an as LLMistOptions, ab as MessageInterceptorContext, aj as MessageTurn, d as MockBuilder, f as MockManager, j as MockMatcher, k as MockMatcherContext, l as MockOptions, M as MockProviderAdapter, n as MockRegistration, o as MockResponse, p as MockStats, ar as ModelFeatures, av as ModelIdentifierParser, as as ModelLimits, at as ModelPricing, ac as ObserveChunkContext, ak as ObserveCompactionContext, ad as ObserveGadgetCompleteContext, ae as ObserveGadgetStartContext, af as ObserveLLMCallContext, ag as ObserveLLMCompleteContext, ah as ObserveLLMErrorContext, ai as Observers, ax as PromptConfig, ay as PromptContext, az as PromptTemplate, au as ProviderIdentifier, aF as QuickOptions, aK as TextOnlyAction, aL as TextOnlyContext, aM as TextOnlyCustomHandler, aN as TextOnlyGadgetConfig, aO as TextOnlyHandler, aP as TextOnlyStrategy, K as TrailingMessage, N as TrailingMessageContext, U as collectEvents, V as collectText, aG as complete, c as createMockAdapter, e as createMockClient, h as createMockStream, i as createTextMockStream, g as getMockManager, m as mockLLM, aC as resolveHintTemplate, aD as resolvePromptTemplate, aE as resolveRulesTemplate, W as runWithHandlers, aH as stream } from './mock-stream-BMuFlQI1.js';
+ import { A as AgentHooks, q as ModelRegistry, I as IConversationManager, b as LLMMessage, S as StreamEvent, T as TokenUsage, G as GadgetRegistry, r as LLMist, a as LLMStreamChunk, C as CompactionStrategy, R as ResolvedCompactionConfig, s as CompactionContext, t as CompactionResult, u as CompactionConfig, v as CompactionEvent, w as CompactionStats, H as HintTemplate, E as ExecutionContext, x as GadgetExecuteReturn, y as GadgetExample, B as BaseGadget, P as ParsedGadgetCall, z as GadgetExecutionResult, D as ProviderAdapter, F as ModelDescriptor, J as ModelSpec, K as LLMGenerationOptions, L as LLMStream } from './mock-stream-BQHut0lQ.js';
+ export { Z as AfterGadgetExecutionAction, _ as AfterGadgetExecutionControllerContext, $ as AfterLLMCallAction, a0 as AfterLLMCallControllerContext, a1 as AfterLLMErrorAction, U as AgentBuilder, a2 as AgentOptions, a3 as BeforeGadgetExecutionAction, a4 as BeforeLLMCallAction, a5 as ChunkInterceptorContext, a6 as Controllers, as as CostEstimate, aM as CostReportingLLMist, an as DEFAULT_COMPACTION_CONFIG, aC as DEFAULT_HINTS, aD as DEFAULT_PROMPTS, ao as DEFAULT_SUMMARIZATION_PROMPT, V as EventHandlers, aK as GadgetClass, aN as GadgetExecuteResult, a7 as GadgetExecutionControllerContext, aL as GadgetOrClass, a8 as GadgetParameterInterceptorContext, a9 as GadgetResultInterceptorContext, ay as HintContext, N as HistoryMessage, aa as Interceptors, ab as LLMCallControllerContext, ac as LLMErrorControllerContext, ar as LLMMessageBuilder, aq as LLMRole, ap as LLMistOptions, ad as MessageInterceptorContext, al as MessageTurn, d as MockBuilder, f as MockManager, j as MockMatcher, k as MockMatcherContext, l as MockOptions, M as MockProviderAdapter, n as MockRegistration, o as MockResponse, p as MockStats, at as ModelFeatures, ax as ModelIdentifierParser, au as ModelLimits, av as ModelPricing, ae as ObserveChunkContext, am as ObserveCompactionContext, af as ObserveGadgetCompleteContext, ag as ObserveGadgetStartContext, ah as ObserveLLMCallContext, ai as ObserveLLMCompleteContext, aj as ObserveLLMErrorContext, ak as Observers, az as PromptConfig, aA as PromptContext, aB as PromptTemplate, aw as ProviderIdentifier, aH as QuickOptions, aO as TextOnlyAction, aP as TextOnlyContext, aQ as TextOnlyCustomHandler, aR as TextOnlyGadgetConfig, aS as TextOnlyHandler, aT as TextOnlyStrategy, O as TrailingMessage, Q as TrailingMessageContext, W as collectEvents, X as collectText, aI as complete, c as createMockAdapter, e as createMockClient, h as createMockStream, i as createTextMockStream, g as getMockManager, m as mockLLM, aE as resolveHintTemplate, aF as resolvePromptTemplate, aG as resolveRulesTemplate, Y as runWithHandlers, aJ as stream } from './mock-stream-BQHut0lQ.js';
  import { Logger, ILogObj } from 'tslog';
  import { MessageCreateParamsStreaming, MessageStreamEvent } from '@anthropic-ai/sdk/resources/messages';
  import OpenAI from 'openai';
@@ -85,7 +85,7 @@ interface ProgressStats {
  totalOutputTokens: number;
  /** Total tokens (input + output) */
  totalTokens: number;
- /** Cumulative cost in USD (requires modelRegistry) */
+ /** Cumulative cost in USD (includes LLM and gadget costs; requires modelRegistry for LLM cost estimation) */
  totalCost: number;
  /** Elapsed time in seconds since first call */
  elapsedSeconds: number;
@@ -787,6 +787,8 @@ interface StreamProcessorOptions {
  }) => boolean | Promise<boolean>;
  /** Default gadget timeout */
  defaultGadgetTimeoutMs?: number;
+ /** LLMist client for ExecutionContext.llmist */
+ client?: LLMist;
  }
  /**
  * Result of stream processing.
@@ -1441,8 +1443,12 @@ interface CreateGadgetConfig<TSchema extends ZodType> {
  description: string;
  /** Zod schema for parameter validation */
  schema: TSchema;
- /** Execution function with typed parameters */
- execute: (params: InferSchema$1<TSchema>) => string | Promise<string>;
+ /**
+ * Execution function with typed parameters.
+ * Can return string or { result, cost? }.
+ * Optionally receives ExecutionContext for callback-based cost reporting.
+ */
+ execute: (params: InferSchema$1<TSchema>, ctx?: ExecutionContext) => GadgetExecuteReturn | Promise<GadgetExecuteReturn>;
  /** Optional timeout in milliseconds */
  timeoutMs?: number;
  /** Optional usage examples to help LLMs understand proper invocation */
@@ -1591,6 +1597,38 @@ declare class HumanInputException extends Error {
  readonly question: string;
  constructor(question: string);
  }
+ /**
+ * Exception thrown when gadget execution is aborted.
+ *
+ * Gadgets can throw this exception when they detect the abort signal has been
+ * triggered. This is typically used via the `throwIfAborted()` helper method
+ * on the Gadget base class.
+ *
+ * @example
+ * ```typescript
+ * class LongRunningGadget extends Gadget({
+ * name: 'LongRunning',
+ * description: 'Performs a long operation with checkpoints',
+ * schema: z.object({ data: z.string() }),
+ * }) {
+ * async execute(params: this['params'], ctx: ExecutionContext): Promise<string> {
+ * // Check at key points - throws AbortError if aborted
+ * this.throwIfAborted(ctx);
+ *
+ * await this.doPartOne(params.data);
+ *
+ * this.throwIfAborted(ctx);
+ *
+ * await this.doPartTwo(params.data);
+ *
+ * return 'completed';
+ * }
+ * }
+ * ```
+ */
+ declare class AbortError extends Error {
+ constructor(message?: string);
+ }
 
  interface ErrorFormatterOptions {
  /** Custom argument prefix for block format examples. Default: "!!!ARG:" */
@@ -1605,14 +1643,21 @@ declare class GadgetExecutor {
  private readonly registry;
  private readonly onHumanInputRequired?;
  private readonly defaultGadgetTimeoutMs?;
+ private readonly client?;
  private readonly logger;
  private readonly errorFormatter;
  private readonly argPrefix;
- constructor(registry: GadgetRegistry, onHumanInputRequired?: ((question: string) => Promise<string>) | undefined, logger?: Logger<ILogObj>, defaultGadgetTimeoutMs?: number | undefined, errorFormatterOptions?: ErrorFormatterOptions);
+ constructor(registry: GadgetRegistry, onHumanInputRequired?: ((question: string) => Promise<string>) | undefined, logger?: Logger<ILogObj>, defaultGadgetTimeoutMs?: number | undefined, errorFormatterOptions?: ErrorFormatterOptions, client?: LLMist | undefined);
  /**
  * Creates a promise that rejects with a TimeoutException after the specified timeout.
+ * Aborts the provided AbortController before rejecting, allowing gadgets to clean up.
  */
  private createTimeoutPromise;
+ /**
+ * Normalizes gadget execute result to consistent format.
+ * Handles both string returns (backwards compat) and object returns with cost.
+ */
+ private normalizeExecuteResult;
  execute(call: ParsedGadgetCall): Promise<GadgetExecutionResult>;
  executeAll(calls: ParsedGadgetCall[]): Promise<GadgetExecutionResult[]>;
  /**
@@ -1774,18 +1819,30 @@ declare function Gadget<TSchema extends ZodType>(config: GadgetConfig<TSchema>):
  * Execute the gadget. Subclasses should cast params to this['params'].
  *
  * @param params - Validated parameters from the LLM
- * @returns Result as a string (or Promise<string> for async gadgets)
+ * @param ctx - Optional execution context for cost reporting and LLM access
+ * @returns Result as a string, or an object with result and optional cost
  *
  * @example
  * ```typescript
- * execute(params: Record<string, unknown>): string {
- * const typed = params as this['params'];
- * // Now 'typed' is fully typed!
- * return String(typed.a + typed.b);
+ * // Simple string return (free gadget)
+ * execute(params: this['params']) {
+ * return String(params.a + params.b);
+ * }
+ *
+ * // Using context for callback-based cost reporting
+ * execute(params: this['params'], ctx) {
+ * ctx.reportCost(0.001);
+ * return "result";
+ * }
+ *
+ * // Using wrapped LLMist for automatic cost tracking
+ * async execute(params: this['params'], ctx) {
+ * return ctx.llmist.complete('Summarize: ' + params.text);
  * }
  * ```
  */
- execute(params: Record<string, unknown>): string | Promise<string>;
+ execute(params: Record<string, unknown>, ctx?: ExecutionContext): GadgetExecuteReturn | Promise<GadgetExecuteReturn>;
+ throwIfAborted(ctx?: ExecutionContext): void;
  get instruction(): string;
  getInstruction(optionsOrArgPrefix?: string | {
  argPrefix?: string;
@@ -2169,4 +2226,4 @@ declare class OpenAIChatProvider extends BaseProviderAdapter {
  }
  declare function createOpenAIProviderFromEnv(): OpenAIChatProvider | null;
 
- export { AgentHooks, AnthropicMessagesProvider, BaseGadget, BreakLoopException, CompactionConfig, CompactionContext, CompactionEvent, CompactionManager, CompactionResult, CompactionStats, CompactionStrategy, ConversationManager, type CreateGadgetConfig, Gadget, type GadgetConfig, GadgetExample, GadgetExecutionResult, GadgetExecutor, GadgetOutputStore, GadgetRegistry, GeminiGenerativeProvider, HintTemplate, type HintsConfig, HookPresets, HumanInputException, HybridStrategy, IConversationManager, type IterationHintOptions, LLMGenerationOptions, LLMMessage, LLMStream, LLMStreamChunk, LLMist, type LoggerOptions, type LoggingOptions, MODEL_ALIASES, ModelDescriptor, ModelRegistry, ModelSpec, OpenAIChatProvider, type ParallelGadgetHintOptions, ParsedGadgetCall, ProviderAdapter, ResolvedCompactionConfig, SlidingWindowStrategy, type StoredOutput, StreamEvent, StreamParser, type StreamProcessingResult, StreamProcessor, type StreamProcessorOptions, SummarizationStrategy, TokenUsage, type ValidationIssue, type ValidationResult, createAnthropicProviderFromEnv, createGadget, createGadgetOutputViewer, createGeminiProviderFromEnv, createHints, createLogger, createOpenAIProviderFromEnv, defaultLogger, discoverProviderAdapters, getModelId, getProvider, hasProviderPrefix, iterationProgressHint, parallelGadgetHint, resolveModel, validateAndApplyDefaults, validateGadgetParams };
+ export { AbortError, AgentHooks, AnthropicMessagesProvider, BaseGadget, BreakLoopException, CompactionConfig, CompactionContext, CompactionEvent, CompactionManager, CompactionResult, CompactionStats, CompactionStrategy, ConversationManager, type CreateGadgetConfig, ExecutionContext, Gadget, type GadgetConfig, GadgetExample, GadgetExecuteReturn, GadgetExecutionResult, GadgetExecutor, GadgetOutputStore, GadgetRegistry, GeminiGenerativeProvider, HintTemplate, type HintsConfig, HookPresets, HumanInputException, HybridStrategy, IConversationManager, type IterationHintOptions, LLMGenerationOptions, LLMMessage, LLMStream, LLMStreamChunk, LLMist, type LoggerOptions, type LoggingOptions, MODEL_ALIASES, ModelDescriptor, ModelRegistry, ModelSpec, OpenAIChatProvider, type ParallelGadgetHintOptions, ParsedGadgetCall, ProviderAdapter, ResolvedCompactionConfig, SlidingWindowStrategy, type StoredOutput, StreamEvent, StreamParser, type StreamProcessingResult, StreamProcessor, type StreamProcessorOptions, SummarizationStrategy, TokenUsage, type ValidationIssue, type ValidationResult, createAnthropicProviderFromEnv, createGadget, createGadgetOutputViewer, createGeminiProviderFromEnv, createHints, createLogger, createOpenAIProviderFromEnv, defaultLogger, discoverProviderAdapters, getModelId, getProvider, hasProviderPrefix, iterationProgressHint, parallelGadgetHint, resolveModel, validateAndApplyDefaults, validateGadgetParams };
package/dist/index.js CHANGED
@@ -5,8 +5,9 @@ import {
  iterationProgressHint,
  parallelGadgetHint,
  z
- } from "./chunk-JGORHSHC.js";
+ } from "./chunk-LFSIEPAE.js";
  import {
+ AbortError,
  AgentBuilder,
  AnthropicMessagesProvider,
  BaseGadget,
@@ -64,8 +65,9 @@ import {
  stream,
  validateAndApplyDefaults,
  validateGadgetParams
- } from "./chunk-E52IO2NO.js";
+ } from "./chunk-LBHWVCZ2.js";
  export {
+ AbortError,
  AgentBuilder,
  AnthropicMessagesProvider,
  BaseGadget,