llmist 2.0.0 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-LFSIEPAE.js → chunk-LSCCBXS7.js} +2 -2
- package/dist/{chunk-LBHWVCZ2.js → chunk-PDYVT3FI.js} +106 -1
- package/dist/chunk-PDYVT3FI.js.map +1 -0
- package/dist/cli.cjs +266 -96
- package/dist/cli.cjs.map +1 -1
- package/dist/cli.js +163 -98
- package/dist/cli.js.map +1 -1
- package/dist/index.cjs +105 -0
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +4 -2
- package/dist/index.d.ts +4 -2
- package/dist/index.js +2 -2
- package/dist/{mock-stream-BQHut0lQ.d.cts → mock-stream-HF7MBNhi.d.cts} +82 -6
- package/dist/{mock-stream-BQHut0lQ.d.ts → mock-stream-HF7MBNhi.d.ts} +82 -6
- package/dist/testing/index.cjs +105 -0
- package/dist/testing/index.cjs.map +1 -1
- package/dist/testing/index.d.cts +2 -2
- package/dist/testing/index.d.ts +2 -2
- package/dist/testing/index.js +1 -1
- package/package.json +1 -1
- package/dist/chunk-LBHWVCZ2.js.map +0 -1
- package/dist/{chunk-LFSIEPAE.js.map → chunk-LSCCBXS7.js.map} +0 -0
package/dist/index.d.cts
CHANGED
@@ -1,7 +1,7 @@
 import { ZodType, ZodTypeAny } from 'zod';
 export { z } from 'zod';
-import { A as AgentHooks, q as ModelRegistry, I as IConversationManager, b as LLMMessage, S as StreamEvent, T as TokenUsage, G as GadgetRegistry, r as LLMist, a as LLMStreamChunk, C as CompactionStrategy, R as ResolvedCompactionConfig, s as CompactionContext, t as CompactionResult, u as CompactionConfig, v as CompactionEvent, w as CompactionStats, H as HintTemplate, E as ExecutionContext, x as GadgetExecuteReturn, y as GadgetExample, B as BaseGadget, P as ParsedGadgetCall, z as GadgetExecutionResult, D as ProviderAdapter, F as ModelDescriptor, J as ModelSpec, K as LLMGenerationOptions, L as LLMStream } from './mock-stream-
-export { Z as AfterGadgetExecutionAction, _ as AfterGadgetExecutionControllerContext, $ as AfterLLMCallAction, a0 as AfterLLMCallControllerContext, a1 as AfterLLMErrorAction, U as AgentBuilder, a2 as AgentOptions, a3 as BeforeGadgetExecutionAction, a4 as BeforeLLMCallAction, a5 as ChunkInterceptorContext, a6 as Controllers, as as CostEstimate, aM as CostReportingLLMist, an as DEFAULT_COMPACTION_CONFIG, aC as DEFAULT_HINTS, aD as DEFAULT_PROMPTS, ao as DEFAULT_SUMMARIZATION_PROMPT, V as EventHandlers, aK as GadgetClass, aN as GadgetExecuteResult, a7 as GadgetExecutionControllerContext, aL as GadgetOrClass, a8 as GadgetParameterInterceptorContext, a9 as GadgetResultInterceptorContext, ay as HintContext, N as HistoryMessage, aa as Interceptors, ab as LLMCallControllerContext, ac as LLMErrorControllerContext, ar as LLMMessageBuilder, aq as LLMRole, ap as LLMistOptions, ad as MessageInterceptorContext, al as MessageTurn, d as MockBuilder, f as MockManager, j as MockMatcher, k as MockMatcherContext, l as MockOptions, M as MockProviderAdapter, n as MockRegistration, o as MockResponse, p as MockStats, at as ModelFeatures, ax as ModelIdentifierParser, au as ModelLimits, av as ModelPricing, ae as ObserveChunkContext, am as ObserveCompactionContext, af as ObserveGadgetCompleteContext, ag as ObserveGadgetStartContext, ah as ObserveLLMCallContext, ai as ObserveLLMCompleteContext, aj as ObserveLLMErrorContext, ak as Observers, az as PromptConfig, aA as PromptContext, aB as PromptTemplate, aw as ProviderIdentifier, aH as QuickOptions, aO as TextOnlyAction, aP as TextOnlyContext, aQ as TextOnlyCustomHandler, aR as TextOnlyGadgetConfig, aS as TextOnlyHandler, aT as TextOnlyStrategy, O as TrailingMessage, Q as TrailingMessageContext, W as collectEvents, X as collectText, aI as complete, c as createMockAdapter, e as createMockClient, h as createMockStream, i as createTextMockStream, g as getMockManager, m as mockLLM, aE as resolveHintTemplate, aF as resolvePromptTemplate, aG as resolveRulesTemplate, Y as runWithHandlers, aJ as stream } from './mock-stream-
+import { A as AgentHooks, q as ModelRegistry, I as IConversationManager, b as LLMMessage, S as StreamEvent, T as TokenUsage, G as GadgetRegistry, r as LLMist, a as LLMStreamChunk, C as CompactionStrategy, R as ResolvedCompactionConfig, s as CompactionContext, t as CompactionResult, u as CompactionConfig, v as CompactionEvent, w as CompactionStats, H as HintTemplate, E as ExecutionContext, x as GadgetExecuteReturn, y as GadgetExample, B as BaseGadget, P as ParsedGadgetCall, z as GadgetExecutionResult, D as ProviderAdapter, F as ModelDescriptor, J as ModelSpec, K as LLMGenerationOptions, L as LLMStream } from './mock-stream-HF7MBNhi.cjs';
+export { Z as AfterGadgetExecutionAction, _ as AfterGadgetExecutionControllerContext, $ as AfterLLMCallAction, a0 as AfterLLMCallControllerContext, a1 as AfterLLMErrorAction, U as AgentBuilder, a2 as AgentOptions, a3 as BeforeGadgetExecutionAction, a4 as BeforeLLMCallAction, a5 as ChunkInterceptorContext, a6 as Controllers, as as CostEstimate, aM as CostReportingLLMist, an as DEFAULT_COMPACTION_CONFIG, aC as DEFAULT_HINTS, aD as DEFAULT_PROMPTS, ao as DEFAULT_SUMMARIZATION_PROMPT, V as EventHandlers, aK as GadgetClass, aN as GadgetExecuteResult, a7 as GadgetExecutionControllerContext, aL as GadgetOrClass, a8 as GadgetParameterInterceptorContext, a9 as GadgetResultInterceptorContext, ay as HintContext, N as HistoryMessage, aa as Interceptors, ab as LLMCallControllerContext, ac as LLMErrorControllerContext, ar as LLMMessageBuilder, aq as LLMRole, ap as LLMistOptions, ad as MessageInterceptorContext, al as MessageTurn, d as MockBuilder, f as MockManager, j as MockMatcher, k as MockMatcherContext, l as MockOptions, M as MockProviderAdapter, n as MockRegistration, o as MockResponse, p as MockStats, at as ModelFeatures, ax as ModelIdentifierParser, au as ModelLimits, av as ModelPricing, ae as ObserveChunkContext, am as ObserveCompactionContext, af as ObserveGadgetCompleteContext, ag as ObserveGadgetStartContext, ah as ObserveLLMCallContext, ai as ObserveLLMCompleteContext, aj as ObserveLLMErrorContext, ak as Observers, az as PromptConfig, aA as PromptContext, aB as PromptTemplate, aw as ProviderIdentifier, aH as QuickOptions, aO as TextOnlyAction, aP as TextOnlyContext, aQ as TextOnlyCustomHandler, aR as TextOnlyGadgetConfig, aS as TextOnlyHandler, aT as TextOnlyStrategy, O as TrailingMessage, Q as TrailingMessageContext, W as collectEvents, X as collectText, aI as complete, c as createMockAdapter, e as createMockClient, h as createMockStream, i as createTextMockStream, g as getMockManager, m as mockLLM, aE as resolveHintTemplate, aF as resolvePromptTemplate, aG as resolveRulesTemplate, Y as runWithHandlers, aJ as stream } from './mock-stream-HF7MBNhi.cjs';
 import { Logger, ILogObj } from 'tslog';
 import { MessageCreateParamsStreaming, MessageStreamEvent } from '@anthropic-ai/sdk/resources/messages';
 import OpenAI from 'openai';
@@ -1843,6 +1843,8 @@ declare function Gadget<TSchema extends ZodType>(config: GadgetConfig<TSchema>):
 */
 execute(params: Record<string, unknown>, ctx?: ExecutionContext): GadgetExecuteReturn | Promise<GadgetExecuteReturn>;
 throwIfAborted(ctx?: ExecutionContext): void;
+onAbort(ctx: ExecutionContext | undefined, cleanup: () => void | Promise<void>): void;
+createLinkedAbortController(ctx?: ExecutionContext): AbortController;
 get instruction(): string;
 getInstruction(optionsOrArgPrefix?: string | {
 argPrefix?: string;
package/dist/index.d.ts
CHANGED
@@ -1,7 +1,7 @@
 import { ZodType, ZodTypeAny } from 'zod';
 export { z } from 'zod';
-import { A as AgentHooks, q as ModelRegistry, I as IConversationManager, b as LLMMessage, S as StreamEvent, T as TokenUsage, G as GadgetRegistry, r as LLMist, a as LLMStreamChunk, C as CompactionStrategy, R as ResolvedCompactionConfig, s as CompactionContext, t as CompactionResult, u as CompactionConfig, v as CompactionEvent, w as CompactionStats, H as HintTemplate, E as ExecutionContext, x as GadgetExecuteReturn, y as GadgetExample, B as BaseGadget, P as ParsedGadgetCall, z as GadgetExecutionResult, D as ProviderAdapter, F as ModelDescriptor, J as ModelSpec, K as LLMGenerationOptions, L as LLMStream } from './mock-stream-
-export { Z as AfterGadgetExecutionAction, _ as AfterGadgetExecutionControllerContext, $ as AfterLLMCallAction, a0 as AfterLLMCallControllerContext, a1 as AfterLLMErrorAction, U as AgentBuilder, a2 as AgentOptions, a3 as BeforeGadgetExecutionAction, a4 as BeforeLLMCallAction, a5 as ChunkInterceptorContext, a6 as Controllers, as as CostEstimate, aM as CostReportingLLMist, an as DEFAULT_COMPACTION_CONFIG, aC as DEFAULT_HINTS, aD as DEFAULT_PROMPTS, ao as DEFAULT_SUMMARIZATION_PROMPT, V as EventHandlers, aK as GadgetClass, aN as GadgetExecuteResult, a7 as GadgetExecutionControllerContext, aL as GadgetOrClass, a8 as GadgetParameterInterceptorContext, a9 as GadgetResultInterceptorContext, ay as HintContext, N as HistoryMessage, aa as Interceptors, ab as LLMCallControllerContext, ac as LLMErrorControllerContext, ar as LLMMessageBuilder, aq as LLMRole, ap as LLMistOptions, ad as MessageInterceptorContext, al as MessageTurn, d as MockBuilder, f as MockManager, j as MockMatcher, k as MockMatcherContext, l as MockOptions, M as MockProviderAdapter, n as MockRegistration, o as MockResponse, p as MockStats, at as ModelFeatures, ax as ModelIdentifierParser, au as ModelLimits, av as ModelPricing, ae as ObserveChunkContext, am as ObserveCompactionContext, af as ObserveGadgetCompleteContext, ag as ObserveGadgetStartContext, ah as ObserveLLMCallContext, ai as ObserveLLMCompleteContext, aj as ObserveLLMErrorContext, ak as Observers, az as PromptConfig, aA as PromptContext, aB as PromptTemplate, aw as ProviderIdentifier, aH as QuickOptions, aO as TextOnlyAction, aP as TextOnlyContext, aQ as TextOnlyCustomHandler, aR as TextOnlyGadgetConfig, aS as TextOnlyHandler, aT as TextOnlyStrategy, O as TrailingMessage, Q as TrailingMessageContext, W as collectEvents, X as collectText, aI as complete, c as createMockAdapter, e as createMockClient, h as createMockStream, i as createTextMockStream, g as getMockManager, m as mockLLM, aE as resolveHintTemplate, aF as resolvePromptTemplate, aG as resolveRulesTemplate, Y as runWithHandlers, aJ as stream } from './mock-stream-
+import { A as AgentHooks, q as ModelRegistry, I as IConversationManager, b as LLMMessage, S as StreamEvent, T as TokenUsage, G as GadgetRegistry, r as LLMist, a as LLMStreamChunk, C as CompactionStrategy, R as ResolvedCompactionConfig, s as CompactionContext, t as CompactionResult, u as CompactionConfig, v as CompactionEvent, w as CompactionStats, H as HintTemplate, E as ExecutionContext, x as GadgetExecuteReturn, y as GadgetExample, B as BaseGadget, P as ParsedGadgetCall, z as GadgetExecutionResult, D as ProviderAdapter, F as ModelDescriptor, J as ModelSpec, K as LLMGenerationOptions, L as LLMStream } from './mock-stream-HF7MBNhi.js';
+export { Z as AfterGadgetExecutionAction, _ as AfterGadgetExecutionControllerContext, $ as AfterLLMCallAction, a0 as AfterLLMCallControllerContext, a1 as AfterLLMErrorAction, U as AgentBuilder, a2 as AgentOptions, a3 as BeforeGadgetExecutionAction, a4 as BeforeLLMCallAction, a5 as ChunkInterceptorContext, a6 as Controllers, as as CostEstimate, aM as CostReportingLLMist, an as DEFAULT_COMPACTION_CONFIG, aC as DEFAULT_HINTS, aD as DEFAULT_PROMPTS, ao as DEFAULT_SUMMARIZATION_PROMPT, V as EventHandlers, aK as GadgetClass, aN as GadgetExecuteResult, a7 as GadgetExecutionControllerContext, aL as GadgetOrClass, a8 as GadgetParameterInterceptorContext, a9 as GadgetResultInterceptorContext, ay as HintContext, N as HistoryMessage, aa as Interceptors, ab as LLMCallControllerContext, ac as LLMErrorControllerContext, ar as LLMMessageBuilder, aq as LLMRole, ap as LLMistOptions, ad as MessageInterceptorContext, al as MessageTurn, d as MockBuilder, f as MockManager, j as MockMatcher, k as MockMatcherContext, l as MockOptions, M as MockProviderAdapter, n as MockRegistration, o as MockResponse, p as MockStats, at as ModelFeatures, ax as ModelIdentifierParser, au as ModelLimits, av as ModelPricing, ae as ObserveChunkContext, am as ObserveCompactionContext, af as ObserveGadgetCompleteContext, ag as ObserveGadgetStartContext, ah as ObserveLLMCallContext, ai as ObserveLLMCompleteContext, aj as ObserveLLMErrorContext, ak as Observers, az as PromptConfig, aA as PromptContext, aB as PromptTemplate, aw as ProviderIdentifier, aH as QuickOptions, aO as TextOnlyAction, aP as TextOnlyContext, aQ as TextOnlyCustomHandler, aR as TextOnlyGadgetConfig, aS as TextOnlyHandler, aT as TextOnlyStrategy, O as TrailingMessage, Q as TrailingMessageContext, W as collectEvents, X as collectText, aI as complete, c as createMockAdapter, e as createMockClient, h as createMockStream, i as createTextMockStream, g as getMockManager, m as mockLLM, aE as resolveHintTemplate, aF as resolvePromptTemplate, aG as resolveRulesTemplate, Y as runWithHandlers, aJ as stream } from './mock-stream-HF7MBNhi.js';
 import { Logger, ILogObj } from 'tslog';
 import { MessageCreateParamsStreaming, MessageStreamEvent } from '@anthropic-ai/sdk/resources/messages';
 import OpenAI from 'openai';
@@ -1843,6 +1843,8 @@ declare function Gadget<TSchema extends ZodType>(config: GadgetConfig<TSchema>):
 */
 execute(params: Record<string, unknown>, ctx?: ExecutionContext): GadgetExecuteReturn | Promise<GadgetExecuteReturn>;
 throwIfAborted(ctx?: ExecutionContext): void;
+onAbort(ctx: ExecutionContext | undefined, cleanup: () => void | Promise<void>): void;
+createLinkedAbortController(ctx?: ExecutionContext): AbortController;
 get instruction(): string;
 getInstruction(optionsOrArgPrefix?: string | {
 argPrefix?: string;
package/dist/index.js
CHANGED
@@ -5,7 +5,7 @@ import {
 iterationProgressHint,
 parallelGadgetHint,
 z
-} from "./chunk-
+} from "./chunk-LSCCBXS7.js";
 import {
 AbortError,
 AgentBuilder,
@@ -65,7 +65,7 @@ import {
 stream,
 validateAndApplyDefaults,
 validateGadgetParams
-} from "./chunk-
+} from "./chunk-PDYVT3FI.js";
 export {
 AbortError,
 AgentBuilder,
package/dist/mock-stream-HF7MBNhi.d.cts
CHANGED
@@ -998,6 +998,67 @@ declare abstract class BaseGadget {
 * ```
 */
 throwIfAborted(ctx?: ExecutionContext): void;
+/**
+* Register a cleanup function to run when execution is aborted (timeout or cancellation).
+* The cleanup function is called immediately if the signal is already aborted.
+* Errors thrown by the cleanup function are silently ignored.
+*
+* Use this to clean up resources like browser instances, database connections,
+* or child processes when the gadget is cancelled due to timeout.
+*
+* @param ctx - The execution context containing the abort signal
+* @param cleanup - Function to run on abort (can be sync or async)
+*
+* @example
+* ```typescript
+* class BrowserGadget extends Gadget({
+* description: 'Fetches web page content',
+* schema: z.object({ url: z.string() }),
+* }) {
+* async execute(params: this['params'], ctx?: ExecutionContext): Promise<string> {
+* const browser = await chromium.launch();
+* this.onAbort(ctx, () => browser.close());
+*
+* const page = await browser.newPage();
+* this.onAbort(ctx, () => page.close());
+*
+* await page.goto(params.url);
+* const content = await page.content();
+*
+* await browser.close();
+* return content;
+* }
+* }
+* ```
+*/
+onAbort(ctx: ExecutionContext | undefined, cleanup: () => void | Promise<void>): void;
+/**
+* Create an AbortController linked to the execution context's signal.
+* When the parent signal aborts, the returned controller also aborts with the same reason.
+*
+* Useful for passing abort signals to child operations like fetch() while still
+* being able to abort them independently if needed.
+*
+* @param ctx - The execution context containing the parent abort signal
+* @returns A new AbortController linked to the parent signal
+*
+* @example
+* ```typescript
+* class FetchGadget extends Gadget({
+* description: 'Fetches data from URL',
+* schema: z.object({ url: z.string() }),
+* }) {
+* async execute(params: this['params'], ctx?: ExecutionContext): Promise<string> {
+* const controller = this.createLinkedAbortController(ctx);
+*
+* // fetch() will automatically abort when parent times out
+* const response = await fetch(params.url, { signal: controller.signal });
+* return response.text();
+* }
+* }
+* ```
+*/
+createLinkedAbortController(ctx?: ExecutionContext): AbortController;
 /**
 * Auto-generated instruction text for the LLM.
 * Combines name, description, and parameter schema into a formatted instruction.
@@ -1672,16 +1733,17 @@ declare function collectText(agentGenerator: AsyncGenerator<StreamEvent>): Promi
 * LLM CALL LIFECYCLE:
 * 1. onLLMCallStart (observer)
 * 2. beforeLLMCall (controller) - can skip/modify
-* 3.
-* 4.
+* 3. onLLMCallReady (observer) - final state before API call
+* 4. [LLM API Call]
+* 5. For each stream chunk:
 * a. interceptRawChunk (interceptor)
 * b. onStreamChunk (observer)
 * c. Parse for gadgets
 * d. If gadget found -> GADGET LIFECYCLE
 * e. If text -> interceptTextChunk -> emit
-*
-*
-*
+* 6. afterLLMCall (controller) - can append/modify
+* 7. interceptAssistantMessage (interceptor)
+* 8. onLLMCallComplete (observer)
 *
 * GADGET LIFECYCLE:
 * 1. interceptGadgetParameters (interceptor)
@@ -1705,6 +1767,18 @@ interface ObserveLLMCallContext {
 options: Readonly<LLMGenerationOptions>;
 logger: Logger<ILogObj>;
 }
+/**
+* Context provided when an LLM call is ready to execute.
+* Fires AFTER beforeLLMCall controller modifications, BEFORE the actual API call.
+* Use this for logging the exact request being sent to the LLM.
+*/
+interface ObserveLLMCallReadyContext {
+iteration: number;
+maxIterations: number;
+/** Final options after any controller modifications (e.g., trailing messages) */
+options: Readonly<LLMGenerationOptions>;
+logger: Logger<ILogObj>;
+}
 /**
 * Context provided when an LLM call completes successfully.
 * Read-only observation point.
@@ -1786,8 +1860,10 @@ interface ObserveChunkContext {
 * - Run in parallel (no ordering guarantees)
 */
 interface Observers {
-/** Called when an LLM call starts */
+/** Called when an LLM call starts (before controller modifications) */
 onLLMCallStart?: (context: ObserveLLMCallContext) => void | Promise<void>;
+/** Called when an LLM call is ready (after controller modifications, before API call) */
+onLLMCallReady?: (context: ObserveLLMCallReadyContext) => void | Promise<void>;
 /** Called when an LLM call completes successfully */
 onLLMCallComplete?: (context: ObserveLLMCompleteContext) => void | Promise<void>;
 /** Called when an LLM call fails */
package/dist/mock-stream-HF7MBNhi.d.ts
CHANGED
@@ -998,6 +998,67 @@ declare abstract class BaseGadget {
 * ```
 */
 throwIfAborted(ctx?: ExecutionContext): void;
+/**
+* Register a cleanup function to run when execution is aborted (timeout or cancellation).
+* The cleanup function is called immediately if the signal is already aborted.
+* Errors thrown by the cleanup function are silently ignored.
+*
+* Use this to clean up resources like browser instances, database connections,
+* or child processes when the gadget is cancelled due to timeout.
+*
+* @param ctx - The execution context containing the abort signal
+* @param cleanup - Function to run on abort (can be sync or async)
+*
+* @example
+* ```typescript
+* class BrowserGadget extends Gadget({
+* description: 'Fetches web page content',
+* schema: z.object({ url: z.string() }),
+* }) {
+* async execute(params: this['params'], ctx?: ExecutionContext): Promise<string> {
+* const browser = await chromium.launch();
+* this.onAbort(ctx, () => browser.close());
+*
+* const page = await browser.newPage();
+* this.onAbort(ctx, () => page.close());
+*
+* await page.goto(params.url);
+* const content = await page.content();
+*
+* await browser.close();
+* return content;
+* }
+* }
+* ```
+*/
+onAbort(ctx: ExecutionContext | undefined, cleanup: () => void | Promise<void>): void;
+/**
+* Create an AbortController linked to the execution context's signal.
+* When the parent signal aborts, the returned controller also aborts with the same reason.
+*
+* Useful for passing abort signals to child operations like fetch() while still
+* being able to abort them independently if needed.
+*
+* @param ctx - The execution context containing the parent abort signal
+* @returns A new AbortController linked to the parent signal
+*
+* @example
+* ```typescript
+* class FetchGadget extends Gadget({
+* description: 'Fetches data from URL',
+* schema: z.object({ url: z.string() }),
+* }) {
+* async execute(params: this['params'], ctx?: ExecutionContext): Promise<string> {
+* const controller = this.createLinkedAbortController(ctx);
+*
+* // fetch() will automatically abort when parent times out
+* const response = await fetch(params.url, { signal: controller.signal });
+* return response.text();
+* }
+* }
+* ```
+*/
+createLinkedAbortController(ctx?: ExecutionContext): AbortController;
 /**
 * Auto-generated instruction text for the LLM.
 * Combines name, description, and parameter schema into a formatted instruction.
@@ -1672,16 +1733,17 @@ declare function collectText(agentGenerator: AsyncGenerator<StreamEvent>): Promi
 * LLM CALL LIFECYCLE:
 * 1. onLLMCallStart (observer)
 * 2. beforeLLMCall (controller) - can skip/modify
-* 3.
-* 4.
+* 3. onLLMCallReady (observer) - final state before API call
+* 4. [LLM API Call]
+* 5. For each stream chunk:
 * a. interceptRawChunk (interceptor)
 * b. onStreamChunk (observer)
 * c. Parse for gadgets
 * d. If gadget found -> GADGET LIFECYCLE
 * e. If text -> interceptTextChunk -> emit
-*
-*
-*
+* 6. afterLLMCall (controller) - can append/modify
+* 7. interceptAssistantMessage (interceptor)
+* 8. onLLMCallComplete (observer)
 *
 * GADGET LIFECYCLE:
 * 1. interceptGadgetParameters (interceptor)
@@ -1705,6 +1767,18 @@ interface ObserveLLMCallContext {
 options: Readonly<LLMGenerationOptions>;
 logger: Logger<ILogObj>;
 }
+/**
+* Context provided when an LLM call is ready to execute.
+* Fires AFTER beforeLLMCall controller modifications, BEFORE the actual API call.
+* Use this for logging the exact request being sent to the LLM.
+*/
+interface ObserveLLMCallReadyContext {
+iteration: number;
+maxIterations: number;
+/** Final options after any controller modifications (e.g., trailing messages) */
+options: Readonly<LLMGenerationOptions>;
+logger: Logger<ILogObj>;
+}
 /**
 * Context provided when an LLM call completes successfully.
 * Read-only observation point.
@@ -1786,8 +1860,10 @@ interface ObserveChunkContext {
 * - Run in parallel (no ordering guarantees)
 */
 interface Observers {
-/** Called when an LLM call starts */
+/** Called when an LLM call starts (before controller modifications) */
 onLLMCallStart?: (context: ObserveLLMCallContext) => void | Promise<void>;
+/** Called when an LLM call is ready (after controller modifications, before API call) */
+onLLMCallReady?: (context: ObserveLLMCallReadyContext) => void | Promise<void>;
 /** Called when an LLM call completes successfully */
 onLLMCallComplete?: (context: ObserveLLMCompleteContext) => void | Promise<void>;
 /** Called when an LLM call fails */
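The declaration diffs above add the onLLMCallReady observer but ship no usage example for it. A minimal sketch of registering it, assuming observers are supplied as a plain Observers object and using only the fields shown in ObserveLLMCallReadyContext (how the object is attached to an agent, e.g. via AgentBuilder, is not part of this diff):

```typescript
import type { Observers } from "llmist";

// Sketch only: the field names follow the Observers and ObserveLLMCallReadyContext
// declarations added in 2.1.0; the wiring into an agent is an assumption.
const observers: Observers = {
  // Fires before any controller modifications.
  onLLMCallStart: async (ctx) => {
    ctx.logger.debug("LLM call starting");
  },
  // New in 2.1.0: fires after beforeLLMCall modifications, just before the API call.
  onLLMCallReady: async (ctx) => {
    ctx.logger.debug("LLM request ready", {
      iteration: ctx.iteration,
      maxIterations: ctx.maxIterations,
      model: ctx.options.model,
    });
  },
};
```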
package/dist/testing/index.cjs
CHANGED
@@ -1019,6 +1019,100 @@ var init_gadget = __esm({
 throw new AbortError();
 }
 }
+/**
+* Register a cleanup function to run when execution is aborted (timeout or cancellation).
+* The cleanup function is called immediately if the signal is already aborted.
+* Errors thrown by the cleanup function are silently ignored.
+*
+* Use this to clean up resources like browser instances, database connections,
+* or child processes when the gadget is cancelled due to timeout.
+*
+* @param ctx - The execution context containing the abort signal
+* @param cleanup - Function to run on abort (can be sync or async)
+*
+* @example
+* ```typescript
+* class BrowserGadget extends Gadget({
+* description: 'Fetches web page content',
+* schema: z.object({ url: z.string() }),
+* }) {
+* async execute(params: this['params'], ctx?: ExecutionContext): Promise<string> {
+* const browser = await chromium.launch();
+* this.onAbort(ctx, () => browser.close());
+*
+* const page = await browser.newPage();
+* this.onAbort(ctx, () => page.close());
+*
+* await page.goto(params.url);
+* const content = await page.content();
+*
+* await browser.close();
+* return content;
+* }
+* }
+* ```
+*/
+onAbort(ctx, cleanup) {
+if (!ctx?.signal) return;
+const safeCleanup = () => {
+try {
+const result = cleanup();
+if (result && typeof result === "object" && "catch" in result) {
+result.catch(() => {
+});
+}
+} catch {
+}
+};
+if (ctx.signal.aborted) {
+safeCleanup();
+return;
+}
+ctx.signal.addEventListener("abort", safeCleanup, { once: true });
+}
+/**
+* Create an AbortController linked to the execution context's signal.
+* When the parent signal aborts, the returned controller also aborts with the same reason.
+*
+* Useful for passing abort signals to child operations like fetch() while still
+* being able to abort them independently if needed.
+*
+* @param ctx - The execution context containing the parent abort signal
+* @returns A new AbortController linked to the parent signal
+*
+* @example
+* ```typescript
+* class FetchGadget extends Gadget({
+* description: 'Fetches data from URL',
+* schema: z.object({ url: z.string() }),
+* }) {
+* async execute(params: this['params'], ctx?: ExecutionContext): Promise<string> {
+* const controller = this.createLinkedAbortController(ctx);
+*
+* // fetch() will automatically abort when parent times out
+* const response = await fetch(params.url, { signal: controller.signal });
+* return response.text();
+* }
+* }
+* ```
+*/
+createLinkedAbortController(ctx) {
+const controller = new AbortController();
+if (ctx?.signal) {
+if (ctx.signal.aborted) {
+controller.abort(ctx.signal.reason);
+} else {
+ctx.signal.addEventListener(
+"abort",
+() => {
+controller.abort(ctx.signal.reason);
+},
+{ once: true }
+);
+}
+}
+return controller;
+}
 /**
 * Auto-generated instruction text for the LLM.
 * Combines name, description, and parameter schema into a formatted instruction.
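The createLinkedAbortController implementation above uses only the standard AbortController / AbortSignal API. A standalone sketch of the same linking pattern (not llmist code) illustrates the behaviour: aborting the parent propagates to the child and forwards the same reason.

```typescript
// Standalone illustration of the linking pattern used above; not llmist code.
function linkTo(parent: AbortSignal): AbortController {
  const child = new AbortController();
  if (parent.aborted) {
    // Parent already aborted: propagate immediately with the same reason.
    child.abort(parent.reason);
  } else {
    parent.addEventListener("abort", () => child.abort(parent.reason), { once: true });
  }
  return child;
}

const parent = new AbortController();
const child = linkTo(parent.signal);

parent.abort(new Error("gadget timeout"));
console.log(child.signal.aborted); // true
console.log(child.signal.reason);  // Error: gadget timeout (same reason as the parent)
```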
@@ -3801,6 +3895,17 @@ var init_agent = __esm({
 llmOptions = { ...llmOptions, ...action.modifiedOptions };
 }
 }
+await this.safeObserve(async () => {
+if (this.hooks.observers?.onLLMCallReady) {
+const context = {
+iteration: currentIteration,
+maxIterations: this.maxIterations,
+options: llmOptions,
+logger: this.logger
+};
+await this.hooks.observers.onLLMCallReady(context);
+}
+});
 this.logger.info("Calling LLM", { model: this.model });
 this.logger.silly("LLM request details", {
 model: llmOptions.model,
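The hunk above wraps the new observer call in this.safeObserve, whose implementation is not included in this diff. Presumably it isolates observer failures so that a throwing observer cannot break the agent loop; a purely hypothetical sketch of such a guard:

```typescript
// Hypothetical: safeObserve is referenced in the diff but not defined here.
// This sketch only illustrates the likely intent of isolating observer errors.
async function safeObserve(
  observe: () => void | Promise<void>,
  warn: (message: string, error: unknown) => void,
): Promise<void> {
  try {
    await observe();
  } catch (error) {
    // Observers are read-only; their failures should not abort the agent loop.
    warn("Observer callback threw", error);
  }
}
```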