@fifthrevision/axle 0.6.3 → 0.6.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +6 -3
- package/dist/consoleWriter-Bg94CpP2.js +31 -0
- package/dist/index.d.ts +701 -306
- package/dist/index.js +2 -2
- package/package.json +15 -17
- package/dist/consoleWriter-Cx-CPHU8.js +0 -33
package/dist/index.d.ts
CHANGED
@@ -1,5 +1,8 @@
 import * as z from 'zod';
 import z__default, { ZodObject, z as z$1 } from 'zod';
+import Anthropic from '@anthropic-ai/sdk';
+import { GoogleGenAI } from '@google/genai';
+import OpenAI from 'openai';
 
 type PlainObject = Record<string, unknown>;
 type ProgramOptions = {
@@ -15,9 +18,6 @@ interface Stats {
 in: number;
 out: number;
 }
-interface Task {
-readonly type: string;
-}
 
 interface RecorderLevelFunctions {
 log: (...message: (string | unknown | Error)[]) => void;
@@ -45,6 +45,162 @@ interface RecorderWriter {
 flush?(): Promise<void>;
 }
 
+declare class Recorder {
+instanceId: `${string}-${string}-${string}-${string}-${string}`;
+private currentLevel;
+private logs;
+private writers;
+private _debug;
+private _info;
+private _warn;
+private _error;
+constructor();
+buildMethods(): void;
+set level(level: LogLevel);
+get level(): LogLevel;
+get info(): RecorderLevelFunctions;
+get warn(): RecorderLevelFunctions;
+get error(): RecorderLevelFunctions;
+get debug(): RecorderLevelFunctions;
+subscribe(writer: RecorderWriter): void;
+unsubscribe(writer: RecorderWriter): void;
+private publish;
+private logFunction;
+private createLoggingFunction;
+getLogs(level?: LogLevel): RecorderEntry[];
+/**
+* Ensures all writers have completed their pending operations
+* Call this before exiting the process to ensure logs are written
+*/
+shutdown(): Promise<void>;
+}
+
+interface Tool<TSchema extends ZodObject<any> = ZodObject<any>> {
+name: string;
+description: string;
+schema: TSchema;
+execute(input: z$1.infer<TSchema>): Promise<string>;
+configure?(config: Record<string, any>): void;
+}
+type ToolDefinition = Pick<Tool, "name" | "description" | "schema">;
+
+interface FileInfo {
+path: string;
+base64?: string;
+content?: string;
+mimeType: string;
+size: number;
+name: string;
+type: "image" | "document" | "text";
+}
+type TextFileInfo = FileInfo & {
+content: string;
+base64?: never;
+type: "text";
+};
+type Base64FileInfo = FileInfo & {
+base64: string;
+content?: never;
+type: "image" | "document";
+};
+
+declare enum ResultType {
+String = "string",
+List = "string[]",
+Number = "number",
+Boolean = "boolean"
+}
+type ResultTypeUnion = `${ResultType}`;
+type DeclarativeSchema = {
+[key: string]: ResultTypeUnion | DeclarativeSchema | DeclarativeSchema[];
+};
+type OutputSchema = Record<string, z__default.ZodTypeAny>;
+type InferedOutputSchema<T extends OutputSchema> = {
+[K in keyof T]: z__default.output<T[K]>;
+};
+
+declare abstract class AbstractInstruct<T extends OutputSchema> {
+readonly name = "instruct";
+prompt: string;
+system: string | null;
+inputs: Record<string, string>;
+tools: Record<string, Tool>;
+files: Base64FileInfo[];
+textReferences: Array<{
+content: string;
+name?: string;
+}>;
+instructions: string[];
+schema: T;
+rawResponse: string;
+protected _taggedSections: {
+tags: Record<string, string>;
+remaining: string;
+} | undefined;
+protected _result: InferedOutputSchema<T> | undefined;
+protected constructor(prompt: string, schema: T);
+setInputs(inputs: Record<string, string>): void;
+addInput(name: string, value: string): void;
+addTools(tools: Tool[]): void;
+addTool(tool: Tool): void;
+addImage(file: FileInfo): void;
+addDocument(file: FileInfo): void;
+addFile(file: FileInfo): void;
+addReference(textFile: FileInfo | TextFileInfo | string, options?: {
+name?: string;
+}): void;
+addInstructions(instruction: string): void;
+hasTools(): boolean;
+hasFiles(): boolean;
+get result(): InferedOutputSchema<T> | undefined;
+compile(variables: Record<string, string>, runtime?: {
+recorder?: Recorder;
+options?: {
+warnUnused?: boolean;
+};
+}): {
+message: string;
+instructions: string;
+};
+protected createUserMessage(variables: Record<string, string>, runtime?: {
+recorder?: Recorder;
+options?: {
+warnUnused?: boolean;
+};
+}): string;
+protected createInstructions(instructions?: string): string;
+protected generateFieldInstructions(key: string, schema: z.ZodTypeAny): string;
+finalize(rawValue: string, runtime?: {
+recorder?: Recorder;
+}): InferedOutputSchema<T>;
+private preprocessValue;
+protected parseTaggedSections(input: string): {
+tags: Record<string, string>;
+remaining: string;
+};
+}
+
+declare class Instruct<T extends OutputSchema> extends AbstractInstruct<T> {
+constructor(prompt: string, schema: T);
+static with<T extends OutputSchema>(prompt: string, schema: T): Instruct<T>;
+static with<T extends DeclarativeSchema>(prompt: string, schema: T): Instruct<OutputSchema>;
+static with(prompt: string): Instruct<{
+response: z.ZodString;
+}>;
+}
+
+interface ActionContext {
+input: string;
+variables: Record<string, any>;
+options?: ProgramOptions;
+recorder?: Recorder;
+}
+interface Action {
+name: string;
+execute(context: ActionContext): Promise<string | void>;
+}
+type WorkflowStep = Instruct<any> | Action;
+
 interface StreamChunk {
 type: "start" | "text" | "tool-call-start" | "tool-call-delta" | "tool-call-complete" | "thinking-start" | "thinking-delta" | "complete" | "error";
 id?: string;
@@ -114,56 +270,6 @@ interface StreamErrorChunk extends StreamChunk {
 }
 type AnyStreamChunk = StreamStartChunk | StreamCompleteChunk | StreamTextChunk | StreamToolCallStartChunk | StreamToolCallCompleteChunk | StreamThinkingStartChunk | StreamThinkingDeltaChunk | StreamErrorChunk;
 
-declare class Recorder {
-instanceId: `${string}-${string}-${string}-${string}-${string}`;
-private currentLevel;
-private logs;
-private writers;
-private _debug;
-private _info;
-private _warn;
-private _error;
-constructor();
-buildMethods(): void;
-set level(level: LogLevel);
-get level(): LogLevel;
-get info(): RecorderLevelFunctions;
-get warn(): RecorderLevelFunctions;
-get error(): RecorderLevelFunctions;
-get debug(): RecorderLevelFunctions;
-subscribe(writer: RecorderWriter): void;
-unsubscribe(writer: RecorderWriter): void;
-private publish;
-private logFunction;
-private createLoggingFunction;
-getLogs(level?: LogLevel): RecorderEntry[];
-/**
-* Ensures all writers have completed their pending operations
-* Call this before exiting the process to ensure logs are written
-*/
-shutdown(): Promise<void>;
-}
-
-interface FileInfo {
-path: string;
-base64?: string;
-content?: string;
-mimeType: string;
-size: number;
-name: string;
-type: "image" | "document" | "text";
-}
-type TextFileInfo = FileInfo & {
-content: string;
-base64?: never;
-type: "text";
-};
-type Base64FileInfo = FileInfo & {
-base64: string;
-content?: never;
-type: "image" | "document";
-};
-
 type AxleMessage = AxleUserMessage | AxleAssistantMessage | AxleToolCallMessage;
 interface AxleUserMessage {
 role: "user";
@@ -209,18 +315,6 @@ interface ContentPartToolCall {
 parameters: Record<string, unknown>;
 }
 
-type ToolDefinition<Z extends ZodObject = ZodObject> = {
-name: string;
-description?: string;
-schema: Z;
-};
-interface ToolExecutable<Z extends ZodObject = ZodObject> extends ToolDefinition<Z> {
-setConfig?: (config: {
-[key: string]: any;
-}) => void;
-execute: (params: z$1.infer<Z>) => Promise<string>;
-}
-
 type OllamaProviderConfig = {
 url?: string;
 model: string;
@@ -323,11 +417,11 @@ declare class AxleError extends Error {
 }
 
 interface Planner {
-plan(
+plan(steps: WorkflowStep[]): Promise<Run[]>;
 }
 
 interface Run {
-
+steps: WorkflowStep[];
 variables: Record<string, any>;
 }
 interface SerializedExecutionResponse {
@@ -351,16 +445,16 @@ interface WorkflowExecutable {
 }) => Promise<WorkflowResult>;
 }
 interface DAGNodeDefinition {
-
+step: WorkflowStep | WorkflowStep[];
 dependsOn?: string | string[];
 }
 interface DAGConcurrentNodeDefinition {
 planner: Planner;
-
+steps: WorkflowStep[];
 dependsOn?: string | string[];
 }
 interface DAGDefinition {
-[nodeName: string]:
+[nodeName: string]: WorkflowStep | WorkflowStep[] | DAGNodeDefinition | DAGConcurrentNodeDefinition;
 }
 interface DAGWorkflowOptions {
 continueOnError?: boolean;
@@ -376,10 +470,10 @@ declare class Axle {
 addWriter(writer: RecorderWriter): void;
 /**
 * The execute function takes in a list of Tasks
-* @param
+* @param steps
 * @returns
 */
-execute(...
+execute(...steps: WorkflowStep[]): Promise<WorkflowResult>;
 /**
 * Execute a DAG workflow
 * @param dagDefinition - The DAG definition object
@@ -400,6 +494,21 @@ declare class Axle {
 static loadFileContent(filePath: string, encoding: "base64"): Promise<Base64FileInfo>;
 }
 
+declare class ChainOfThought<T extends OutputSchema> extends AbstractInstruct<T> {
+constructor(prompt: string, schema: T);
+static with<T extends OutputSchema>(prompt: string, schema: T): ChainOfThought<T>;
+static with<T extends DeclarativeSchema>(prompt: string, schema: T): ChainOfThought<OutputSchema>;
+static with(prompt: string): ChainOfThought<{
+response: z.ZodString;
+}>;
+createInstructions(instructions?: string): string;
+finalize(rawValue: string, runtime?: {
+recorder?: Recorder;
+}): InferedOutputSchema<T> & {
+thinking: string;
+};
+}
+
 declare const Models$2: {
 readonly CLAUDE_SONNET_4_5_20250929: "claude-sonnet-4-5-20250929";
 readonly CLAUDE_SONNET_4_5_LATEST: "claude-sonnet-4-5";
@@ -419,13 +528,54 @@ declare const Models$2: {
 };
 declare const DEFAULT_MODEL$2: "claude-haiku-4-5";
 
-declare const NAME$3: "
+declare const NAME$3: "anthropic";
+declare class AnthropicProvider implements AIProvider {
+name: "anthropic";
+client: Anthropic;
+model: string;
+constructor(apiKey: string, model?: string);
+createGenerationRequest(params: {
+messages: Array<AxleMessage>;
+system?: string;
+tools?: Array<ToolDefinition>;
+context: {
+recorder?: Recorder;
+};
+options?: {
+temperature?: number;
+top_p?: number;
+max_tokens?: number;
+frequency_penalty?: number;
+presence_penalty?: number;
+stop?: string | string[];
+[key: string]: any;
+};
+}): Promise<ModelResult>;
+createStreamingRequest(params: {
+messages: Array<AxleMessage>;
+system?: string;
+tools?: Array<ToolDefinition>;
+context: {
+recorder?: Recorder;
+};
+options?: {
+temperature?: number;
+top_p?: number;
+max_tokens?: number;
+frequency_penalty?: number;
+presence_penalty?: number;
+stop?: string | string[];
+[key: string]: any;
+};
+}): AsyncGenerator<AnyStreamChunk, void, unknown>;
+}
 
 declare namespace index$3 {
 export {
 DEFAULT_MODEL$2 as DEFAULT_MODEL,
 Models$2 as Models,
 NAME$3 as NAME,
+AnthropicProvider as Provider,
 };
 }
 
@@ -471,23 +621,182 @@ declare const Models$1: {
 declare const DEFAULT_MODEL$1: "gemini-2.5-flash";
 
 declare const NAME$2: "Gemini";
-
-
-
-
-
-
-
-
-
-
-
-
+declare class GeminiProvider implements AIProvider {
+name: "Gemini";
+client: GoogleGenAI;
+model: string;
+constructor(apiKey: string, model?: string);
+createGenerationRequest(params: {
+messages: Array<AxleMessage>;
+system?: string;
+tools?: Array<ToolDefinition>;
+context: {
+recorder?: Recorder;
+};
+options?: {
+temperature?: number;
+top_p?: number;
+max_tokens?: number;
+frequency_penalty?: number;
+presence_penalty?: number;
+stop?: string | string[];
+[key: string]: any;
+};
+}): Promise<ModelResult>;
+createStreamingRequest(params: {
+messages: Array<AxleMessage>;
+system?: string;
+tools?: Array<ToolDefinition>;
+context: {
+recorder?: Recorder;
+};
+options?: {
+temperature?: number;
+top_p?: number;
+max_tokens?: number;
+frequency_penalty?: number;
+presence_penalty?: number;
+stop?: string | string[];
+[key: string]: any;
+};
+}): AsyncGenerator<AnyStreamChunk, void, unknown>;
+}
+
+declare namespace index$2 {
+export {
+DEFAULT_MODEL$1 as DEFAULT_MODEL,
+Models$1 as Models,
+NAME$2 as NAME,
+GeminiProvider as Provider,
+};
+}
+
+declare const DEFAULT_OLLAMA_URL = "http://localhost:11434";
+declare const NAME$1: "Ollama";
+declare class OllamaProvider implements AIProvider {
+name: string;
+url: string;
+model: string;
+recorder?: Recorder;
+constructor(model: string, url?: string);
+createGenerationRequest(params: {
+messages: Array<AxleMessage>;
+system?: string;
+tools?: Array<ToolDefinition>;
+context: {
+recorder?: Recorder;
+};
+options?: {
+temperature?: number;
+top_p?: number;
+max_tokens?: number;
+frequency_penalty?: number;
+presence_penalty?: number;
+stop?: string | string[];
+[key: string]: any;
+};
+}): Promise<ModelResult>;
+createStreamingRequest(params: {
+messages: Array<AxleMessage>;
+system?: string;
+tools?: Array<ToolDefinition>;
+context: {
+recorder?: Recorder;
+};
+options?: {
+temperature?: number;
+top_p?: number;
+max_tokens?: number;
+frequency_penalty?: number;
+presence_penalty?: number;
+stop?: string | string[];
+[key: string]: any;
+};
+}): AsyncGenerator<AnyStreamChunk, void, unknown>;
+}
+
+declare const NAME: "OpenAI";
+declare class OpenAIProvider implements AIProvider {
+name: "OpenAI";
+client: OpenAI;
+model: string;
+constructor(apiKey: string, model?: string | undefined);
+createGenerationRequest(params: {
+messages: Array<AxleMessage>;
+system?: string;
+tools?: Array<ToolDefinition>;
+context: {
+recorder?: Recorder;
+};
+options?: {
+temperature?: number;
+top_p?: number;
+max_tokens?: number;
+frequency_penalty?: number;
+presence_penalty?: number;
+stop?: string | string[];
+[key: string]: any;
+};
+}): Promise<ModelResult>;
+createStreamingRequest(params: {
+messages: Array<AxleMessage>;
+system?: string;
+tools?: Array<ToolDefinition>;
+context: {
+recorder?: Recorder;
+};
+options?: {
+temperature?: number;
+top_p?: number;
+max_tokens?: number;
+frequency_penalty?: number;
+presence_penalty?: number;
+stop?: string | string[];
+[key: string]: any;
+};
+}): AsyncGenerator<AnyStreamChunk, void, unknown>;
+}
+
+interface GenerateOptions {
+temperature?: number;
+top_p?: number;
+max_tokens?: number;
+frequency_penalty?: number;
+presence_penalty?: number;
+stop?: string | string[];
+[key: string]: any;
+}
+interface GenerateProps {
+provider: AIProvider;
+messages: Array<AxleMessage>;
+system?: string;
+tools?: Array<ToolDefinition>;
+recorder?: Recorder;
+options?: GenerateOptions;
+}
+declare function generate(props: GenerateProps): Promise<ModelResult>;
+
+interface StreamProps {
+provider: AIProvider;
+messages: Array<AxleMessage>;
+system?: string;
+tools?: Array<ToolDefinition>;
+recorder?: Recorder;
+options?: GenerateOptions;
+}
+interface StreamResult {
+get final(): Promise<ModelResult>;
+get current(): AxleAssistantMessage;
+[Symbol.asyncIterator](): AsyncIterator<AnyStreamChunk>;
+}
+declare function stream(props: StreamProps): StreamResult;
+
 declare const index$1_DEFAULT_OLLAMA_URL: typeof DEFAULT_OLLAMA_URL;
 declare namespace index$1 {
 export {
 index$1_DEFAULT_OLLAMA_URL as DEFAULT_OLLAMA_URL,
 NAME$1 as NAME,
+OllamaProvider as Provider,
 };
 }
 
@@ -558,8 +867,6 @@ declare const Models: {
 };
 declare const DEFAULT_MODEL: "gpt-5";
 
-declare const NAME: "OpenAI";
-
 declare const index_DEFAULT_MODEL: typeof DEFAULT_MODEL;
 declare const index_Models: typeof Models;
 declare const index_NAME: typeof NAME;
@@ -568,218 +875,306 @@ declare namespace index {
 index_DEFAULT_MODEL as DEFAULT_MODEL,
 index_Models as Models,
 index_NAME as NAME,
+OpenAIProvider as Provider,
 };
 }
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-type
-
-}
-
-
-
-
+declare const BraveProviderConfigSchema: z$1.ZodObject<{
+"api-key": z$1.ZodString;
+rateLimit: z$1.ZodOptional<z$1.ZodNumber>;
+}, z$1.core.$strip>;
+type BraveProviderConfig = z$1.infer<typeof BraveProviderConfigSchema>;
+declare const JobSchema: z$1.ZodPipe<z$1.ZodTransform<any, any>, z$1.ZodDiscriminatedUnion<[z$1.ZodObject<{
+type: z$1.ZodLiteral<"serial">;
+tools: z$1.ZodOptional<z$1.ZodArray<z$1.ZodString>>;
+steps: z$1.ZodArray<z$1.ZodDiscriminatedUnion<[z$1.ZodObject<{
+uses: z$1.ZodLiteral<"chat">;
+system: z$1.ZodOptional<z$1.ZodString>;
+message: z$1.ZodString;
+output: z$1.ZodOptional<z$1.ZodRecord<z$1.ZodString, z$1.ZodAny>>;
+replace: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+source: z$1.ZodLiteral<"file">;
+pattern: z$1.ZodString;
+files: z$1.ZodUnion<readonly [z$1.ZodString, z$1.ZodArray<z$1.ZodString>]>;
+}, z$1.core.$strip>>>;
+tools: z$1.ZodOptional<z$1.ZodArray<z$1.ZodString>>;
+images: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+file: z$1.ZodString;
+}, z$1.core.$strip>>>;
+documents: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+file: z$1.ZodString;
+}, z$1.core.$strip>>>;
+references: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+file: z$1.ZodString;
+}, z$1.core.$strip>>>;
+}, z$1.core.$strip>, z$1.ZodObject<{
+uses: z$1.ZodLiteral<"write-to-disk">;
+output: z$1.ZodString;
+keys: z$1.ZodOptional<z$1.ZodUnion<readonly [z$1.ZodString, z$1.ZodArray<z$1.ZodString>]>>;
+}, z$1.core.$strip>], "uses">>;
+}, z$1.core.$strip>, z$1.ZodObject<{
+type: z$1.ZodLiteral<"batch">;
+tools: z$1.ZodOptional<z$1.ZodArray<z$1.ZodString>>;
+batch: z$1.ZodArray<z$1.ZodObject<{
+type: z$1.ZodLiteral<"files">;
+source: z$1.ZodString;
+bind: z$1.ZodString;
+"skip-if": z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+type: z$1.ZodLiteral<"file-exist">;
+pattern: z$1.ZodString;
+}, z$1.core.$strip>>>;
+}, z$1.core.$strip>>;
+steps: z$1.ZodArray<z$1.ZodDiscriminatedUnion<[z$1.ZodObject<{
+uses: z$1.ZodLiteral<"chat">;
+system: z$1.ZodOptional<z$1.ZodString>;
+message: z$1.ZodString;
+output: z$1.ZodOptional<z$1.ZodRecord<z$1.ZodString, z$1.ZodAny>>;
+replace: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+source: z$1.ZodLiteral<"file">;
+pattern: z$1.ZodString;
+files: z$1.ZodUnion<readonly [z$1.ZodString, z$1.ZodArray<z$1.ZodString>]>;
+}, z$1.core.$strip>>>;
+tools: z$1.ZodOptional<z$1.ZodArray<z$1.ZodString>>;
+images: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+file: z$1.ZodString;
+}, z$1.core.$strip>>>;
+documents: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+file: z$1.ZodString;
+}, z$1.core.$strip>>>;
+references: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+file: z$1.ZodString;
+}, z$1.core.$strip>>>;
+}, z$1.core.$strip>, z$1.ZodObject<{
+uses: z$1.ZodLiteral<"write-to-disk">;
+output: z$1.ZodString;
+keys: z$1.ZodOptional<z$1.ZodUnion<readonly [z$1.ZodString, z$1.ZodArray<z$1.ZodString>]>>;
+}, z$1.core.$strip>], "uses">>;
+}, z$1.core.$strip>], "type">>;
+type Job = z$1.infer<typeof JobSchema>;
+type SerialJob = Extract<Job, {
+type: "serial";
+}>;
+type BatchJob = Extract<Job, {
+type: "batch";
+}>;
+declare const DAGJobSchema: z$1.ZodRecord<z$1.ZodString, z$1.ZodPipe<z$1.ZodTransform<any, any>, z$1.ZodDiscriminatedUnion<[z$1.ZodObject<{
+type: z$1.ZodLiteral<"serial">;
+tools: z$1.ZodOptional<z$1.ZodArray<z$1.ZodString>>;
+steps: z$1.ZodArray<z$1.ZodDiscriminatedUnion<[z$1.ZodObject<{
+uses: z$1.ZodLiteral<"chat">;
+system: z$1.ZodOptional<z$1.ZodString>;
+message: z$1.ZodString;
+output: z$1.ZodOptional<z$1.ZodRecord<z$1.ZodString, z$1.ZodAny>>;
+replace: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+source: z$1.ZodLiteral<"file">;
+pattern: z$1.ZodString;
+files: z$1.ZodUnion<readonly [z$1.ZodString, z$1.ZodArray<z$1.ZodString>]>;
+}, z$1.core.$strip>>>;
+tools: z$1.ZodOptional<z$1.ZodArray<z$1.ZodString>>;
+images: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+file: z$1.ZodString;
+}, z$1.core.$strip>>>;
+documents: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+file: z$1.ZodString;
+}, z$1.core.$strip>>>;
+references: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+file: z$1.ZodString;
+}, z$1.core.$strip>>>;
+}, z$1.core.$strip>, z$1.ZodObject<{
+uses: z$1.ZodLiteral<"write-to-disk">;
+output: z$1.ZodString;
+keys: z$1.ZodOptional<z$1.ZodUnion<readonly [z$1.ZodString, z$1.ZodArray<z$1.ZodString>]>>;
+}, z$1.core.$strip>], "uses">>;
+dependsOn: z$1.ZodOptional<z$1.ZodUnion<readonly [z$1.ZodString, z$1.ZodArray<z$1.ZodString>]>>;
+}, z$1.core.$strip>, z$1.ZodObject<{
+type: z$1.ZodLiteral<"batch">;
+tools: z$1.ZodOptional<z$1.ZodArray<z$1.ZodString>>;
+batch: z$1.ZodArray<z$1.ZodObject<{
+type: z$1.ZodLiteral<"files">;
+source: z$1.ZodString;
+bind: z$1.ZodString;
+"skip-if": z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+type: z$1.ZodLiteral<"file-exist">;
+pattern: z$1.ZodString;
+}, z$1.core.$strip>>>;
+}, z$1.core.$strip>>;
+steps: z$1.ZodArray<z$1.ZodDiscriminatedUnion<[z$1.ZodObject<{
+uses: z$1.ZodLiteral<"chat">;
+system: z$1.ZodOptional<z$1.ZodString>;
+message: z$1.ZodString;
+output: z$1.ZodOptional<z$1.ZodRecord<z$1.ZodString, z$1.ZodAny>>;
+replace: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+source: z$1.ZodLiteral<"file">;
+pattern: z$1.ZodString;
+files: z$1.ZodUnion<readonly [z$1.ZodString, z$1.ZodArray<z$1.ZodString>]>;
+}, z$1.core.$strip>>>;
+tools: z$1.ZodOptional<z$1.ZodArray<z$1.ZodString>>;
+images: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+file: z$1.ZodString;
+}, z$1.core.$strip>>>;
+documents: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+file: z$1.ZodString;
+}, z$1.core.$strip>>>;
+references: z$1.ZodOptional<z$1.ZodArray<z$1.ZodObject<{
+file: z$1.ZodString;
+}, z$1.core.$strip>>>;
+}, z$1.core.$strip>, z$1.ZodObject<{
+uses: z$1.ZodLiteral<"write-to-disk">;
+output: z$1.ZodString;
+keys: z$1.ZodOptional<z$1.ZodUnion<readonly [z$1.ZodString, z$1.ZodArray<z$1.ZodString>]>>;
+}, z$1.core.$strip>], "uses">>;
+dependsOn: z$1.ZodOptional<z$1.ZodUnion<readonly [z$1.ZodString, z$1.ZodArray<z$1.ZodString>]>>;
+}, z$1.core.$strip>], "type">>>;
+type DAGJob = z$1.infer<typeof DAGJobSchema>;
 
-declare
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-} | undefined;
-protected _result: InferedOutputSchema<T> | undefined;
-protected constructor(prompt: string, schema: T);
-setInputs(inputs: Record<string, string>): void;
-addInput(name: string, value: string): void;
-addTools(tools: ToolExecutable[]): void;
-addTool(tool: ToolExecutable): void;
-addImage(file: FileInfo): void;
-addDocument(file: FileInfo): void;
-addFile(file: FileInfo): void;
-addReference(textFile: FileInfo | TextFileInfo | string, options?: {
-name?: string;
-}): void;
-addInstructions(instruction: string): void;
-hasTools(): boolean;
-hasFiles(): boolean;
-get result(): InferedOutputSchema<T> | undefined;
-compile(variables: Record<string, string>, runtime?: {
-recorder?: Recorder;
-options?: {
-warnUnused?: boolean;
-};
-}): {
-message: string;
-instructions: string;
-};
-protected createUserMessage(variables: Record<string, string>, runtime?: {
-recorder?: Recorder;
-options?: {
-warnUnused?: boolean;
-};
-}): string;
-protected createInstructions(instructions?: string): string;
-protected generateFieldInstructions(key: string, schema: z.ZodTypeAny): string;
-finalize(rawValue: string, runtime?: {
-recorder?: Recorder;
-}): InferedOutputSchema<T>;
-private preprocessValue;
-protected parseTaggedSections(input: string): {
-tags: Record<string, string>;
-remaining: string;
-};
-}
-
-declare class Instruct<T extends OutputSchema> extends AbstractInstruct<T> {
-constructor(prompt: string, schema: T);
-static with<T extends OutputSchema>(prompt: string, schema: T): Instruct<T>;
-static with<T extends DeclarativeSchema>(prompt: string, schema: T): Instruct<OutputSchema>;
-static with(prompt: string): Instruct<{
-response: z.ZodString;
-}>;
-}
+declare const braveSearchSchema: z.ZodObject<{
+searchTerm: z.ZodString;
+}, z.core.$strip>;
+declare class BraveSearchTool implements Tool<typeof braveSearchSchema> {
+name: string;
+description: string;
+schema: z.ZodObject<{
+searchTerm: z.ZodString;
+}, z.core.$strip>;
+apiKey: string;
+throttle: number | undefined;
+lastExecTime: number;
+constructor(config?: BraveProviderConfig);
+configure(config: BraveProviderConfig): void;
+execute(params: z.infer<typeof braveSearchSchema>): Promise<string>;
+}
+declare const braveSearchTool: BraveSearchTool;
 
-declare
-
-
-
-
-
+declare const calculatorSchema: z$1.ZodObject<{
+operation: z$1.ZodEnum<{
+add: "add";
+subtract: "subtract";
+multiply: "multiply";
+divide: "divide";
 }>;
-
-
-
-
-thinking: string;
-};
-}
+a: z$1.ZodNumber;
+b: z$1.ZodNumber;
+}, z$1.core.$strip>;
+declare const calculatorTool: Tool<typeof calculatorSchema>;
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
-
-
+/**
+* WriteToDisk Action
+*
+* Writes content to a file on disk. This action is typically used as a workflow
+* step following an LLM call to persist the generated output.
+*
+* ## CLI Job Definition (YAML)
+*
+* In job YAML files, use the `write-to-disk` step type:
+*
+* ```yaml
+* steps:
+*   - uses: chat
+*     message: Generate a greeting for {{name}}
+*   - uses: write-to-disk
+*     output: ./output/greeting-{{name}}.txt
+* ```
+*
+* ### Properties
+*
+* | Property | Type | Required | Description |
+* |----------|----------------------|----------|--------------------------------------------------|
+* | `uses` | `"write-to-disk"` | Yes | Identifies this as a WriteToDisk step |
+* | `output` | `string` | Yes | File path template (supports `{{}}` placeholders)|
+* | `keys` | `string \| string[]` | No | Variable keys to include in output content |
+*
+* ### Examples
+*
+* **Basic usage** - writes the LLM response to a file:
+* ```yaml
+* - uses: write-to-disk
+*   output: ./output/result.txt
+* ```
+*
+* **With path variables** - uses `{{}}` placeholders in path:
+* ```yaml
+* - uses: write-to-disk
+*   output: ./output/greeting-{{name}}.txt
+* ```
+*
+* **With file pattern** (batch processing) - uses `*` to substitute file stem:
+* ```yaml
+* - uses: write-to-disk
+*   output: ./output/results-*.txt
+* ```
+*
+* **With specific keys** - outputs only specified variables:
+* ```yaml
+* - uses: write-to-disk
+*   output: ./output/summary.txt
+*   keys: summary
+* ```
+*
+* **With multiple keys** - outputs multiple variables, each on a new line:
+* ```yaml
+* - uses: write-to-disk
+*   output: ./output/report.txt
+*   keys:
+*     - title
+*     - summary
+*     - conclusion
+* ```
+*
+* ## Placeholder Styles
+*
+* This action uses `{{variable}}` placeholder style for all variable substitution:
+*
+* - **Path template** (`output`): Uses `{{variable}}` placeholders
+*   - Example: `./output/greeting-{{name}}.txt`
+*   - Also supports `*` for file stem substitution in batch processing
+*
+* - **Content template** (`keys`): Uses `{{variable}}` placeholders
+*   - Default template: `{{response}}`
+*   - When `keys` is specified, generates: `{{key1}}\n{{key2}}\n...`
+*
+* ## Variables Available
+*
+* All variables from the workflow context are available for substitution:
+* - `response` - The text response from the previous LLM step
+* - `$previous` - The full output object from the previous step
+* - `file` - File info object when processing batches (contains `stem`, `name`, `ext`, etc.)
+* - Any custom variables defined in the workflow or extracted by previous steps
+*
+* @see WriteToDiskStep in `src/cli/configs/types.ts` for the TypeScript interface
+* @see writeToDiskConverter in `src/cli/converters/writeToDisk.ts` for CLI conversion logic
+*/
+declare class WriteToDisk implements Action {
+private pathTemplate;
+private contentTemplate;
+name: string;
+/**
+* Creates a new WriteToDisk action.
+*
+* @param pathTemplate - The file path template. Supports:
+* - `{{variable}}` placeholders for variable substitution
+* - `*` for file stem substitution (batch processing)
+* @param contentTemplate - The content template using `{{variable}}` placeholders.
+* Defaults to `{{response}}` to output the LLM response.
+*/
+constructor(pathTemplate: string, contentTemplate?: string);
+/**
+* Executes the write-to-disk action.
+*
+* Resolves the path and content templates using workflow variables,
+* then writes the content to the resolved file path.
+*
+* @param context - The action execution context containing:
+* - `variables`: All workflow variables available for substitution
+* - `options`: Execution options (e.g., `dryRun`)
+* - `recorder`: Optional recorder for logging
+* @returns A promise that resolves when the file has been written
+*/
+execute(context: ActionContext): Promise<void>;
 }
 
 interface ConcurrentWorkflow {
 (jobConfig: BatchJob): WorkflowExecutable;
-(planner: Planner, ...
+(planner: Planner, ...steps: WorkflowStep[]): WorkflowExecutable;
 }
 declare const concurrentWorkflow: ConcurrentWorkflow;
 
@@ -790,10 +1185,25 @@ declare const dagWorkflow: DAGWorkflow;
 
 interface SerialWorkflow {
 (jobConfig: SerialJob): WorkflowExecutable;
-(...
+(...steps: WorkflowStep[]): WorkflowExecutable;
 }
 declare const serialWorkflow: SerialWorkflow;
 
+declare class Conversation {
+system: string;
+private _messages;
+constructor(messages?: AxleMessage[]);
+get messages(): AxleMessage[];
+addSystem(message: string): void;
+addUser(message: string): void;
+addUser(parts: AxleUserMessage["content"]): void;
+addAssistant(message: string): void;
+addAssistant(params: Omit<AxleAssistantMessage, "role">): void;
+addToolResults(input: Array<AxleToolCallResult>): void;
+latest(): AxleMessage | undefined;
+toString(): string;
+}
+
 declare class ConsoleWriter implements RecorderWriter {
 private tasks;
 private entries;
@@ -814,20 +1224,5 @@ declare class ConsoleWriter implements RecorderWriter {
 destroy(): void;
 }
 
-
-
-private _messages;
-constructor(messages?: AxleMessage[]);
-get messages(): AxleMessage[];
-addSystem(message: string): void;
-addUser(message: string): void;
-addUser(parts: AxleUserMessage["content"]): void;
-addAssistant(message: string): void;
-addAssistant(params: Omit<AxleAssistantMessage, "role">): void;
-addToolResults(input: Array<AxleToolCallResult>): void;
-latest(): AxleMessage | undefined;
-toString(): string;
-}
-
-export { index$3 as Anthropic, Axle, AxleStopReason, ChainOfThought, ConsoleWriter, Conversation, index$2 as Gemini, Instruct, LogLevel, index$1 as Ollama, index as OpenAI, WriteOutputTask, concurrentWorkflow, dagWorkflow, generate, serialWorkflow, stream };
-export type { AIProvider, AxleAssistantMessage, AxleMessage, AxleToolCallMessage, AxleToolCallResult, AxleUserMessage, ContentPart, ContentPartFile, ContentPartText, ContentPartThinking, ContentPartToolCall, DAGDefinition, DAGWorkflowOptions, FileInfo, SerializedExecutionResponse };
+export { index$3 as Anthropic, Axle, AxleStopReason, ChainOfThought, ConsoleWriter, Conversation, index$2 as Gemini, Instruct, LogLevel, index$1 as Ollama, index as OpenAI, WriteToDisk, braveSearchTool, calculatorTool, concurrentWorkflow, dagWorkflow, generate, serialWorkflow, stream };
+export type { AIProvider, Action, ActionContext, AxleAssistantMessage, AxleMessage, AxleToolCallMessage, AxleToolCallResult, AxleUserMessage, ContentPart, ContentPartFile, ContentPartText, ContentPartThinking, ContentPartToolCall, DAGDefinition, DAGWorkflowOptions, FileInfo, SerializedExecutionResponse, Tool, ToolDefinition, WorkflowStep };