@fifthrevision/axle 0.6.5 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +285 -11
- package/dist/cli.js +6 -6
- package/dist/index.d.ts +592 -1014
- package/dist/index.js +1 -3
- package/dist/simple-Bu-04pw1.js +28 -0
- package/package.json +13 -12
- package/dist/consoleWriter-Bg94CpP2.js +0 -31
package/README.md
CHANGED
|
@@ -1,24 +1,298 @@
|
|
|
1
|
-
# Axle
|
|
1
|
+
# Axle
|
|
2
2
|
|
|
3
|
-
Axle is a
|
|
3
|
+
Axle is a TypeScript library for building multi-turn LLM agents. It provides a
|
|
4
|
+
small, focused API for building agentic applications.
|
|
4
5
|
|
|
5
|
-
|
|
6
|
+
## Quick Start
|
|
6
7
|
|
|
7
|
-
|
|
8
|
+
```typescript
|
|
9
|
+
import { Agent, Instruct, anthropic } from "@fifthrevision/axle";
|
|
8
10
|
|
|
9
|
-
|
|
10
|
-
|
|
11
|
+
const provider = anthropic(process.env.ANTHROPIC_API_KEY);
|
|
12
|
+
const agent = new Agent({ provider, model: "claude-sonnet-4-5-20250929" });
|
|
11
13
|
|
|
12
|
-
|
|
14
|
+
const r1 = await agent.send("What is the capital of France?").final;
|
|
15
|
+
console.log(r1.response); // "Paris is the capital of France."
|
|
13
16
|
|
|
17
|
+
// Multi-turn — history is managed automatically
|
|
18
|
+
const r2 = await agent.send("And what about Germany?").final;
|
|
14
19
|
```
|
|
20
|
+
|
|
21
|
+
## Philosophy
|
|
22
|
+
|
|
23
|
+
Axle has two big goals:
|
|
24
|
+
|
|
25
|
+
1. A small, focused, and ergonomic interface for building agents. The Agent,
|
|
26
|
+
Instruct, and other APIs are the entire surface, and a great deal of thought
|
|
27
|
+
has gone into making them distinct and composable.
|
|
28
|
+
2. Systematic prompt improvement. Log what was sent, validate what came back, feed
|
|
29
|
+
learnings into the next run. (This is where the roadmap is headed.)
|
|
30
|
+
|
|
31
|
+
Axle started as a DSPy-inspired workflow tool. As models got better with reasoning
|
|
32
|
+
and tool use, rigid workflow graphs felt unnecessary — but the goals behind them
|
|
33
|
+
(structured output, verification, multi-step reasoning) didn't go away. The project
|
|
34
|
+
shifted toward making those capabilities composable primitives rather than
|
|
35
|
+
fixed pipelines.
|
|
36
|
+
|
|
37
|
+
### Roadmap
|
|
38
|
+
|
|
39
|
+
- **Memory:** Ways to remember previous runs so they can be retrieved and fed back
|
|
40
|
+
into the prompt for future runs.
|
|
41
|
+
- **Verification:** Automatic and manual ways to verify that the output meets its goals.
|
|
42
|
+
|
|
43
|
+
## Core Concepts
|
|
44
|
+
|
|
45
|
+
### Agent
|
|
46
|
+
|
|
47
|
+
Agent is the primary interface. It owns the provider, model, system prompt,
|
|
48
|
+
tools, and conversation history. `send()` is the only verb — it accepts either a
|
|
49
|
+
plain string or an Instruct.
|
|
50
|
+
|
|
51
|
+
```typescript
|
|
52
|
+
const agent = new Agent({
|
|
53
|
+
provider: anthropic(apiKey),
|
|
54
|
+
model: "claude-sonnet-4-5-20250929",
|
|
55
|
+
system: "You are a helpful assistant.",
|
|
56
|
+
tools: [calculatorTool],
|
|
57
|
+
});
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
### Instruct
|
|
61
|
+
|
|
62
|
+
Instruct is a rich message. Use it when you need structured output, file
|
|
63
|
+
attachments, variable substitution, or additional instructions.
|
|
64
|
+
|
|
65
|
+
```typescript
|
|
66
|
+
import * as z from "zod";
|
|
67
|
+
|
|
68
|
+
const instruct = new Instruct("Summarize the following document.", {
|
|
69
|
+
summary: z.string(),
|
|
70
|
+
keyPoints: z.array(z.string()),
|
|
71
|
+
});
|
|
72
|
+
instruct.addFile(await loadFileContent("./report.pdf"));
|
|
73
|
+
|
|
74
|
+
const result = await agent.send(instruct).final;
|
|
75
|
+
// result.response is { summary: string, keyPoints: string[] }
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
For plain text interactions, pass a string directly to `send()` instead.
|
|
79
|
+
|
|
80
|
+
### Providers
|
|
81
|
+
|
|
82
|
+
Axle ships with first-party support for Anthropic, OpenAI, and Gemini, plus a
|
|
83
|
+
generic ChatCompletions provider for any OpenAI-compatible API.
|
|
84
|
+
|
|
85
|
+
```typescript
|
|
86
|
+
import { anthropic, openai, gemini, chatCompletions } from "@fifthrevision/axle";
|
|
87
|
+
|
|
88
|
+
const a = anthropic(process.env.ANTHROPIC_API_KEY);
|
|
89
|
+
const o = openai(process.env.OPENAI_API_KEY);
|
|
90
|
+
const g = gemini(process.env.GEMINI_API_KEY);
|
|
91
|
+
const local = chatCompletions("http://localhost:11434/v1");
|
|
92
|
+
```
|
|
93
|
+
|
|
94
|
+
### `stream()` and `generate()`
|
|
95
|
+
|
|
96
|
+
Agent is built on two lower-level primitives that can be used directly when you
|
|
97
|
+
want full control without conversation management.
|
|
98
|
+
|
|
99
|
+
`stream()` runs a tool loop over a streaming request and returns a handle with
|
|
100
|
+
callbacks for real-time output:
|
|
101
|
+
|
|
102
|
+
```typescript
|
|
103
|
+
import { stream } from "@fifthrevision/axle";
|
|
104
|
+
|
|
105
|
+
const handle = stream({
|
|
106
|
+
provider,
|
|
107
|
+
model,
|
|
108
|
+
messages: [{ role: "user", content: "Hello" }],
|
|
109
|
+
tools: [myTool],
|
|
110
|
+
onToolCall: async (name, params) => ({ type: "success", content: "result" }),
|
|
111
|
+
});
|
|
112
|
+
|
|
113
|
+
handle.onPartUpdate((index, type, delta) => process.stdout.write(delta));
|
|
114
|
+
const result = await handle.final;
|
|
115
|
+
```
|
|
116
|
+
|
|
117
|
+
`generate()` does the same but without streaming — it returns the final result
|
|
118
|
+
directly as a promise:
|
|
119
|
+
|
|
120
|
+
```typescript
|
|
121
|
+
import { generate } from "@fifthrevision/axle";
|
|
122
|
+
|
|
123
|
+
const result = await generate({
|
|
124
|
+
provider,
|
|
125
|
+
model,
|
|
126
|
+
messages: [{ role: "user", content: "Hello" }],
|
|
127
|
+
tools: [myTool],
|
|
128
|
+
onToolCall: async (name, params) => ({ type: "success", content: "result" }),
|
|
129
|
+
});
|
|
130
|
+
```
|
|
131
|
+
|
|
132
|
+
Both handle the full tool-call loop automatically. Agent uses `stream()`
|
|
133
|
+
internally and adds history management, system prompt, and callback wiring on
|
|
134
|
+
top.
|
|
135
|
+
|
|
136
|
+
## Details
|
|
137
|
+
|
|
138
|
+
### Structured Output
|
|
139
|
+
|
|
140
|
+
Pass a Zod schema as the second argument to Instruct. Axle compiles the schema
|
|
141
|
+
into output format instructions, then parses the response back into typed
|
|
142
|
+
objects.
|
|
143
|
+
|
|
144
|
+
```typescript
|
|
145
|
+
import * as z from "zod";
|
|
146
|
+
|
|
147
|
+
const instruct = new Instruct("Tell me about Mars.", {
|
|
148
|
+
name: z.string(),
|
|
149
|
+
distanceFromSun: z.number(),
|
|
150
|
+
moons: z.array(z.string()),
|
|
151
|
+
});
|
|
152
|
+
|
|
153
|
+
const agent = new Agent({ provider, model });
|
|
154
|
+
const result = await agent.send(instruct).final;
|
|
155
|
+
|
|
156
|
+
result.response.name; // string
|
|
157
|
+
result.response.distanceFromSun; // number
|
|
158
|
+
result.response.moons; // string[]
|
|
159
|
+
```
|
|
160
|
+
|
|
161
|
+
### Tools
|
|
162
|
+
|
|
163
|
+
A tool is an object with a name, description, Zod schema, and an `execute`
|
|
164
|
+
function. Pass tools to the Agent constructor.
|
|
165
|
+
|
|
166
|
+
```typescript
|
|
167
|
+
import { z } from "zod";
|
|
168
|
+
|
|
169
|
+
const weatherTool = {
|
|
170
|
+
name: "getWeather",
|
|
171
|
+
description: "Get current weather for a city",
|
|
172
|
+
schema: z.object({ city: z.string() }),
|
|
173
|
+
async execute(input) {
|
|
174
|
+
return JSON.stringify({ temp: 72, condition: "sunny" });
|
|
175
|
+
},
|
|
176
|
+
};
|
|
177
|
+
|
|
178
|
+
const agent = new Agent({
|
|
179
|
+
provider,
|
|
180
|
+
model,
|
|
181
|
+
tools: [weatherTool],
|
|
182
|
+
});
|
|
183
|
+
```
|
|
184
|
+
|
|
185
|
+
Axle includes several built-in tools: `braveSearchTool`, `calculatorTool`,
|
|
186
|
+
`execTool`, `readFileTool`, `writeFileTool`, and `patchFileTool`.
|
|
187
|
+
|
|
188
|
+
### Streaming
|
|
189
|
+
|
|
190
|
+
Agent exposes callbacks for streaming output as it arrives.
|
|
191
|
+
|
|
192
|
+
```typescript
|
|
193
|
+
const agent = new Agent({ provider, model });
|
|
194
|
+
|
|
195
|
+
agent.onPartStart((index, type) => {
|
|
196
|
+
/* text, tool-call, thinking */
|
|
197
|
+
});
|
|
198
|
+
agent.onPartUpdate((index, type, delta) => process.stdout.write(delta));
|
|
199
|
+
agent.onPartEnd((index, type) => {
|
|
200
|
+
/* part finished */
|
|
201
|
+
});
|
|
202
|
+
agent.onError((error) => console.error(error));
|
|
203
|
+
|
|
204
|
+
const handle = agent.send("Write me a poem.");
|
|
205
|
+
// handle.cancel() to abort mid-stream
|
|
206
|
+
const result = await handle.final;
|
|
207
|
+
```
|
|
208
|
+
|
|
209
|
+
Callbacks are registered once and fire on every subsequent `send()`.
|
|
210
|
+
|
|
211
|
+
## Known Limitations
|
|
212
|
+
|
|
213
|
+
1. Axle does not support multi-modal output right now.
|
|
214
|
+
|
|
215
|
+
## CLI
|
|
216
|
+
|
|
217
|
+
In keeping with Axle's lineage as a workflow tool, Axle exposes a command
|
|
218
|
+
line interface that accepts a declarative config file.
|
|
219
|
+
|
|
220
|
+
### Installation
|
|
221
|
+
|
|
222
|
+
```bash
|
|
223
|
+
npm install -g @fifthrevision/axle
|
|
224
|
+
```
|
|
225
|
+
|
|
226
|
+
### Usage
|
|
227
|
+
|
|
228
|
+
The CLI looks for `axle.job.yaml` and `axle.config.yaml` in the current
|
|
229
|
+
directory by default. You can also specify them using the `-j` and `-c` flags:
|
|
230
|
+
|
|
231
|
+
```bash
|
|
232
|
+
axle
|
|
233
|
+
axle -j path/to/job.yaml -c path/to/config.yaml
|
|
234
|
+
axle --args key=value other=thing
|
|
235
|
+
axle --debug
|
|
236
|
+
```
|
|
237
|
+
|
|
238
|
+
A job file specifies the provider, task prompt, and optional tools/files:
|
|
239
|
+
|
|
240
|
+
```yaml
|
|
241
|
+
# axle.job.yaml
|
|
242
|
+
provider:
|
|
243
|
+
type: anthropic
|
|
244
|
+
model: claude-sonnet-4-5-20250929
|
|
245
|
+
|
|
246
|
+
task: |
|
|
247
|
+
Summarize the attached document.
|
|
248
|
+
|
|
249
|
+
tools:
|
|
250
|
+
- calculator
|
|
251
|
+
|
|
252
|
+
files:
|
|
253
|
+
- ./data/report.txt
|
|
254
|
+
```
|
|
255
|
+
|
|
256
|
+
### Batch
|
|
257
|
+
|
|
258
|
+
Add a `batch` key to the job file to run the same task across multiple files.
|
|
259
|
+
Each matched file is attached to the instruct automatically.
|
|
260
|
+
|
|
261
|
+
```yaml
|
|
262
|
+
# axle.job.yaml
|
|
263
|
+
provider:
|
|
264
|
+
type: openai
|
|
265
|
+
|
|
266
|
+
task: |
|
|
267
|
+
Summarize this file.
|
|
268
|
+
|
|
269
|
+
batch:
|
|
270
|
+
files: "./data/*.txt"
|
|
271
|
+
concurrency: 3
|
|
272
|
+
resume: true
|
|
273
|
+
```
|
|
274
|
+
|
|
275
|
+
- `files` — glob pattern for input files
|
|
276
|
+
- `concurrency` — max parallel runs (default 3)
|
|
277
|
+
- `resume` — skip files already processed in a previous run
|
|
278
|
+
|
|
279
|
+
### Configuration
|
|
280
|
+
|
|
281
|
+
For CLI use, create an `axle.config.yaml` in your working directory with API
|
|
282
|
+
keys:
|
|
283
|
+
|
|
284
|
+
```yaml
|
|
285
|
+
# axle.config.yaml
|
|
15
286
|
openai:
|
|
16
287
|
api-key: "<api-key>"
|
|
17
288
|
anthropic:
|
|
18
289
|
api-key: "<api-key>"
|
|
19
|
-
|
|
20
|
-
url: "<url>"
|
|
21
|
-
brave:
|
|
290
|
+
gemini:
|
|
22
291
|
api-key: "<api-key>"
|
|
23
|
-
|
|
292
|
+
chatcompletions:
|
|
293
|
+
base-url: "http://localhost:11434/v1"
|
|
294
|
+
model: "llama3"
|
|
295
|
+
api-key: "<api-key>" # optional
|
|
24
296
|
```
|
|
297
|
+
|
|
298
|
+
Provider-level keys in the job file override the config file.
|
package/dist/cli.js
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
var
|
|
3
|
-
${
|
|
4
|
-
${
|
|
5
|
-
`)}
|
|
6
|
-
`)
|
|
7
|
-
`,
|
|
2
|
+
var Y=Object.defineProperty;var i=(e,t)=>Y(e,"name",{value:t,configurable:!0});import{Command as Z}from"@commander-js/extra-typings";import J from"yaml";import{l as q,a as K,n as Q,D as ee,i as te,d as oe,p as ne,r as N,o as j,I as U,h as P,w as re,t as ae,u as ie,v as se,k as ce,j as le,T as ue,S as M}from"./simple-Bu-04pw1.js";import{z as n}from"zod";import{glob as pe}from"glob";import{readFile as _,mkdir as de,appendFile as fe}from"node:fs/promises";import{createHash as me}from"node:crypto";import{dirname as he}from"node:path";import"mime";import"@anthropic-ai/sdk";import"@google/genai";import"openai";import"node:child_process";import"node:util";class A extends Error{static{i(this,"AxleError")}code;id;details;constructor(t,o){super(t,{cause:o?.cause}),this.name=this.constructor.name,this.code=o?.code||"AXLE_ERROR",this.id=o?.id,this.details=o?.details,Object.setPrototypeOf(this,A.prototype)}toJSON(){return{name:this.name,message:this.message,code:this.code,...this.id&&{id:this.id},...this.details&&{details:this.details},...this.cause&&{cause:R(this.cause)}}}}function R(e){return e instanceof Error?{name:e.name,message:e.message,...e.stack&&{stack:e.stack},..."cause"in e&&e.cause&&{cause:R(e.cause)}}:e}i(R,"serializeError");function ge(e,t){if(!t||Object.keys(t).length===0)throw new A(`The provider ${e} is not configured. 
Please check your configuration.`);switch(e){case"openai":return{provider:ne(t["api-key"]),model:t.model||oe};case"anthropic":return{provider:te(t["api-key"]),model:t.model||ee};case"gemini":return{provider:Q(t["api-key"]),model:t.model||K};case"chatcompletions":{const o=t;return{provider:q(o["base-url"],o["api-key"]),model:o.model}}default:throw new A("The provider is unsupported")}}i(ge,"getProvider");var we="0.7.0",ye={version:we};n.object({value:n.string()});const be=n.object({"api-key":n.string(),rateLimit:n.number().optional()});n.object({timeout:n.number().optional(),maxBuffer:n.number().optional(),cwd:n.string().optional()});const ve=n.object({type:n.literal("chatcompletions")}).loose(),Se=n.object({type:n.literal("anthropic")}).loose(),ke=n.object({type:n.literal("openai")}).loose(),Ee=n.object({type:n.literal("gemini")}).loose(),Te=n.discriminatedUnion("type",[ve,Se,ke,Ee]),Ae=n.object({chatcompletions:n.custom().optional(),anthropic:n.custom().optional(),openai:n.custom().optional(),gemini:n.custom().optional(),brave:be.optional()}).loose(),Oe=n.object({files:n.string(),resume:n.boolean().default(!1),concurrency:n.number().int().positive().default(3)}),je=n.object({provider:Te,task:n.string(),tools:n.array(n.string()).optional(),files:n.array(n.string()).optional(),batch:Oe.optional()}),Le="axle.job",Fe=["yaml","yml","json"];async function xe(e,t){const{tracer:o}=t,{content:l,format:s}=await N(e,{defaults:{name:Le,formats:Fe},tag:"Job File"});let a=null;if(s==="json")a=JSON.parse(l);else if(s==="yaml"||s==="yml")a=J.parse(l);else throw new Error("Invalid job file format");o?.debug("Job config: "+JSON.stringify(a,null,2));const c=je.safeParse(a);if(!c.success)throw new Error(`The job file is not valid:
|
|
3
|
+
${B(c.error)}`);return c.data}i(xe,"getJobConfig");const De="axle.config",$e=["yaml","yml","json"];async function Ie(e,t){const{tracer:o}=t,{content:l,format:s}=await N(e,{defaults:{name:De,formats:$e},tag:"Config File"});let a=null;if(s==="json")a=JSON.parse(l);else if(s==="yaml"||s==="yml")a=J.parse(l);else throw new Error("Invalid config file format");o?.debug("Service config: "+JSON.stringify(a,null,2));const c=Ae.safeParse(a);if(!c.success)throw new Error(`The config file is not valid:
|
|
4
|
+
${B(c.error)}`);return c.data}i(Ie,"getServiceConfig");function B(e){return e.issues.map(t=>` - ${t.path.join(".")||"root"}: ${t.message}`).join(`
|
|
5
|
+
`)}i(B,"formatZodError");const W=".axle/batch.jsonl";function Ce(e,t){const o=me("sha256");return o.update(e),o.update("\0"),o.update(t),o.digest("hex")}i(Ce,"computeHash");async function Je(e=W){const t=new Map;let o;try{o=await _(e,"utf-8")}catch{return t}for(const l of o.split(`
|
|
6
|
+
`)){const s=l.trim();if(s)try{const a=JSON.parse(s);a.file&&a.hash&&t.set(a.file,a)}catch{}}return t}i(Je,"loadLedger");async function Ne(e,t=W){await de(he(t),{recursive:!0}),await fe(t,JSON.stringify(e)+`
|
|
7
|
+
`,"utf-8")}i(Ne,"appendLedgerEntry");async function Ue(e,t,o,l,s,a,c,m){const p=new U(e.task);if(e.files)for(const v of e.files)p.addFile(await j(v));const g=m.startSpan("job",{type:"workflow"}),w=await new P({provider:t,model:o,tools:l,tracer:g}).send(p,s).final;g.end(),c.in+=w.usage.in,c.out+=w.usage.out,w.response&&m.info("Response: "+JSON.stringify(w.response,null,2))}i(Ue,"runSingle");async function Pe(e,t,o,l,s,a,c,m){const p=e.batch,g=await pe(p.files);if(g.length===0){m.warn(`No files matched pattern: ${p.files}`);return}m.info(`Batch: ${g.length} file(s) matched "${p.files}"`);const x=p.resume?await Je():new Map,w=e.files?await Promise.all(e.files.map(d=>j(d))):[];let v=0,D=0,$=0;const z=p.concurrency??3;await Me(z,g,async d=>{const y=m.startSpan(`batch:${d}`,{type:"workflow"});try{const b=await _(d),S=Ce(e.task,b),I=x.get(d);if(p.resume&&I&&I.hash===S){y.info("Skipped (already completed)"),y.end(),D++;return}const O=new U(e.task);for(const X of w)O.addFile(X);O.addFile(await j(d));const V={...s,file:d},C=await new P({provider:t,model:o,tools:l,tracer:y}).send(O,V).final;c.in+=C.usage.in,c.out+=C.usage.out,await Ne({file:d,hash:S,timestamp:Date.now()}),y.end(),v++}catch(b){const S=b instanceof Error?b.message:String(b);y.error(`Failed: ${S}`),y.end("error"),$++}}),m.info(`Batch complete: ${v} completed, ${D} skipped, ${$} failed`)}i(Pe,"runBatch");async function Me(e,t,o){let l=0;async function s(){for(;l<t.length;){const c=l++;await o(t[c])}}i(s,"worker");const a=Array.from({length:Math.min(e,t.length)},()=>s());await Promise.all(a)}i(Me,"runWithConcurrency");function _e(e,t){switch(e){case"brave":return le;case"calculator":return ce;case"exec":return se;case"patch-file":return ie;case"read-file":return ae;case"write-file":return re;default:throw new Error(`Unknown tool: ${e}`)}}i(_e,"createTool");function Re(e,t){return e.map(o=>_e(o))}i(Re,"createTools");const k=new Z().name("axle").description("Axle is a CLI tool for running AI 
workflows").version(ye.version).option("--dry-run","Run the application without executing against the AI providers").option("-c, --config <path>","Path to the config file").option("-j, --job <path>","Path to the job file").option("--no-log","Do not write the output to a log file").option("--no-warn-unused","Do not warn about unused variables").option("--no-inline","Do not inline the console output").option("-d, --debug","Print additional debug information").option("--truncate <num>","Truncate printed strings to a certain number of characters, 0 to disable",parseInt,100).option("--args <args...>","Additional arguments in the form key=value");k.parse(process.argv);const u=k.opts(),E={date:new Date().toISOString().split("T")[0],datetime:new Date().toISOString(),cwd:process.cwd()};u.args&&u.args.forEach(e=>{const[t,o]=e.split("=");t&&o&&(E[t.trim()]=o.trim())});const f=new ue;u.debug&&(f.minLevel="debug");const Be=new M({minLevel:u.debug?"debug":"info",showInternal:u.debug,showTimestamp:!0});if(f.addWriter(Be),u.log){const e=new M({minLevel:"debug",showInternal:!0,showTimestamp:!0,output:i(t=>{},"output")});f.addWriter(e)}const r=f.startSpan("cli",{type:"root"});process.on("uncaughtException",async e=>{console.error("Uncaught exception:"),console.error(e),r.error("Uncaught exception:"),r.error(e.message),r.error(e.stack||""),r.end("error"),await f.flush(),process.exit(1)}),u.debug&&(r.debug("Options: "+JSON.stringify(u,null,2)),r.debug("Additional Arguments: "+JSON.stringify(E,null,2)));let G,h;try{G=await Ie(u.config??null,{tracer:r}),h=await xe(u.job??null,{tracer:r})}catch(e){const t=e instanceof Error?e:new Error(String(e));r.error(t.message),r.debug(t.stack??""),r.end("error"),await f.flush(),k.outputHelp(),process.exit(1)}let L,F;try{const{type:e,...t}=h.provider,o={...G[e],...t};({provider:L,model:F}=ge(e,o))}catch(e){const t=e instanceof Error?e:new Error(String(e));r.error(t.message),r.error(t.stack??""),r.end("error"),await 
f.flush(),k.outputHelp(),process.exit(1)}r.info("All systems operational. Running job..."),u.dryRun&&r.info("Dry run mode enabled. No API calls will be made.");const H=h.tools?.length?Re(h.tools):[],T={in:0,out:0},We=performance.now();h.batch?await Pe(h,L,F,H,E,u,T,r):await Ue(h,L,F,H,E,u,T,r);const Ge=performance.now()-We;r.info(`Total run time: ${Math.round(Ge)}ms`),r.info(`Input tokens: ${T.in}`),r.info(`Output tokens: ${T.out}`),r.info("Complete. Goodbye"),r.end(),await f.flush();
|