@theclawlab/pai 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +58 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +2540 -0
- package/dist/index.js.map +1 -0
- package/package.json +41 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,2540 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// src/index.ts
|
|
4
|
+
import { Command } from "commander";
|
|
5
|
+
import { readFileSync } from "fs";
|
|
6
|
+
import { join as join2, dirname as dirname2 } from "path";
|
|
7
|
+
import { fileURLToPath } from "url";
|
|
8
|
+
|
|
9
|
+
// src/types.ts
|
|
10
|
+
/**
 * Domain error for the PAI CLI.
 *
 * Carries the process exit code the CLI should terminate with, plus an
 * optional structured context object for diagnostics (paths, provider
 * names, etc.).
 */
var PAIError = class extends Error {
  /**
   * @param {string} message - human-readable description of the failure
   * @param {number} exitCode - process exit code associated with this failure
   * @param {object} [context] - optional structured details for diagnostics
   */
  constructor(message, exitCode, context) {
    super(message);
    this.name = "PAIError";
    this.exitCode = exitCode;
    this.context = context;
  }
};
|
|
18
|
+
|
|
19
|
+
// src/config-manager.ts
|
|
20
|
+
import { readFile, writeFile, mkdir } from "fs/promises";
|
|
21
|
+
import { existsSync } from "fs";
|
|
22
|
+
import { dirname, join } from "path";
|
|
23
|
+
import { homedir } from "os";
|
|
24
|
+
// Schema version stamped onto configs that are saved without one.
var DEFAULT_SCHEMA_VERSION = "1.0.0";

/**
 * Loads, validates and persists the PAI JSON configuration file, and resolves
 * provider credentials.
 *
 * Config path precedence: constructor option `config` > $PAI_CONFIG env var >
 * ~/.config/pai/default.json.
 *
 * Credential precedence (see resolveCredentials): explicit CLI key >
 * PAI_<PROVIDER>_API_KEY env var > config file (apiKey, then oauth).
 */
var ConfigurationManager = class {
  // Absolute path of the JSON config file this manager reads and writes.
  configPath;
  constructor(options = {}) {
    this.configPath = options.config || process.env.PAI_CONFIG || join(homedir(), ".config", "pai", "default.json");
  }
  /**
   * Load configuration from file.
   * Returns a fresh default config if the file doesn't exist.
   * @returns {Promise<object>} parsed config with schema_version and providers[]
   * @throws {PAIError} exit code 4 on missing/invalid fields, malformed JSON,
   *   or any read failure
   */
  async loadConfig() {
    if (!existsSync(this.configPath)) {
      return this.getDefaultConfig();
    }
    try {
      const content = await readFile(this.configPath, "utf-8");
      const config = JSON.parse(content);
      // Structural validation: both fields are mandatory for a usable config.
      if (!config.schema_version) {
        throw new PAIError(
          "Config file is missing schema_version field",
          4,
          { path: this.configPath }
        );
      }
      if (!config.providers) {
        throw new PAIError(
          "Config file is missing providers field",
          4,
          { path: this.configPath }
        );
      }
      if (!Array.isArray(config.providers)) {
        throw new PAIError(
          "Config file providers field must be an array",
          4,
          { path: this.configPath }
        );
      }
      return config;
    } catch (error) {
      // Re-throw our own validation errors untouched.
      if (error instanceof PAIError) {
        throw error;
      }
      // JSON.parse failures get a dedicated message.
      if (error instanceof SyntaxError) {
        throw new PAIError(
          "Config file is malformed",
          4,
          { path: this.configPath, error: error.message }
        );
      }
      throw new PAIError(
        "Failed to read config file",
        4,
        { path: this.configPath, error: String(error) }
      );
    }
  }
  /**
   * Save configuration to file, creating the parent directory if needed.
   * Mutates `config` in place to stamp a schema_version when absent.
   * @throws {PAIError} exit code 4 on any write failure
   */
  async saveConfig(config) {
    if (!config.schema_version) {
      config.schema_version = DEFAULT_SCHEMA_VERSION;
    }
    try {
      const dir = dirname(this.configPath);
      if (!existsSync(dir)) {
        await mkdir(dir, { recursive: true });
      }
      const content = JSON.stringify(config, null, 2);
      await writeFile(this.configPath, content, "utf-8");
    } catch (error) {
      throw new PAIError(
        "Failed to write config file",
        4,
        { path: this.configPath, error: String(error) }
      );
    }
  }
  /**
   * Get provider configuration by name; falls back to the configured
   * defaultProvider when `name` is omitted.
   * @throws {PAIError} exit code 1 if neither is available or not found
   */
  async getProvider(name) {
    const config = await this.loadConfig();
    const providerName = name || config.defaultProvider;
    if (!providerName) {
      throw new PAIError(
        "No provider specified and no default provider configured",
        1,
        { configPath: this.configPath }
      );
    }
    const provider = config.providers.find((p) => p.name === providerName);
    if (!provider) {
      throw new PAIError(
        `Provider not found: ${providerName}`,
        1,
        { provider: providerName, configPath: this.configPath }
      );
    }
    return provider;
  }
  /**
   * Add a provider configuration, or replace an existing one with the same name.
   */
  async addProvider(provider) {
    const config = await this.loadConfig();
    const existingIndex = config.providers.findIndex(
      (p) => p.name === provider.name
    );
    if (existingIndex >= 0) {
      config.providers[existingIndex] = provider;
    } else {
      config.providers.push(provider);
    }
    await this.saveConfig(config);
  }
  /**
   * Patch an existing provider configuration (merge fields).
   * The "name" key is ignored; plain-object values are shallow-merged into
   * the existing object value, everything else is replaced outright.
   * @throws {PAIError} exit code 1 if the provider does not exist
   */
  async updateProvider(name, updates) {
    const config = await this.loadConfig();
    const existingIndex = config.providers.findIndex((p) => p.name === name);
    if (existingIndex < 0) {
      throw new PAIError(
        `Provider not found: ${name}`,
        1,
        { provider: name, configPath: this.configPath }
      );
    }
    const existing = config.providers[existingIndex];
    for (const [key, value] of Object.entries(updates)) {
      if (key === "name") continue;
      // Shallow-merge object-into-object; arrays and scalars overwrite.
      if (typeof value === "object" && value !== null && !Array.isArray(value) && typeof existing[key] === "object" && existing[key] !== null) {
        existing[key] = { ...existing[key], ...value };
      } else {
        existing[key] = value;
      }
    }
    config.providers[existingIndex] = existing;
    await this.saveConfig(config);
  }
  /**
   * Set the default provider.
   * @throws {PAIError} exit code 1 if the named provider does not exist
   */
  async setDefaultProvider(name) {
    const config = await this.loadConfig();
    const exists = config.providers.some((p) => p.name === name);
    if (!exists) {
      throw new PAIError(
        `Provider not found: ${name}`,
        1,
        { provider: name, configPath: this.configPath }
      );
    }
    config.defaultProvider = name;
    await this.saveConfig(config);
  }
  /**
   * Set the default embed provider and/or model. Either argument may be
   * omitted; only provided values are written.
   * @throws {PAIError} exit code 1 if embedProvider does not exist
   */
  async setDefaultEmbed(embedProvider, embedModel) {
    const config = await this.loadConfig();
    if (embedProvider) {
      const exists = config.providers.some((p) => p.name === embedProvider);
      if (!exists) {
        throw new PAIError(
          `Provider not found: ${embedProvider}`,
          1,
          { provider: embedProvider, configPath: this.configPath }
        );
      }
      config.defaultEmbedProvider = embedProvider;
    }
    if (embedModel) {
      config.defaultEmbedModel = embedModel;
    }
    await this.saveConfig(config);
  }
  /**
   * Delete a provider configuration. Clears defaultProvider if it pointed
   * at the deleted entry.
   * @throws {PAIError} exit code 1 if the provider does not exist
   */
  async deleteProvider(name) {
    const config = await this.loadConfig();
    const existingIndex = config.providers.findIndex((p) => p.name === name);
    if (existingIndex < 0) {
      throw new PAIError(
        `Provider not found: ${name}`,
        1,
        { provider: name, configPath: this.configPath }
      );
    }
    config.providers.splice(existingIndex, 1);
    if (config.defaultProvider === name) {
      delete config.defaultProvider;
    }
    await this.saveConfig(config);
  }
  /**
   * Get a fresh default (empty) configuration object.
   */
  getDefaultConfig() {
    return {
      schema_version: DEFAULT_SCHEMA_VERSION,
      providers: []
    };
  }
  /**
   * Get the config file path in use.
   */
  getConfigPath() {
    return this.configPath;
  }
  /**
   * Resolve credentials for a provider.
   * Priority: CLI param > env var (PAI_<NAME>_API_KEY, dashes mapped to
   * underscores) > config file (apiKey, then oauth).
   * @throws {PAIError} exit code 1 when no source yields a credential
   */
  async resolveCredentials(provider, cliKey) {
    if (cliKey) {
      return cliKey;
    }
    const envVarName = `PAI_${provider.toUpperCase().replace(/-/g, "_")}_API_KEY`;
    const envKey = process.env[envVarName];
    if (envKey) {
      return envKey;
    }
    try {
      const providerConfig = await this.getProvider(provider);
      if (providerConfig.apiKey) {
        return providerConfig.apiKey;
      }
      if (providerConfig.oauth) {
        const oauthApiKey = await this.resolveOAuthCredentials(providerConfig);
        if (oauthApiKey) {
          return oauthApiKey;
        }
      }
    } catch {
      // Deliberate: any config/oauth failure falls through to the generic
      // "no credentials" error below rather than surfacing its own message.
    }
    throw new PAIError(
      `No credentials found for provider: ${provider}`,
      1,
      {
        provider,
        checkedSources: ["CLI parameter", "environment variable", "config file"]
      }
    );
  }
  /**
   * Resolve OAuth credentials from provider config.
   * Refreshes expired tokens (oauth.expires is compared against Date.now(),
   * so it is assumed to be epoch milliseconds — TODO confirm) and persists
   * the refreshed credentials via addProvider. All OAuth-library failures
   * are swallowed; the raw access token is the last-resort return value.
   * @returns {Promise<string|null>} an API key/token, or null if no usable oauth data
   */
  async resolveOAuthCredentials(providerConfig) {
    const oauth = providerConfig.oauth;
    if (!oauth || !oauth.access) {
      return null;
    }
    if (oauth.expires && Date.now() >= oauth.expires) {
      try {
        const { getOAuthProvider: getOAuthProvider2 } = await import("@mariozechner/pi-ai/oauth");
        const oauthProvider = getOAuthProvider2(providerConfig.name);
        if (oauthProvider) {
          const newCredentials = await oauthProvider.refreshToken(oauth);
          providerConfig.oauth = {
            ...oauth,
            refresh: newCredentials.refresh,
            access: newCredentials.access,
            expires: newCredentials.expires
          };
          // Persist the refreshed token set back to the config file.
          await this.addProvider(providerConfig);
          return oauthProvider.getApiKey(providerConfig.oauth);
        }
      } catch {
        // Refresh failed: fall through and try the (possibly stale) token.
      }
    }
    try {
      const { getOAuthProvider: getOAuthProvider2 } = await import("@mariozechner/pi-ai/oauth");
      const oauthProvider = getOAuthProvider2(providerConfig.name);
      if (oauthProvider) {
        return oauthProvider.getApiKey(oauth);
      }
    } catch {
      // OAuth module unavailable: fall back to the raw access token.
    }
    return oauth.access;
  }
};
|
|
312
|
+
|
|
313
|
+
// src/session-manager.ts
|
|
314
|
+
import { writeFile as writeFile2, appendFile } from "fs/promises";
|
|
315
|
+
import { existsSync as existsSync2 } from "fs";
|
|
316
|
+
import { createReadStream } from "fs";
|
|
317
|
+
import { createInterface } from "readline";
|
|
318
|
+
/**
 * Reads and appends JSONL chat-session files.
 *
 * Every method is a no-op when no sessionPath was supplied, so callers can
 * construct this unconditionally and only persist when a session is active.
 * One JSON object per line; each message must carry `role` and `content`.
 */
var SessionManager = class {
  // Path of the JSONL session file, or undefined when sessions are disabled.
  sessionPath;
  constructor(sessionPath) {
    this.sessionPath = sessionPath;
  }
  /**
   * Load messages from the session file, streaming line by line.
   * Returns an empty array if no path is set or the file doesn't exist.
   * Blank lines are skipped.
   * @returns {Promise<object[]>} parsed messages in file order
   * @throws {PAIError} exit code 4 on malformed JSONL (with line number) or read failure
   */
  async loadMessages() {
    if (!this.sessionPath || !existsSync2(this.sessionPath)) {
      return [];
    }
    try {
      const messages = [];
      // Stream + readline keeps memory flat for large session files.
      const fileStream = createReadStream(this.sessionPath, "utf-8");
      const rl = createInterface({
        input: fileStream,
        crlfDelay: Infinity
      });
      let lineNumber = 0;
      for await (const line of rl) {
        lineNumber++;
        if (line.trim() === "") {
          continue;
        }
        try {
          const message = JSON.parse(line);
          // Minimal schema check; content may be any JSON value except undefined.
          if (!message.role || message.content === void 0) {
            throw new Error("Missing required fields: role and content");
          }
          messages.push(message);
        } catch (error) {
          throw new PAIError(
            `Malformed JSONL at line ${lineNumber}`,
            4,
            {
              path: this.sessionPath,
              line: lineNumber,
              error: error instanceof Error ? error.message : String(error)
            }
          );
        }
      }
      return messages;
    } catch (error) {
      if (error instanceof PAIError) {
        throw error;
      }
      throw new PAIError(
        "Failed to read session file",
        4,
        { path: this.sessionPath, error: String(error) }
      );
    }
  }
  /**
   * Append a single message to the session file, creating it if needed.
   * Mutates `message` to stamp an ISO timestamp when absent.
   * @throws {PAIError} exit code 4 on write failure
   */
  async appendMessage(message) {
    if (!this.sessionPath) {
      return;
    }
    if (!message.timestamp) {
      message.timestamp = (/* @__PURE__ */ new Date()).toISOString();
    }
    try {
      const line = JSON.stringify(message) + "\n";
      if (existsSync2(this.sessionPath)) {
        await appendFile(this.sessionPath, line, "utf-8");
      } else {
        await writeFile2(this.sessionPath, line, "utf-8");
      }
    } catch (error) {
      throw new PAIError(
        "Failed to write to session file",
        4,
        { path: this.sessionPath, error: String(error) }
      );
    }
  }
  /**
   * Append multiple messages to the session file in one write.
   * Unlike appendMessage, the inputs are copied (not mutated) when stamping
   * timestamps.
   * TODO: Implement atomic write for concurrent access.
   * @throws {PAIError} exit code 4 on write failure
   */
  async appendMessages(messages) {
    if (!this.sessionPath || messages.length === 0) {
      return;
    }
    const timestampedMessages = messages.map((msg) => ({
      ...msg,
      timestamp: msg.timestamp || (/* @__PURE__ */ new Date()).toISOString()
    }));
    try {
      const lines = timestampedMessages.map((msg) => JSON.stringify(msg)).join("\n") + "\n";
      if (existsSync2(this.sessionPath)) {
        await appendFile(this.sessionPath, lines, "utf-8");
      } else {
        await writeFile2(this.sessionPath, lines, "utf-8");
      }
    } catch (error) {
      throw new PAIError(
        "Failed to write to session file",
        4,
        { path: this.sessionPath, error: String(error) }
      );
    }
  }
  /**
   * Get the session file path (undefined when sessions are disabled).
   */
  getSessionPath() {
    return this.sessionPath;
  }
};
|
|
434
|
+
|
|
435
|
+
// src/input-resolver.ts
|
|
436
|
+
import { readFile as readFile2 } from "fs/promises";
|
|
437
|
+
/**
 * Resolves the user's prompt content from exactly one of: a positional
 * message, stdin, or a file — optionally combined with image attachments.
 * Also resolves the system instruction from text or file.
 */
var InputResolver = class {
  /**
   * Resolve user input from message/stdin/file/images.
   * Exactly one text source must be provided (mutual exclusivity enforced).
   * @param {{message?: string, stdin?: boolean, file?: string, images?: string[]}} source
   * @returns {Promise<string|object[]>} plain text, or a content array of
   *   text + image parts when images are attached
   * @throws {PAIError} exit code 1 on zero or multiple sources; 4 on read failures
   */
  async resolveUserInput(source) {
    // Count how many of the three text sources were supplied.
    const sourceCount = [
      source.message !== void 0,
      source.stdin === true,
      source.file !== void 0
    ].filter(Boolean).length;
    if (sourceCount > 1) {
      throw new PAIError(
        "Multiple input sources specified",
        1,
        { message: "Cannot use both positional argument and --input-file or stdin" }
      );
    }
    if (sourceCount === 0) {
      throw new PAIError(
        "No user input provided",
        1,
        { message: "Provide input via argument, stdin, or --input-file" }
      );
    }
    let textContent;
    if (source.message !== void 0) {
      textContent = source.message;
    } else if (source.stdin) {
      textContent = await this.readStdin();
    } else if (source.file) {
      textContent = await this.readFile(source.file);
    } else {
      // Unreachable given the count check above; defensive fallback.
      throw new PAIError("No input source available", 1);
    }
    // With images, return a multi-part content array (text part first).
    if (source.images && source.images.length > 0) {
      const content = [{ type: "text", text: textContent }];
      for (const imagePath of source.images) {
        const imageData = await this.readImage(imagePath);
        content.push(imageData);
      }
      return content;
    }
    return textContent;
  }
  /**
   * Resolve the system instruction from inline text or a file (mutually
   * exclusive). Returns undefined when neither is given.
   * @throws {PAIError} exit code 1 when both are given; 4 on read failure
   */
  async resolveSystemInput(systemText, systemFile) {
    if (systemText && systemFile) {
      throw new PAIError(
        "Multiple system instruction sources specified",
        1,
        { message: "Cannot use both --system and --system-file" }
      );
    }
    if (systemFile) {
      return await this.readFile(systemFile);
    }
    return systemText;
  }
  /**
   * Read all of stdin to a string (resolves on stream end).
   * @throws {PAIError} exit code 4 on a stdin stream error
   */
  async readStdin() {
    return new Promise((resolve, reject) => {
      let data = "";
      process.stdin.setEncoding("utf-8");
      process.stdin.on("data", (chunk) => {
        data += chunk;
      });
      process.stdin.on("end", () => {
        resolve(data);
      });
      process.stdin.on("error", (error) => {
        reject(
          new PAIError(
            "Failed to read from stdin",
            4,
            { error: String(error) }
          )
        );
      });
    });
  }
  /**
   * Read a text file as UTF-8.
   * @throws {PAIError} exit code 4 on read failure
   */
  async readFile(path) {
    try {
      return await readFile2(path, "utf-8");
    } catch (error) {
      throw new PAIError(
        "Failed to read file",
        4,
        { path, error: String(error) }
      );
    }
  }
  /**
   * Read an image file and base64-encode it as a pi-ai image content part.
   * MIME type is guessed from the file extension (png/gif/webp), defaulting
   * to image/jpeg for anything else.
   * @throws {PAIError} exit code 4 on read failure
   */
  async readImage(path) {
    try {
      const buffer = await readFile2(path);
      const base64Data = buffer.toString("base64");
      const ext = path.toLowerCase().split(".").pop();
      let mimeType = "image/jpeg";
      if (ext === "png") mimeType = "image/png";
      else if (ext === "gif") mimeType = "image/gif";
      else if (ext === "webp") mimeType = "image/webp";
      return {
        type: "image",
        data: base64Data,
        mimeType
      };
    } catch (error) {
      throw new PAIError(
        "Failed to read image file",
        4,
        { path, error: String(error) }
      );
    }
  }
};
|
|
562
|
+
|
|
563
|
+
// src/output-formatter.ts
|
|
564
|
+
import { appendFile as appendFile2, writeFile as writeFile3 } from "fs/promises";
|
|
565
|
+
import { existsSync as existsSync3 } from "fs";
|
|
566
|
+
|
|
567
|
+
// src/sanitize.ts
|
|
568
|
+
// Object keys whose values are always redacted by sanitizeContent.
// Mostly lowercase (matching is done on k.toLowerCase() as well as the raw
// key), with "apiKey" listed explicitly in its camelCase form.
var SENSITIVE_KEYS = /* @__PURE__ */ new Set([
  "apikey",
  "api_key",
  "apiKey",
  "access",
  "access_token",
  "refresh",
  "refresh_token",
  "token",
  "oauth",
  "client_secret",
  "client-secret",
  "secret",
  "password",
  "passwd",
  "authorization",
  "auth"
]);
|
|
586
|
+
/**
 * True when `v` is a plain record-like value: a non-null object that is
 * not an array.
 */
function isObject(v) {
  if (v === null) {
    return false;
  }
  if (Array.isArray(v)) {
    return false;
  }
  return typeof v === "object";
}
|
|
589
|
+
/**
 * Mask the middle of a secret string, keeping `keep` characters visible at
 * each end. Strings too short to safely show both ends are fully masked.
 *
 * @param {string} s - the secret to mask
 * @param {number} [keep=4] - characters preserved at each end
 * @returns {string} masked string of the same length as `s`
 */
function maskPartial(s, keep = 4) {
  const visible = keep * 2;
  if (s.length <= visible) {
    // Too short to reveal anything without leaking most of the secret.
    return "*".repeat(s.length);
  }
  const head = s.slice(0, keep);
  const tail = s.slice(-keep);
  return head + "*".repeat(s.length - visible) + tail;
}
|
|
593
|
+
// Regex/replacement pairs applied by sanitizeString. Each `replace` receives
// the full match; several partially mask (maskPartial) so key prefixes stay
// recognizable in logs, others redact entirely.
var PATTERNS = [
  // URL query params like ?key=... or &token=...
  // NOTE(review): this entry's replacement drops the ENTIRE match (URL prefix
  // included), unlike sanitizeString's dedicated URL pass which preserves the
  // prefix. Both target the same regex — confirm which behavior is intended.
  { re: /(https?:\/\/[^\s]*?[?&](?:api_key|apikey|key|token|access_token|auth)=)([^&\s]+)/gi, replace: (_m) => "***REDACTED***" },
  // OpenAI keys
  { re: /\bsk-[A-Za-z0-9\-_]{16,}\b/g, replace: (m) => maskPartial(m, 6) },
  // Anthropic-ish
  { re: /\bsk-ant-[A-Za-z0-9\-_]{8,}\b/g, replace: (m) => maskPartial(m, 6) },
  // HuggingFace
  { re: /\bhf_[A-Za-z0-9\-_]{16,}\b/g, replace: (m) => maskPartial(m, 6) },
  // Bearer tokens (simple)
  { re: /\bBearer\s+[A-Za-z0-9\-\._=\/+]{8,}\b/gi, replace: () => "Bearer ***REDACTED***" },
  // JWT-ish (three base64url parts)
  { re: /\b[a-zA-Z0-9-_]{10,}\.[a-zA-Z0-9-_]{10,}\.[a-zA-Z0-9-_]{8,}\b/g, replace: () => "JWT <redacted>" },
  // Long base64-like strings (avoid short hashes)
  { re: /\b[A-Za-z0-9+\/]{40,}={0,2}\b/g, replace: () => "***REDACTED***" }
];
|
|
609
|
+
/**
 * Redact secret material from a string.
 *
 * Two passes:
 *  1. URL query-parameter values (?key=..., &token=..., etc.) are redacted
 *     in place, preserving the URL prefix so logs stay readable.
 *  2. The remaining generic PATTERNS (API-key shapes, bearer tokens, JWTs,
 *     long base64 runs) are applied across the whole string.
 *
 * @param {string} input - text to sanitize (falsy input is returned as-is)
 * @returns {{sanitized: string, secrets: string[]}} the redacted text plus
 *   every raw matched secret, for callers that scrub further
 */
function sanitizeString(input) {
  if (!input) return { sanitized: input, secrets: [] };
  let out = input;
  const found = [];
  // Pass 1: redact URL query-param values but keep the URL itself readable.
  out = out.replace(/(https?:\/\/[^\s]*?[?&](?:api_key|apikey|key|token|access_token|auth)=)([^&\s]+)/gi, (_, prefix, secret) => {
    found.push(secret);
    return prefix + "***REDACTED***";
  });
  // Pass 2: generic secret patterns. PATTERNS[0] is the same URL query-param
  // regex handled above; running it again would re-match the already-redacted
  // placeholder value, wipe out the entire URL (prefix included), and record
  // "***REDACTED***" as a bogus secret — so it is skipped here.
  for (const p of PATTERNS.slice(1)) {
    out = out.replace(p.re, (m) => {
      found.push(m);
      try {
        return p.replace(m);
      } catch {
        // A throwing replacer must never leak the match: redact fully.
        return "***REDACTED***";
      }
    });
  }
  return { sanitized: out, secrets: found };
}
|
|
629
|
+
/**
 * Recursively redact secrets from an arbitrary JSON-like value.
 *
 * Strings are passed through sanitizeString; arrays and plain objects are
 * walked recursively; any object entry whose key is in SENSITIVE_KEYS
 * (case-insensitive or exact match) has its value replaced wholesale with
 * "***REDACTED***". Other value types are returned untouched.
 *
 * @param {*} value - value to sanitize (not mutated; objects/arrays are copied)
 * @returns {{sanitized: *, secrets: string[]}} redacted copy plus all raw
 *   secrets collected along the way
 */
function sanitizeContent(value) {
  const secrets = [];
  function _sanitize(v) {
    if (v === null || v === void 0) return v;
    if (typeof v === "string") {
      const { sanitized: sanitized2, secrets: s } = sanitizeString(v);
      secrets.push(...s);
      return sanitized2;
    }
    if (Array.isArray(v)) {
      return v.map((item) => _sanitize(item));
    }
    if (isObject(v)) {
      const out = {};
      for (const [k, val] of Object.entries(v)) {
        try {
          // Match both lowercased and raw key forms against the deny-list.
          if (SENSITIVE_KEYS.has(k.toLowerCase()) || SENSITIVE_KEYS.has(k)) {
            if (typeof val === "string") {
              // Record the raw secret so callers can scrub it elsewhere too.
              secrets.push(String(val));
              out[k] = "***REDACTED***";
            } else {
              // Non-string sensitive values (e.g. oauth objects) are redacted
              // wholesale without recording contents.
              out[k] = "***REDACTED***";
            }
          } else {
            out[k] = _sanitize(val);
          }
        } catch (e) {
          // Defensive fallback: if anything above throws, stringify the value
          // and sanitize that text instead of failing the whole walk.
          const str = String(val);
          const { sanitized: sanitized2, secrets: s } = sanitizeString(str);
          secrets.push(...s);
          out[k] = sanitized2;
        }
      }
      return out;
    }
    // Numbers, booleans, functions, etc. pass through unchanged.
    return v;
  }
  const sanitized = _sanitize(value);
  return { sanitized, secrets };
}
|
|
669
|
+
|
|
670
|
+
// src/output-formatter.ts
|
|
671
|
+
/**
 * Routes CLI output: model text to stdout, progress/diagnostics to stderr
 * (NDJSON or human-readable), and an optional Markdown log file.
 *
 * All stderr/log output is passed through the sanitizers before emission;
 * log-file writes are best-effort (failures are swallowed via .catch).
 */
var OutputFormatter = class {
  // When true, progress/errors on stderr are emitted as NDJSON lines.
  jsonMode;
  // When true, writeProgress is suppressed entirely.
  quietMode;
  // Optional Markdown log file path; undefined disables all logging methods.
  logFile;
  constructor(jsonMode = false, quietMode = false, logFile) {
    this.jsonMode = jsonMode;
    this.quietMode = quietMode;
    this.logFile = logFile;
  }
  /**
   * Write model output to stdout (always, regardless of mode).
   * A sanitized copy is appended to the log file when one is configured.
   */
  writeModelOutput(content) {
    process.stdout.write(content);
    if (this.logFile) {
      const { sanitized } = sanitizeString(content);
      // Best-effort: logging must never break the output stream.
      this.appendToLog("assistant", sanitized).catch(() => {
      });
    }
  }
  /**
   * Write a progress/diagnostic event to stderr (sanitized), either as
   * NDJSON (json mode) or human-readable. Suppressed in quiet mode.
   */
  writeProgress(event) {
    if (this.quietMode) return;
    if (this.jsonMode) {
      const sc = sanitizeContent(event.data);
      const outEvent = { ...event, data: sc.sanitized, timestamp: event.timestamp || Date.now() };
      this.writeNDJSON(outEvent);
    } else {
      const sc = sanitizeContent(event.data);
      const outEvent = { ...event, data: sc.sanitized };
      this.writeHumanReadable(outEvent);
    }
  }
  /**
   * Write an error to stderr (sanitized message and, for PAIError, context),
   * and mirror it to the log file when configured.
   */
  writeError(error) {
    if (this.jsonMode) {
      const errorObj = {
        type: "error",
        message: sanitizeString(error.message).sanitized,
        ...error instanceof PAIError && error.context ? { context: sanitizeContent(error.context).sanitized } : {}
      };
      process.stderr.write(JSON.stringify(errorObj) + "\n");
    } else {
      process.stderr.write(`Error: ${sanitizeString(error.message).sanitized}
`);
      if (error instanceof PAIError && error.context) {
        process.stderr.write(`Context: ${JSON.stringify(sanitizeContent(error.context).sanitized)}
`);
      }
    }
    if (this.logFile) {
      const msg = sanitizeString(error.message).sanitized;
      const ctx = error instanceof PAIError && error.context ? JSON.stringify(sanitizeContent(error.context).sanitized) : "{}";
      this.appendToLog("error", `${msg}

Context: ${ctx}`).catch(() => {
      });
    }
  }
  /**
   * Write one NDJSON event line to stderr, stamping a timestamp if absent.
   */
  writeNDJSON(event) {
    const line = JSON.stringify({
      ...event,
      timestamp: event.timestamp || Date.now()
    });
    process.stderr.write(line + "\n");
  }
  /**
   * Write a human-readable rendering of an event to stderr.
   * "chunk" events are intentionally silent (model text goes to stdout).
   */
  writeHumanReadable(event) {
    switch (event.type) {
      case "start": {
        const d = event.data;
        const parts = [`Starting request (${d.provider || "?"}/${d.model || "?"}`];
        if (d.messages) parts.push(`${d.messages} msgs`);
        if (d.tools) parts.push(`${d.tools} tools`);
        if (d.stream) parts.push("stream");
        process.stderr.write(parts.join(", ") + ")...\n");
        break;
      }
      case "chunk":
        break;
      case "tool_call":
        process.stderr.write(`Tool call: ${JSON.stringify(event.data)}
`);
        break;
      case "tool_result":
        process.stderr.write(`Tool result: ${JSON.stringify(event.data)}
`);
        break;
      case "complete": {
        const d = event.data;
        const parts = ["Request complete"];
        if (d.finishReason) parts.push(`reason=${d.finishReason}`);
        if (d.usage) parts.push(`tokens: in=${d.usage.input} out=${d.usage.output}`);
        process.stderr.write(parts.join(", ") + ".\n");
        break;
      }
      case "error":
        process.stderr.write(`Error: ${event.data}
`);
        break;
    }
  }
  /**
   * Append an entry to the Markdown log file, writing a "# Chat Log" header
   * on first creation.
   * @throws {PAIError} exit code 4 on write failure (callers .catch this)
   */
  async appendToLog(role, content) {
    if (!this.logFile) return;
    try {
      const timestamp = (/* @__PURE__ */ new Date()).toISOString();
      const entry = `
### ${role.charAt(0).toUpperCase() + role.slice(1)} (${timestamp})

${content}
`;
      if (existsSync3(this.logFile)) {
        await appendFile2(this.logFile, entry, "utf-8");
      } else {
        const header = `# Chat Log

Generated: ${timestamp}
`;
        await writeFile3(this.logFile, header + entry, "utf-8");
      }
    } catch (error) {
      throw new PAIError(
        "Failed to write to log file",
        4,
        { path: this.logFile, error: String(error) }
      );
    }
  }
  /**
   * Log a user message (best-effort; no-op without a log file).
   */
  async logUserMessage(content) {
    if (this.logFile) {
      await this.appendToLog("user", content).catch(() => {
      });
    }
  }
  /**
   * Log a system message (best-effort; no-op without a log file).
   */
  async logSystemMessage(content) {
    if (this.logFile) {
      await this.appendToLog("system", content).catch(() => {
      });
    }
  }
  /**
   * Log a request summary (provider, model, optional parameters).
   */
  async logRequestSummary(info) {
    if (this.logFile) {
      const lines = [
        `Provider: ${info.provider}`,
        `Model: ${info.model}`
      ];
      if (info.temperature !== void 0) lines.push(`Temperature: ${info.temperature}`);
      if (info.maxTokens !== void 0) lines.push(`Max Tokens: ${info.maxTokens}`);
      if (info.stream) lines.push(`Stream: true`);
      await this.appendToLog("request", lines.join("\n")).catch(() => {
      });
    }
  }
  /**
   * Log a tool call with its JSON arguments in a fenced code block.
   */
  async logToolCall(name, args) {
    if (this.logFile) {
      const content = `**Tool:** ${name}
\`\`\`json
${JSON.stringify(args, null, 2)}
\`\`\``;
      await this.appendToLog("tool_call", content).catch(() => {
      });
    }
  }
  /**
   * Log a tool result with its JSON payload in a fenced code block.
   */
  async logToolResult(name, result) {
    if (this.logFile) {
      const content = `**Tool:** ${name}
\`\`\`json
${JSON.stringify(result, null, 2)}
\`\`\``;
      await this.appendToLog("tool_result", content).catch(() => {
      });
    }
  }
  /**
   * Log an error and, for PAIError, its context.
   * NOTE(review): unlike writeError, the message and context are NOT passed
   * through the sanitizers here — confirm whether that is intentional.
   */
  async logError(error) {
    if (this.logFile) {
      const detail = error instanceof PAIError && error.context ? `

Context: ${JSON.stringify(error.context)}` : "";
      await this.appendToLog("error", `${error.message}${detail}`).catch(() => {
      });
    }
  }
};
|
|
885
|
+
|
|
886
|
+
// src/llm-client.ts
|
|
887
|
+
import { getModel, stream, complete } from "@mariozechner/pi-ai";
|
|
888
|
+
/**
 * Resolve a pi-ai model descriptor from a PAI provider configuration.
 *
 * Resolution order:
 *   1. When the config pins an explicit `api`, synthesize a descriptor
 *      directly from the config fields, with conservative defaults.
 *   2. Otherwise look the model up in pi-ai's registry via getModel().
 *   3. If the registry lookup throws (unknown provider/model), fall back
 *      to a generic "openai-completions" descriptor with default limits.
 */
function buildModel(config) {
  const zeroCost = { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 };
  if (config.api) {
    return {
      id: config.model,
      name: config.model,
      api: config.api,
      provider: config.provider,
      baseUrl: config.baseUrl || "",
      reasoning: config.reasoning ?? false,
      input: config.input ?? ["text"],
      cost: zeroCost,
      contextWindow: config.contextWindow ?? 128e3,
      maxTokens: config.maxTokens ?? 16384
    };
  }
  try {
    return getModel(config.provider, config.model);
  } catch {
    // Registry miss: assume an OpenAI-compatible completions endpoint.
    return {
      id: config.model,
      name: config.model,
      api: "openai-completions",
      provider: config.provider,
      baseUrl: config.baseUrl || "",
      reasoning: false,
      input: ["text"],
      cost: zeroCost,
      contextWindow: 128e3,
      maxTokens: 16384
    };
  }
}
|
|
920
|
+
/**
 * Client wrapper around @mariozechner/pi-ai.
 * Converts PAI's OpenAI-style messages (role/content/tool_calls) into
 * pi-ai's context shape, invokes stream()/complete(), and normalizes the
 * results into { content, finishReason, toolCalls?, usage? } responses.
 */
var LLMClient = class {
  // The PAI provider/model configuration this client was created with.
  config;
  // Resolved pi-ai model descriptor (see buildModel).
  model;
  constructor(config) {
    this.config = config;
    this.model = buildModel(config);
  }
  /**
   * Chat with streaming responses.
   * Yields a { content, finishReason: "streaming" } chunk for every text
   * delta, then one final response carrying the full accumulated content,
   * the provider's finish reason, collected tool calls and usage (when
   * the provider reports it). Throws on provider "error" events.
   */
  async *chat(messages, tools) {
    const context = this.buildContext(messages, tools);
    const options = this.buildOptions();
    const streamResult = stream(this.model, context, options);
    let currentContent = "";
    let currentToolCalls = [];
    for await (const event of streamResult) {
      switch (event.type) {
        case "text_delta":
          currentContent += event.delta;
          // Surface the delta immediately; the sentinel finishReason
          // "streaming" marks it as a partial chunk.
          yield {
            content: event.delta,
            finishReason: "streaming"
          };
          break;
        case "toolcall_end":
          // Tool calls are buffered and attached to the final response.
          currentToolCalls.push({
            id: event.toolCall.id,
            name: event.toolCall.name,
            arguments: event.toolCall.arguments
          });
          break;
        case "done": {
          const usageData = event.usage;
          // Final response: accumulated text plus optional usage/cost,
          // spread in only when the provider supplied the data.
          const doneResponse = {
            content: currentContent,
            finishReason: event.reason,
            ...usageData ? {
              usage: {
                input: usageData.input ?? 0,
                output: usageData.output ?? 0,
                ...usageData.cost ? { cost: { total: usageData.cost.total ?? 0 } } : {}
              }
            } : {}
          };
          if (currentToolCalls.length > 0) {
            doneResponse.toolCalls = currentToolCalls;
          }
          yield doneResponse;
          break;
        }
        case "error":
          throw new Error(event.error.errorMessage || "LLM request failed");
      }
    }
  }
  /**
   * Chat without streaming (complete response).
   * Flattens pi-ai's content blocks into one text string plus a list of
   * tool calls, and maps stopReason/usage onto the response shape.
   */
  async chatComplete(messages, tools) {
    const context = this.buildContext(messages, tools);
    const options = this.buildOptions();
    const result = await complete(this.model, context, options);
    let content = "";
    const toolCalls = [];
    for (const block of result.content) {
      if (block.type === "text") {
        content += block.text;
      } else if (block.type === "toolCall") {
        toolCalls.push({
          id: block.id,
          name: block.name,
          arguments: block.arguments
        });
      }
    }
    const response = {
      content,
      finishReason: result.stopReason,
      ...result.usage ? {
        usage: {
          input: result.usage.input ?? 0,
          output: result.usage.output ?? 0,
          ...result.usage.cost ? { cost: { total: result.usage.cost.total ?? 0 } } : {}
        }
      } : {}
    };
    if (toolCalls.length > 0) {
      response.toolCalls = toolCalls;
    }
    return response;
  }
  /**
   * Build context for pi-ai.
   * Maps each PAI message onto pi-ai's message kinds: a leading system
   * message is hoisted into context.systemPrompt, "tool" messages become
   * "toolResult" entries, and tool declarations are attached when given.
   */
  buildContext(messages, tools) {
    const piMessages = messages.map((msg) => {
      if (msg.role === "system") {
        return { role: "system", content: String(msg.content) };
      } else if (msg.role === "user") {
        return { role: "user", content: this.formatContent(msg.content), timestamp: Date.now() };
      } else if (msg.role === "assistant") {
        return this.buildAssistantMessage(msg);
      } else if (msg.role === "tool") {
        return {
          role: "toolResult",
          toolCallId: msg.tool_call_id || "",
          toolName: msg.name || "",
          content: [{ type: "text", text: String(msg.content) }],
          isError: false,
          timestamp: Date.now()
        };
      }
      // Unknown roles are treated as user text so nothing is dropped.
      return { role: "user", content: String(msg.content), timestamp: Date.now() };
    });
    let systemPrompt;
    let contextMessages = piMessages;
    // pi-ai takes the system prompt out-of-band rather than as message #0.
    if (piMessages.length > 0 && piMessages[0]?.role === "system") {
      systemPrompt = String(piMessages[0].content);
      contextMessages = piMessages.slice(1);
    }
    const context = {
      messages: contextMessages
    };
    if (systemPrompt) {
      context.systemPrompt = systemPrompt;
    }
    if (tools && tools.length > 0) {
      context.tools = tools.map((tool) => ({
        name: tool.name,
        description: tool.description,
        parameters: tool.parameters
      }));
    }
    return context;
  }
  /**
   * Build a pi-ai AssistantMessage from a PAI Message.
   * pi-ai expects assistant messages with content as an array of typed blocks.
   * Usage figures are zero-filled because replayed history messages carry
   * no provider usage data.
   */
  buildAssistantMessage(msg) {
    const contentBlocks = [];
    const textContent = typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content);
    if (textContent) {
      contentBlocks.push({ type: "text", text: textContent });
    }
    const toolCalls = msg.tool_calls;
    if (toolCalls && Array.isArray(toolCalls)) {
      for (const tc of toolCalls) {
        contentBlocks.push({
          type: "toolCall",
          id: tc.id,
          name: tc.name,
          arguments: tc.arguments
        });
      }
    }
    return {
      role: "assistant",
      content: contentBlocks,
      api: this.model.api,
      provider: this.model.provider,
      model: this.model.id,
      usage: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, totalTokens: 0, cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 } },
      // NOTE(review): any present tool_calls value (even an empty array)
      // selects "toolUse" here; callers in this file only set tool_calls
      // when the array is non-empty.
      stopReason: toolCalls ? "toolUse" : "stop",
      timestamp: Date.now()
    };
  }
  /**
   * Format message content for pi-ai.
   * Strings and block arrays pass through unchanged; other objects are
   * serialized to JSON text.
   */
  formatContent(content) {
    if (typeof content === "string") {
      return content;
    }
    if (Array.isArray(content)) {
      return content;
    }
    if (typeof content === "object") {
      return JSON.stringify(content);
    }
    return String(content);
  }
  /**
   * Build options for pi-ai, including provider-specific options.
   * providerOptions are shallow-merged last, so they may override
   * temperature/maxTokens set above.
   */
  buildOptions() {
    const options = {
      apiKey: this.config.apiKey
    };
    if (this.config.temperature !== void 0) {
      options.temperature = this.config.temperature;
    }
    if (this.config.maxTokens !== void 0) {
      options.maxTokens = this.config.maxTokens;
    }
    if (this.config.providerOptions) {
      Object.assign(options, this.config.providerOptions);
    }
    return options;
  }
  /**
   * Get model information (the resolved pi-ai model descriptor).
   */
  getModel() {
    return this.model;
  }
};
|
|
1128
|
+
|
|
1129
|
+
// src/tools/bash-exec.ts
|
|
1130
|
+
import { exec, execSync } from "child_process";
|
|
1131
|
+
import { platform } from "os";
|
|
1132
|
+
// Upper bound on captured stdout/stderr per command (exec maxBuffer), in MiB.
var BASH_EXEC_TOOL_MAX_OUTPUT_MB = 8;
// Hard timeout for a single command, in seconds (one hour).
var BASH_EXEC_TOOL_MAX_TIMEOUT_S = 3600;
|
|
1134
|
+
/**
 * Determine the shell passed to child_process.exec for bash_exec.
 * Non-Windows platforms always get "bash". On Windows: prefer a
 * bash-like $SHELL if exported, then probe the PATH by running
 * `bash --version`; if no bash is reachable, throw with installation
 * guidance, since the tool cannot work without it.
 */
function detectShell() {
  if (platform() !== "win32") {
    return "bash";
  }
  const envShell = process.env.SHELL;
  if (envShell && /bash/i.test(envShell)) {
    return envShell;
  }
  try {
    // Probe for bash on PATH; output is discarded, only success matters.
    execSync("bash --version", { stdio: "ignore", timeout: 3e3 });
    return "bash";
  } catch {
    // No bash found — fall through to the error below.
  }
  throw new Error(
    "bash is required but was not found on this Windows system. Please install one of: Git Bash, MSYS2, Cygwin, or use WSL2."
  );
}
|
|
1152
|
+
// Tool description surfaced to the model as part of the bash_exec schema.
var BASH_EXEC_DESCRIPTION = "Execute a shell command and return the result. Supports pipes, redirections, xargs, heredocs, and shell scripts. Use cwd parameter to set working directory. Running on bash.";
|
|
1153
|
+
/**
 * Build the bash_exec tool definition exposed to the model.
 * Detects a usable bash up front (throws on Windows without bash — see
 * detectShell) and returns the JSON-schema tool description plus an async
 * handler that shells out via child_process.exec with capped output size
 * and timeout. The handler never rejects: failures surface through
 * exitCode/stderr in the resolved result.
 */
function createBashExecTool() {
  const shell = detectShell();
  const runCommand = (command, cwd) => new Promise((resolve) => {
    const execOptions = {
      shell,
      maxBuffer: BASH_EXEC_TOOL_MAX_OUTPUT_MB * 1024 * 1024,
      timeout: BASH_EXEC_TOOL_MAX_TIMEOUT_S * 1e3,
      // Capture raw bytes; decoded to UTF-8 below.
      encoding: "buffer"
    };
    if (cwd) {
      execOptions.cwd = cwd;
    }
    exec(command, execOptions, (error, stdout, stderr) => {
      resolve({
        stdout: (stdout || Buffer.alloc(0)).toString("utf-8"),
        stderr: (stderr || Buffer.alloc(0)).toString("utf-8"),
        exitCode: error ? error.code ?? 1 : 0
      });
    });
  });
  return {
    name: "bash_exec",
    description: BASH_EXEC_DESCRIPTION,
    parameters: {
      type: "object",
      properties: {
        command: {
          type: "string",
          description: "The shell command to execute. Supports bash syntax including pipes, redirections, xargs, heredocs, etc."
        },
        cwd: {
          type: "string",
          description: "Optional working directory for command execution"
        },
        comment: {
          type: "string",
          description: "very short briefing about intention and reason of this tool call, improve observability and auditability to the user."
        }
      },
      required: ["command", "comment"]
    },
    handler: async (args) => {
      if (!args.command) {
        return {
          stdout: "",
          stderr: "Error: empty command",
          exitCode: 1
        };
      }
      return runCommand(args.command, args.cwd);
    }
  };
}
|
|
1207
|
+
|
|
1208
|
+
// src/tool-registry.ts
|
|
1209
|
+
/**
 * In-memory registry mapping tool names to tool definitions.
 * The bash_exec built-in is registered at construction time.
 */
var ToolRegistry = class {
  // Map<string, tool> keyed by the tool's declared name.
  tools;
  constructor() {
    this.tools = /* @__PURE__ */ new Map();
    this.registerBuiltinTools();
  }
  /**
   * Register the tools that ship with PAI.
   */
  registerBuiltinTools() {
    this.register(createBashExecTool());
  }
  /**
   * Add a tool (or replace an existing one) under its declared name.
   */
  register(tool) {
    this.tools.set(tool.name, tool);
  }
  /**
   * Look up a tool by name; undefined when not registered.
   */
  get(name) {
    return this.tools.get(name);
  }
  /**
   * All registered tools, in registration order.
   */
  getAll() {
    return [...this.tools.values()];
  }
  /**
   * Run the named tool's handler with the given arguments.
   * Throws when no tool is registered under that name.
   */
  async execute(name, args) {
    const tool = this.get(name);
    if (!tool) {
      throw new Error(`Tool not found: ${name}`);
    }
    return await tool.handler(args);
  }
  /**
   * Whether a tool with this name is registered.
   */
  has(name) {
    return this.tools.has(name);
  }
  /**
   * Number of registered tools.
   */
  size() {
    return this.tools.size;
  }
};
|
|
1262
|
+
|
|
1263
|
+
// src/commands/chat.ts
|
|
1264
|
+
import { getModels } from "@mariozechner/pi-ai";
|
|
1265
|
+
// Safety cap on model/tool-call turns per chat invocation; overridable via
// options.maxTurns in handleChatCommand.
var DEFAULT_MAX_TURNS = 100;
|
|
1266
|
+
/**
 * Implementation of the chat command: one full (possibly agentic) exchange.
 *
 * Flow:
 *   1. Validate numeric options (temperature, maxTokens).
 *   2. Resolve provider, model and credentials from config/CLI options.
 *   3. Optionally short-circuit with a dry-run config dump to stderr.
 *   4. Load session history and splice in the system/user input.
 *   5. Loop: call the model, execute requested tools, feed results back,
 *      until the model stops requesting tools or the turn limit is hit
 *      (one extra tool-free "final summary" round is allowed).
 *   6. Append the newly created messages to the session, when enabled.
 *
 * Errors are logged, printed, and terminate the process: a PAIError keeps
 * its own exit code, anything else exits with code 2.
 *
 * @param prompt  Optional positional user prompt (stdin or options.inputFile
 *                are the alternative input channels).
 * @param options Parsed CLI options for the chat command.
 */
async function handleChatCommand(prompt, options) {
  const configManager = new ConfigurationManager(options);
  const sessionManager = new SessionManager(options.session);
  const inputResolver = new InputResolver();
  const outputFormatter = new OutputFormatter(
    options.json,
    options.quiet,
    options.log
  );
  const toolRegistry = new ToolRegistry();
  try {
    // --- 1. Parameter validation (finite, in-range values only) ---
    if (options.temperature !== void 0) {
      if (isNaN(options.temperature) || !isFinite(options.temperature)) {
        throw new PAIError(
          "Invalid temperature value",
          1,
          { temperature: options.temperature, message: "Temperature must be a finite number" }
        );
      }
      if (options.temperature < 0 || options.temperature > 2) {
        throw new PAIError(
          "Invalid temperature value",
          1,
          { temperature: options.temperature, message: "Temperature must be between 0 and 2" }
        );
      }
    }
    if (options.maxTokens !== void 0) {
      if (isNaN(options.maxTokens) || !isFinite(options.maxTokens)) {
        throw new PAIError(
          "Invalid maxTokens value",
          1,
          { maxTokens: options.maxTokens, message: "maxTokens must be a finite number" }
        );
      }
      if (options.maxTokens <= 0) {
        throw new PAIError(
          "Invalid maxTokens value",
          1,
          { maxTokens: options.maxTokens, message: "maxTokens must be greater than 0" }
        );
      }
    }
    // --- 2. Provider/model/credential resolution ---
    const provider = await configManager.getProvider(options.provider);
    // Model precedence: explicit option, configured default, first listed
    // model, then the first model pi-ai knows for this provider.
    let modelName = options.model || provider.defaultModel || provider.models?.[0];
    if (!modelName) {
      try {
        const knownModels = getModels(provider.name);
        if (knownModels.length > 0) {
          modelName = knownModels[0].id;
        }
      } catch {
      }
    }
    if (!modelName) {
      throw new PAIError(
        "No model specified",
        1,
        { provider: provider.name, message: "Specify --model or configure a default model" }
      );
    }
    const apiKey = await configManager.resolveCredentials(provider.name, void 0);
    // --- 3. Dry run: print effective settings to stderr and stop ---
    if (options.dryRun) {
      const info = {
        provider: provider.name,
        model: modelName,
        configFile: configManager.getConfigPath(),
        temperature: options.temperature ?? provider.temperature,
        maxTokens: options.maxTokens ?? provider.maxTokens,
        stream: options.stream ?? false,
        credentialSource: "resolved"
      };
      process.stderr.write(JSON.stringify(info, null, 2) + "\n");
      return;
    }
    const llmClient = new LLMClient({
      provider: provider.name,
      model: modelName,
      apiKey,
      temperature: options.temperature ?? provider.temperature,
      maxTokens: options.maxTokens ?? provider.maxTokens,
      stream: options.stream,
      api: provider.api,
      baseUrl: provider.baseUrl,
      reasoning: provider.reasoning,
      input: provider.input,
      contextWindow: provider.contextWindow,
      providerOptions: provider.providerOptions
    });
    // --- 4. Assemble the message list ---
    // `messages` is the full conversation sent to the model;
    // `newMessages` tracks only what this invocation adds, for persistence.
    const messages = await sessionManager.loadMessages();
    const loadedMessageCount = messages.length;
    const newMessages = [];
    const systemInstruction = await inputResolver.resolveSystemInput(
      options.system,
      options.systemFile
    );
    if (systemInstruction) {
      // A new system instruction replaces any existing leading system
      // message rather than stacking a second one.
      if (messages.length > 0 && messages[0]?.role === "system") {
        messages[0] = { role: "system", content: systemInstruction };
      } else {
        const sysMsg = { role: "system", content: systemInstruction };
        messages.unshift(sysMsg);
        newMessages.push(sysMsg);
      }
      await outputFormatter.logSystemMessage(systemInstruction);
    }
    // stdin is only consulted when it is piped and no explicit prompt or
    // input file was given.
    const hasExplicitInput = prompt !== void 0 || options.inputFile !== void 0;
    const stdinAvailable = !process.stdin.isTTY && !hasExplicitInput;
    const userInput = await inputResolver.resolveUserInput({
      message: prompt,
      stdin: stdinAvailable,
      file: options.inputFile,
      images: options.image
    });
    const userMessage = { role: "user", content: userInput };
    // If the loaded session already ends on a user message, replace it
    // instead of appending a second consecutive user turn.
    const lastLoadedIsUser = loadedMessageCount > 0 && messages[messages.length - 1]?.role === "user";
    if (lastLoadedIsUser) {
      messages[messages.length - 1] = userMessage;
    } else {
      messages.push(userMessage);
      newMessages.push(userMessage);
    }
    await outputFormatter.logUserMessage(
      typeof userInput === "string" ? userInput : JSON.stringify(userInput)
    );
    await outputFormatter.logRequestSummary({
      provider: provider.name,
      model: modelName,
      temperature: options.temperature ?? provider.temperature,
      maxTokens: options.maxTokens ?? provider.maxTokens,
      stream: options.stream
    });
    // --- 5. Agentic loop ---
    const tools = toolRegistry.getAll();
    let continueLoop = true;
    let maxTurns = options.maxTurns ?? DEFAULT_MAX_TURNS;
    const turnsLimit = maxTurns;
    let finalRoundAttempted = false;
    // Size stats for the progress event only.
    const systemMsg = messages.find((m) => m.role === "system");
    const userMsg = [...messages].reverse().find((m) => m.role === "user");
    const systemChars = systemMsg ? String(systemMsg.content).length : 0;
    const userChars = userMsg ? typeof userMsg.content === "string" ? userMsg.content.length : JSON.stringify(userMsg.content).length : 0;
    while (continueLoop && maxTurns > 0) {
      maxTurns--;
      // On the very last allowed turn, withhold tools so the model is
      // forced to answer in text.
      const isLastTurn = maxTurns === 0;
      const currentTools = isLastTurn ? [] : tools;
      if (isLastTurn) {
        process.stderr.write(
          `[Info] Approaching tool-call turn limit. Requesting final text response from model.
`
        );
      }
      outputFormatter.writeProgress({ type: "start", data: {
        provider: provider.name,
        model: modelName,
        stream: options.stream ?? false,
        messages: messages.length,
        systemChars,
        userChars,
        tools: currentTools.length
      } });
      let assistantMessage;
      let lastResponse;
      if (options.stream) {
        // Streaming: echo deltas as they arrive, capture the final
        // (non-"streaming") response for finishReason/usage/tool calls.
        let fullContent = "";
        const toolCalls2 = [];
        for await (const response of llmClient.chat(messages, currentTools)) {
          if (response.content && response.finishReason === "streaming") {
            fullContent += response.content;
            outputFormatter.writeModelOutput(response.content);
          }
          if (response.finishReason !== "streaming") {
            lastResponse = response;
            if (response.toolCalls) {
              toolCalls2.push(...response.toolCalls);
            }
          }
        }
        assistantMessage = {
          role: "assistant",
          content: fullContent
        };
        if (toolCalls2.length > 0) {
          assistantMessage.tool_calls = toolCalls2;
        }
      } else {
        const response = await llmClient.chatComplete(messages, currentTools);
        lastResponse = response;
        outputFormatter.writeModelOutput(response.content);
        assistantMessage = {
          role: "assistant",
          content: response.content
        };
        if (response.toolCalls) {
          assistantMessage.tool_calls = response.toolCalls;
        }
      }
      messages.push(assistantMessage);
      newMessages.push(assistantMessage);
      outputFormatter.writeProgress({ type: "complete", data: {
        finishReason: lastResponse?.finishReason ?? "unknown",
        usage: lastResponse?.usage
      } });
      const toolCalls = assistantMessage.tool_calls;
      if (toolCalls && toolCalls.length > 0) {
        for (const toolCall of toolCalls) {
          outputFormatter.writeProgress({
            type: "tool_call",
            data: { name: toolCall.name, arguments: toolCall.arguments }
          });
          await outputFormatter.logToolCall(toolCall.name, toolCall.arguments);
          // Past the limit: reject the call with an instructive error
          // instead of executing it.
          if (maxTurns <= 0) {
            const rejectContent = `Error: Tool-call turn limit (${turnsLimit}) reached. Please provide a final text summary without further tool calls.`;
            const rejectMessage = {
              role: "tool",
              name: toolCall.name,
              tool_call_id: toolCall.id,
              content: rejectContent
            };
            messages.push(rejectMessage);
            newMessages.push(rejectMessage);
            outputFormatter.writeProgress({
              type: "tool_result",
              data: { error: rejectContent }
            });
            await outputFormatter.logToolResult(toolCall.name, { error: rejectContent });
            continue;
          }
          try {
            const result = await toolRegistry.execute(
              toolCall.name,
              toolCall.arguments
            );
            const toolResultMessage = {
              role: "tool",
              name: toolCall.name,
              tool_call_id: toolCall.id,
              content: JSON.stringify(result)
            };
            messages.push(toolResultMessage);
            newMessages.push(toolResultMessage);
            outputFormatter.writeProgress({
              type: "tool_result",
              data: result
            });
            await outputFormatter.logToolResult(toolCall.name, result);
          } catch (error) {
            // Tool failures are fed back to the model as error results,
            // not raised — the model gets a chance to recover.
            const errorMessage = {
              role: "tool",
              name: toolCall.name,
              tool_call_id: toolCall.id,
              content: `Error: ${error instanceof Error ? error.message : String(error)}`
            };
            messages.push(errorMessage);
            newMessages.push(errorMessage);
            outputFormatter.writeProgress({
              type: "tool_result",
              data: { error: String(error) }
            });
            await outputFormatter.logToolResult(toolCall.name, { error: String(error) });
          }
        }
        if (maxTurns <= 0) {
          if (finalRoundAttempted) {
            process.stderr.write(
              `[Warning] Model continues to request tool calls after final round. Stopping.
`
            );
            continueLoop = false;
          } else {
            // Grant exactly one extra turn for a text-only summary.
            finalRoundAttempted = true;
            process.stderr.write(
              `[Warning] Tool-call turn limit (${turnsLimit}) reached. Making one final request for a text summary.
`
            );
            maxTurns = 1;
          }
        }
        // NOTE(review): this overrides the `continueLoop = false` set just
        // above; the loop still terminates then because maxTurns <= 0 in
        // that branch (the while condition also checks maxTurns > 0).
        continueLoop = true;
      } else {
        // No tool calls: the model gave its final answer.
        continueLoop = false;
      }
    }
    // --- 6. Persist the turn ---
    if (options.append !== false && sessionManager.getSessionPath()) {
      await sessionManager.appendMessages(newMessages);
    }
  } catch (error) {
    if (error instanceof PAIError) {
      await outputFormatter.logError(error);
      outputFormatter.writeError(error);
      process.exit(error.exitCode);
    } else {
      // Wrap unexpected failures so output/logging stay uniform.
      const paiError = new PAIError(
        error instanceof Error ? error.message : String(error),
        2,
        { originalError: String(error) }
      );
      await outputFormatter.logError(paiError);
      outputFormatter.writeError(paiError);
      process.exit(2);
    }
  }
}
|
|
1568
|
+
|
|
1569
|
+
// src/embedding-client.ts
|
|
1570
|
+
// Default API base URLs per provider, used when the config supplies no
// baseUrl. Providers absent from this table must configure baseUrl
// explicitly (EmbeddingClient.resolveEndpoint throws otherwise).
var PROVIDER_DEFAULT_BASE_URLS = {
  openai: "https://api.openai.com"
};
|
|
1573
|
+
/**
 * Minimal REST client for OpenAI-compatible (and Azure OpenAI) embedding
 * endpoints. The target URL is resolved once, at construction time.
 */
var EmbeddingClient = class _EmbeddingClient {
  // Fully-resolved embeddings URL.
  endpoint;
  apiKey;
  model;
  // Azure OpenAI uses an "api-key" header and a deployment-based URL.
  isAzure;
  constructor(config) {
    this.apiKey = config.apiKey;
    this.model = config.model;
    const apiType = config.api ?? config.provider;
    this.isAzure = apiType === "azure-openai-responses" || apiType === "azure-openai";
    this.endpoint = this.isAzure ? _EmbeddingClient.resolveAzureEndpoint(config.baseUrl, config.model, config.providerOptions) : _EmbeddingClient.resolveEndpoint(config.provider, config.baseUrl);
  }
  /**
   * Resolve the full embeddings API endpoint URL.
   * An explicit baseUrl wins; otherwise the provider's default base URL
   * applies. Throws a PAIError when neither is available.
   */
  static resolveEndpoint(provider, baseUrl) {
    let base = baseUrl;
    if (base == null) {
      base = Object.hasOwn(PROVIDER_DEFAULT_BASE_URLS, provider) ? PROVIDER_DEFAULT_BASE_URLS[provider] : void 0;
    }
    if (!base) {
      throw new PAIError(
        `No base URL configured for provider "${provider}". Please specify a baseUrl.`,
        1 /* PARAMETER_ERROR */,
        { provider }
      );
    }
    return `${base.replace(/\/+$/, "")}/v1/embeddings`;
  }
  /**
   * Resolve the Azure OpenAI embeddings endpoint URL.
   * Azure format: {baseUrl}/openai/deployments/{deployment}/embeddings?api-version={version}
   * The model name doubles as the deployment name unless
   * providerOptions.azureDeploymentName is provided.
   */
  static resolveAzureEndpoint(baseUrl, model, providerOptions) {
    if (!baseUrl) {
      throw new PAIError(
        "Azure OpenAI requires a baseUrl. Please specify a baseUrl.",
        1 /* PARAMETER_ERROR */
      );
    }
    const deployment = model ?? providerOptions?.azureDeploymentName;
    if (!deployment) {
      throw new PAIError(
        "Azure OpenAI requires a deployment name. Specify a model or set providerOptions.azureDeploymentName.",
        1 /* PARAMETER_ERROR */
      );
    }
    // Only honor explicit versions that look like a YYYY-MM-DD date;
    // anything else falls back to a known-good stable version.
    const requested = providerOptions?.azureApiVersion;
    const version = requested && /^\d{4}-\d{2}-\d{2}/.test(requested) ? requested : "2024-06-01";
    // Strip a trailing /openai/v1 (and trailing slashes) so the resource
    // root can be recombined with the deployments path.
    const root = baseUrl.replace(/\/openai\/v1\/?$/, "").replace(/\/+$/, "");
    return `${root}/openai/deployments/${deployment}/embeddings?api-version=${version}`;
  }
  /**
   * Call the embedding API for the given texts.
   * Returns embeddings restored to input order, plus model and token-usage
   * metadata. Network failures and non-2xx responses are wrapped in
   * PAIError with distinct codes (runtime vs API error).
   */
  async embed(request) {
    const payload = JSON.stringify({
      model: request.model,
      input: request.texts
    });
    let response;
    try {
      const headers = {
        "Content-Type": "application/json"
      };
      if (this.isAzure) {
        headers["api-key"] = this.apiKey;
      } else {
        headers["Authorization"] = `Bearer ${this.apiKey}`;
      }
      response = await fetch(this.endpoint, {
        method: "POST",
        headers,
        body: payload
      });
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      throw new PAIError(
        `Network error calling embedding API: ${message}`,
        2 /* RUNTIME_ERROR */,
        { endpoint: this.endpoint, cause: message }
      );
    }
    if (!response.ok) {
      let detail = "";
      try {
        detail = await response.text();
      } catch {
        // Body unreadable — report status text alone below.
      }
      throw new PAIError(
        `Embedding API error (${response.status}): ${detail || response.statusText}`,
        3 /* API_ERROR */,
        { status: response.status, detail }
      );
    }
    const json = await response.json();
    // Providers may return rows out of order; restore input order by index.
    const ordered = [...json.data].sort((a, b) => a.index - b.index);
    return {
      embeddings: ordered.map((d) => d.embedding),
      model: json.model,
      usage: {
        promptTokens: json.usage.prompt_tokens,
        totalTokens: json.usage.total_tokens
      }
    };
  }
};
|
|
1679
|
+
|
|
1680
|
+
// src/embed-model-resolver.ts
|
|
1681
|
+
/**
 * Determine the (provider, model) pair to use for embedding commands.
 * CLI options take precedence; otherwise config defaults apply, with
 * defaultEmbedProvider preferred over the general defaultProvider.
 * Throws a PAIError (parameter error) when either cannot be resolved.
 */
function resolveEmbedModel(options, config) {
  const ensure = (value, message) => {
    if (!value) {
      throw new PAIError(message, 1 /* PARAMETER_ERROR */);
    }
    return value;
  };
  const provider = ensure(
    options.provider ?? config.defaultEmbedProvider ?? config.defaultProvider,
    "No embed provider specified and no default provider configured"
  );
  const model = ensure(
    options.model ?? config.defaultEmbedModel,
    "No embed model specified and no default embed model configured"
  );
  return { provider, model };
}
|
|
1698
|
+
|
|
1699
|
+
// src/embedding-models.ts
|
|
1700
|
+
// Maximum input size, in tokens, for known embedding models. Texts whose
// estimated token count exceeds the model's limit get clipped before the
// API call; unknown models pass through untouched.
var EMBEDDING_MODEL_LIMITS = {
  // OpenAI
  "text-embedding-3-small": 8191,
  "text-embedding-3-large": 8191,
  "text-embedding-ada-002": 8191,
  // Google
  "text-embedding-004": 2048,
  // Cohere
  "embed-english-v3.0": 512,
  "embed-multilingual-v3.0": 512,
  "embed-english-light-v3.0": 512,
  "embed-multilingual-light-v3.0": 512
};
// Rough heuristic: ~4 characters per token for English-like text.
var CHARS_PER_TOKEN = 4;
/**
 * Estimate the token count of a text using the chars-per-token heuristic.
 */
function estimateTokens(text) {
  return Math.ceil(text.length / CHARS_PER_TOKEN);
}
/**
 * Clip a text to the target model's token limit (heuristically).
 * Returns the (possibly shortened) text, whether clipping occurred, and
 * the original estimated token count. Texts for unknown models, or within
 * the limit, come back unchanged.
 */
function truncateText(text, model) {
  const originalTokens = estimateTokens(text);
  const limit = EMBEDDING_MODEL_LIMITS[model];
  if (limit === void 0 || originalTokens <= limit) {
    return { text, truncated: false, originalTokens };
  }
  return {
    text: text.slice(0, limit * CHARS_PER_TOKEN),
    truncated: true,
    originalTokens
  };
}
|
|
1733
|
+
|
|
1734
|
+
// src/embed-io.ts
|
|
1735
|
+
/**
 * Encode a vector of numbers as big-endian IEEE-754 float32 hex strings.
 * Each element becomes exactly 8 lowercase hex characters.
 *
 * @param {number[]} vec - Embedding vector.
 * @returns {string[]} One 8-char hex string per element.
 */
function vectorToHex(vec) {
  // Single 4-byte scratch buffer, reused for every element.
  const scratch = new DataView(new ArrayBuffer(4));
  return Array.from(vec, (value) => {
    scratch.setFloat32(0, value, false);
    // Read the four bytes back as one big-endian u32 and zero-pad to 8 digits.
    return scratch.getUint32(0, false).toString(16).padStart(8, "0");
  });
}
|
|
1750
|
+
/**
 * Parse and validate the --batch input: a JSON array of strings.
 *
 * @param {string} raw - Raw input text (JSON).
 * @returns {string[]} The parsed array.
 * @throws {PAIError} exit code 1 when the input is not valid JSON, not an
 *   array, or contains a non-string element.
 */
function parseBatchInput(raw) {
  let decoded;
  try {
    decoded = JSON.parse(raw);
  } catch {
    throw new PAIError(
      "Invalid batch input: not valid JSON",
      1 /* PARAMETER_ERROR */
    );
  }
  if (!Array.isArray(decoded)) {
    throw new PAIError(
      "Invalid batch input: expected a JSON array of strings",
      1 /* PARAMETER_ERROR */
    );
  }
  decoded.forEach((entry, i) => {
    if (typeof entry !== "string") {
      throw new PAIError(
        `Invalid batch input: element at index ${i} is not a string`,
        1 /* PARAMETER_ERROR */
      );
    }
  });
  return decoded;
}
|
|
1776
|
+
/**
 * Render an embedding result for stdout.
 *
 * Plain mode (no --json): one hex-encoded vector (as a JSON array of
 * 8-char hex strings) per line.
 * JSON + --batch: {"embeddings": [...], "model": ..., "usage": ...}.
 * JSON single:    {"embedding": [...],  "model": ..., "usage": ...}.
 *
 * @param {{embeddings: number[][], model: string, usage: {promptTokens: number, totalTokens: number}}} result
 * @param {{json: boolean, batch: boolean}} options
 * @returns {string} Formatted output (without trailing newline).
 */
function formatEmbeddingOutput(result, options) {
  if (!options.json) {
    const lines = result.embeddings.map((vector) => JSON.stringify(vectorToHex(vector)));
    return lines.join("\n");
  }
  // snake_case usage keys to mirror the upstream API response shape.
  const usage = {
    prompt_tokens: result.usage.promptTokens,
    total_tokens: result.usage.totalTokens
  };
  if (options.batch) {
    return JSON.stringify({
      embeddings: result.embeddings.map((vector) => vectorToHex(vector)),
      model: result.model,
      usage
    });
  }
  return JSON.stringify({
    embedding: vectorToHex(result.embeddings[0]),
    model: result.model,
    usage
  });
}
|
|
1797
|
+
|
|
1798
|
+
// src/commands/embed.ts
|
|
1799
|
+
// src/commands/embed.ts
/**
 * `pai embed` — generate embeddings for a text (or a JSON array of texts
 * with --batch) and print them to stdout.
 *
 * Input is accepted from exactly one of: the positional argument, stdin
 * (when not a TTY), or --input-file. Texts are truncated to the model's
 * token limit (with a warning on stderr) before the request is sent.
 * On error the process exits with the PAIError exit code, or 2 for
 * unexpected failures.
 *
 * @param {string|undefined} text - Positional text argument, if given.
 * @param {object} options - Parsed CLI options.
 */
async function handleEmbedCommand(text, options) {
  const configManager = new ConfigurationManager(options);
  const inputResolver = new InputResolver();
  // Progress output is quiet by default unless --quiet is explicitly false.
  const outputFormatter = new OutputFormatter(
    options.json,
    options.quiet ?? true
  );
  try {
    const config = await configManager.loadConfig();
    const { provider: providerName, model: modelName } = resolveEmbedModel(options, config);
    const providerConfig = config.providers.find((p) => p.name === providerName);
    if (!providerConfig) {
      throw new PAIError(
        `Provider not found: ${providerName}`,
        1 /* PARAMETER_ERROR */,
        { provider: providerName }
      );
    }
    const apiKey = await configManager.resolveCredentials(providerName, void 0);
    // Determine the single allowed input source; stdin only counts when no
    // explicit source was given and stdin is piped (not a TTY).
    const hasExplicitInput = text !== void 0 || options.inputFile !== void 0;
    const stdinAvailable = !process.stdin.isTTY && !hasExplicitInput;
    const sourceCount = [
      text !== void 0,
      stdinAvailable,
      options.inputFile !== void 0
    ].filter(Boolean).length;
    if (sourceCount > 1) {
      throw new PAIError(
        "Multiple input sources specified",
        1 /* PARAMETER_ERROR */,
        { message: "Provide input via argument, stdin, or --input-file (only one)" }
      );
    }
    let rawInput;
    if (text !== void 0) {
      rawInput = text;
    } else if (options.inputFile) {
      rawInput = await inputResolver.resolveUserInput({
        file: options.inputFile
      });
    } else if (stdinAvailable) {
      rawInput = await inputResolver.resolveUserInput({
        stdin: true
      });
    } else {
      throw new PAIError(
        "No input text provided",
        1 /* PARAMETER_ERROR */,
        { message: "Provide input via argument, stdin, or --input-file" }
      );
    }
    // --batch expects a JSON string array; otherwise embed the single text.
    let texts;
    if (options.batch) {
      texts = parseBatchInput(rawInput);
    } else {
      texts = [rawInput];
    }
    // Truncate each text to the model limit, warning on stderr when it happens.
    texts = texts.map((t) => {
      const result = truncateText(t, modelName);
      if (result.truncated) {
        // Consistency fix: use CHARS_PER_TOKEN instead of a hard-coded 4
        // (same value; keeps the heuristic in one place).
        const truncatedTokens = Math.ceil(result.text.length / CHARS_PER_TOKEN);
        const modelLimit = EMBEDDING_MODEL_LIMITS[modelName] ?? truncatedTokens;
        if (options.json) {
          const warning = {
            type: "warning",
            data: {
              message: `Input text truncated from ~${result.originalTokens} tokens to ${truncatedTokens} tokens (model limit: ${modelLimit})`,
              originalTokens: result.originalTokens,
              truncatedTokens
            }
          };
          process.stderr.write(JSON.stringify(warning) + "\n");
        } else {
          process.stderr.write(
            `[Warning] Input text truncated from ~${result.originalTokens} tokens to ${truncatedTokens} tokens (model limit: ${modelLimit})\n`
          );
        }
      }
      return result.text;
    });
    outputFormatter.writeProgress({
      type: "start",
      data: {
        provider: providerName,
        model: modelName,
        texts: texts.length,
        batch: options.batch ?? false
      }
    });
    // Build the client config, copying only the optional provider settings
    // that are actually present.
    const clientConfig = {
      provider: providerName,
      apiKey,
      model: modelName
    };
    if (providerConfig.baseUrl) {
      clientConfig.baseUrl = providerConfig.baseUrl;
    }
    if (providerConfig.providerOptions) {
      clientConfig.providerOptions = providerConfig.providerOptions;
    }
    if (providerConfig.api) {
      clientConfig.api = providerConfig.api;
    }
    const client = new EmbeddingClient(clientConfig);
    const response = await client.embed({ texts, model: modelName });
    outputFormatter.writeProgress({
      type: "complete",
      data: {
        model: response.model,
        usage: response.usage
      }
    });
    const output = formatEmbeddingOutput(response, {
      json: options.json ?? false,
      batch: options.batch ?? false
    });
    process.stdout.write(output + "\n");
  } catch (error) {
    if (error instanceof PAIError) {
      outputFormatter.writeError(error);
      process.exit(error.exitCode);
    } else {
      // Wrap unexpected errors so the formatter gets a uniform shape.
      const paiError = new PAIError(
        error instanceof Error ? error.message : String(error),
        2 /* RUNTIME_ERROR */,
        { originalError: String(error) }
      );
      outputFormatter.writeError(paiError);
      process.exit(2 /* RUNTIME_ERROR */);
    }
  }
}
|
|
1932
|
+
|
|
1933
|
+
// src/commands/model.ts
|
|
1934
|
+
import { getProviders, getModels as getModels2 } from "@mariozechner/pi-ai";
|
|
1935
|
+
import { getOAuthProvider, getOAuthProviders } from "@mariozechner/pi-ai/oauth";
|
|
1936
|
+
import { createInterface as createInterface2 } from "readline";
|
|
1937
|
+
/**
 * `pai model list` — list providers.
 *
 * Default: lists providers configured in the config file. With --all:
 * lists every provider supported by pi-ai, marking configured ones.
 * With --json: machine-readable output on stdout (the config path banner
 * then goes to stderr so stdout stays clean JSON).
 * Exits the process on failure (PAIError exit code, else 2).
 */
async function handleModelList(options) {
  const configManager = new ConfigurationManager(options);
  try {
    const config = await configManager.loadConfig();
    // Print the resolved config path; route it to stderr in JSON mode so
    // stdout contains only the JSON document.
    if (!options.json) {
      console.log(`Config: ${configManager.getConfigPath()}\n`);
    } else {
      process.stderr.write(`Config: ${configManager.getConfigPath()}\n`);
    }
    if (options.all) {
      // --all: every provider pi-ai knows about, flagged as configured/not.
      const allProviders = getProviders();
      if (options.json) {
        const output = {
          defaultEmbedProvider: config.defaultEmbedProvider ?? null,
          defaultEmbedModel: config.defaultEmbedModel ?? null,
          providers: allProviders.map((provider) => {
            const models = getModels2(provider);
            const configured = config.providers.some((p) => p.name === provider);
            return {
              name: provider,
              provider,
              configured,
              models: models.map((m) => m.id)
            };
          })
        };
        console.log(JSON.stringify(output, null, 2));
      } else {
        if (config.defaultEmbedProvider || config.defaultEmbedModel) {
          const ep = config.defaultEmbedProvider ?? "(not set)";
          const em = config.defaultEmbedModel ?? "(not set)";
          console.log(`Default Embed: ${ep}/${em}\n`);
        }
        console.log("Available Providers:\n");
        for (const provider of allProviders) {
          const models = getModels2(provider);
          const configured = config.providers.some((p) => p.name === provider);
          // Check mark for providers present in the user's config.
          const status = configured ? "\u2713" : " ";
          console.log(`[${status}] ${provider}`);
          console.log(`  Models: ${models.length} available`);
          // Only enumerate models for short lists to keep output readable.
          if (models.length > 0 && models.length <= 5) {
            models.forEach((m) => console.log(`    - ${m.id}`));
          }
          console.log();
        }
      }
    } else {
      // Default: only providers present in the config file.
      if (config.providers.length === 0) {
        console.log("No providers configured.");
        console.log('Use "pai model config --add" to add a provider.');
        return;
      }
      if (options.json) {
        const output = {
          defaultEmbedProvider: config.defaultEmbedProvider ?? null,
          defaultEmbedModel: config.defaultEmbedModel ?? null,
          providers: config.providers.map((p) => ({
            name: p.name,
            provider: p.name,
            configured: true,
            models: p.models || [],
            defaultModel: p.defaultModel
          }))
        };
        console.log(JSON.stringify(output, null, 2));
      } else {
        if (config.defaultEmbedProvider || config.defaultEmbedModel) {
          const ep = config.defaultEmbedProvider ?? "(not set)";
          const em = config.defaultEmbedModel ?? "(not set)";
          console.log(`Default Embed: ${ep}/${em}\n`);
        }
        console.log("Configured Providers:\n");
        for (const provider of config.providers) {
          console.log(`\u2713 ${provider.name}`);
          if (provider.defaultModel) {
            console.log(`  Default: ${provider.defaultModel}`);
          }
          if (provider.models && provider.models.length > 0) {
            console.log(`  Models: ${provider.models.join(", ")}`);
          }
          console.log();
        }
      }
    }
  } catch (error) {
    // CLI-style error reporting: message to stderr, then exit.
    if (error instanceof PAIError) {
      console.error(`Error: ${error.message}`);
      process.exit(error.exitCode);
    } else {
      console.error(`Error: ${error instanceof Error ? error.message : String(error)}`);
      process.exit(2);
    }
  }
}
|
|
2035
|
+
/**
 * `pai model config` — manage provider entries in the config file.
 *
 * Actions (exactly one required):
 *   --show    print a provider's config with secrets masked
 *   --add     create/upsert a provider (requires --name and --provider)
 *   --update  apply --set key=value updates to an existing provider
 *   --delete  remove a provider
 * --set supports dot-paths (e.g. providerOptions.foo=bar) which create
 * nested objects; unknown top-level keys only produce a warning.
 * Exits the process on failure (PAIError exit code, else 2).
 *
 * Refactor: the key whitelist and the --set parsing logic were duplicated
 * verbatim in the --add and --update branches; both now share the
 * parseSettings/requireName helpers below.
 */
async function handleModelConfig(options) {
  const configManager = new ConfigurationManager(options);
  // Keys recognized in --set pairs; anything else warns but is still applied.
  const knownKeys = /* @__PURE__ */ new Set([
    "apiKey",
    "defaultModel",
    "models",
    "temperature",
    "maxTokens",
    "api",
    "baseUrl",
    "reasoning",
    "input",
    "contextWindow",
    "providerOptions"
  ]);
  // Parse ["key=value", ...] into an object; dot-paths create nested objects.
  // Throws PAIError(1) on a malformed entry.
  const parseSettings = (settings) => {
    const result = {};
    for (const setting of settings) {
      const eqIndex = setting.indexOf("=");
      if (eqIndex < 1) {
        throw new PAIError(
          `Invalid --set format: ${setting}`,
          1,
          { message: "Use --set key=value" }
        );
      }
      const key = setting.substring(0, eqIndex);
      const value = setting.substring(eqIndex + 1);
      const topKey = key.split(".")[0];
      if (!knownKeys.has(topKey)) {
        console.error(`Warning: unknown key "${key}". Known keys: ${[...knownKeys].join(", ")}`);
      }
      if (key.includes(".")) {
        // Walk/create intermediate objects for a dot-path key.
        const parts = key.split(".");
        let target = result;
        for (let i = 0; i < parts.length - 1; i++) {
          const part = parts[i];
          if (!target[part] || typeof target[part] !== "object") {
            target[part] = {};
          }
          target = target[part];
        }
        target[parts[parts.length - 1]] = value;
      } else {
        result[key] = value;
      }
    }
    return result;
  };
  // All actions require --name; fail with the same guidance message.
  const requireName = () => {
    if (!options.name) {
      throw new PAIError("Provider name is required", 1, {
        message: "Use --name <provider-name>"
      });
    }
  };
  try {
    process.stderr.write(`Config: ${configManager.getConfigPath()}\n`);
    if (options.show) {
      requireName();
      const provider = await configManager.getProvider(options.name);
      // Mask secrets before printing.
      const masked = { ...provider };
      if (masked.apiKey) masked.apiKey = "***";
      if (masked.oauth) {
        masked.oauth = {
          ...masked.oauth,
          refresh: "***",
          access: "***"
        };
      }
      if (options.json) {
        console.log(JSON.stringify(masked, null, 2));
      } else {
        console.log(`\nProvider: ${provider.name}`);
        for (const [key, value] of Object.entries(masked)) {
          if (key === "name") continue;
          if (typeof value === "object" && value !== null) {
            console.log(`  ${key}: ${JSON.stringify(value)}`);
          } else {
            console.log(`  ${key}: ${value}`);
          }
        }
      }
    } else if (options.add) {
      requireName();
      if (!options.provider) {
        throw new PAIError("Provider type is required", 1, {
          message: "Use --provider <provider-type>"
        });
      }
      const supportedProviders = getProviders();
      if (!supportedProviders.includes(options.provider)) {
        throw new PAIError(
          `Unsupported provider: ${options.provider}`,
          1,
          {
            message: `Supported providers: ${supportedProviders.join(", ")}`
          }
        );
      }
      // --set values may override "name" too, matching the old behavior.
      const providerConfig = { name: options.name, ...parseSettings(options.set ?? []) };
      await configManager.addProvider(providerConfig);
      if (options.default) {
        await configManager.setDefaultProvider(options.name);
      }
      console.log(`Provider "${options.name}" configured successfully.`);
    } else if (options.update) {
      requireName();
      if (!options.set || options.set.length === 0) {
        throw new PAIError("No fields to update", 1, {
          message: "Use --set key=value to specify fields to update"
        });
      }
      const updates = parseSettings(options.set);
      await configManager.updateProvider(options.name, updates);
      if (options.default) {
        await configManager.setDefaultProvider(options.name);
      }
      console.log(`Provider "${options.name}" updated successfully.`);
    } else if (options.delete) {
      requireName();
      await configManager.deleteProvider(options.name);
      console.log(`Provider "${options.name}" deleted successfully.`);
    } else {
      throw new PAIError("No action specified", 1, {
        message: "Use --add, --update, --delete, or --show"
      });
    }
  } catch (error) {
    // CLI-style error reporting: message (+ context) to stderr, then exit.
    if (error instanceof PAIError) {
      console.error(`Error: ${error.message}`);
      if (error.context) {
        console.error(`Context: ${JSON.stringify(error.context)}`);
      }
      process.exit(error.exitCode);
    } else {
      console.error(`Error: ${error instanceof Error ? error.message : String(error)}`);
      process.exit(2);
    }
  }
}
|
|
2230
|
+
/**
 * `pai model default` — show or set the default provider and the default
 * embedding provider/model.
 *
 * With --name and/or --embed-provider/--embed-model: persists the new
 * defaults and prints one confirmation line per value set. Without any of
 * those flags: prints the current defaults (JSON with --json).
 * Exits the process on failure (PAIError exit code, else 2).
 */
async function handleModelDefault(options) {
  const configManager = new ConfigurationManager(options);
  try {
    // Truthy when at least one embed default is being set this invocation.
    const hasSetEmbed = options.embedProvider || options.embedModel;
    if (options.name || hasSetEmbed) {
      // Set mode: apply whichever defaults were supplied.
      if (options.name) {
        await configManager.setDefaultProvider(options.name);
      }
      if (hasSetEmbed) {
        await configManager.setDefaultEmbed(options.embedProvider, options.embedModel);
      }
      // Confirmation output, one line per value actually set.
      const parts = [];
      if (options.name) parts.push(`Default provider set to "${options.name}".`);
      if (options.embedProvider) parts.push(`Default embed provider set to "${options.embedProvider}".`);
      if (options.embedModel) parts.push(`Default embed model set to "${options.embedModel}".`);
      console.log(parts.join("\n"));
    } else {
      // Show mode: report the currently configured defaults.
      const config = await configManager.loadConfig();
      if (options.json) {
        console.log(JSON.stringify({
          defaultProvider: config.defaultProvider ?? null,
          defaultEmbedProvider: config.defaultEmbedProvider ?? null,
          defaultEmbedModel: config.defaultEmbedModel ?? null
        }));
      } else {
        if (config.defaultProvider) {
          console.log(`Default provider: ${config.defaultProvider}`);
        } else {
          console.log("No default provider configured.");
          console.log('Use "pai model default --name <provider>" to set one.');
        }
        // Embed defaults are only mentioned when at least one is present.
        if (config.defaultEmbedProvider || config.defaultEmbedModel) {
          const embedParts = [];
          if (config.defaultEmbedProvider) embedParts.push(`provider: ${config.defaultEmbedProvider}`);
          if (config.defaultEmbedModel) embedParts.push(`model: ${config.defaultEmbedModel}`);
          console.log(`Default embed: ${embedParts.join(", ")}`);
        }
      }
    }
  } catch (error) {
    // CLI-style error reporting: message (+ context) to stderr, then exit.
    if (error instanceof PAIError) {
      console.error(`Error: ${error.message}`);
      if (error.context) {
        console.error(`Context: ${JSON.stringify(error.context)}`);
      }
      process.exit(error.exitCode);
    } else {
      console.error(`Error: ${error instanceof Error ? error.message : String(error)}`);
      process.exit(2);
    }
  }
}
|
|
2282
|
+
/**
 * `pai model login` — interactive OAuth login for a provider.
 *
 * Looks up the named OAuth provider, drives its login flow (URL display,
 * readline prompts, progress messages), then stores the returned
 * credentials under the provider's `oauth` key in the config (creating
 * the provider entry if it does not exist yet).
 * Exits the process on failure (PAIError exit code, else 2).
 */
async function handleModelLogin(options) {
  const configManager = new ConfigurationManager(options);
  try {
    if (!options.name) {
      throw new PAIError("Provider name is required", 1, {
        message: "Use --name <provider-name>"
      });
    }
    const oauthProviders = getOAuthProviders();
    const oauthProvider = getOAuthProvider(options.name);
    if (!oauthProvider) {
      // Not an OAuth-capable provider; list the ones that are.
      const oauthIds = oauthProviders.map((p) => p.id).join(", ");
      throw new PAIError(
        `Provider "${options.name}" does not support OAuth login`,
        1,
        { message: `OAuth providers: ${oauthIds}` }
      );
    }
    // Readline interface for interactive prompts during the OAuth flow.
    const rl = createInterface2({ input: process.stdin, output: process.stdout });
    const prompt = (msg) => new Promise((resolve) => rl.question(`${msg} `, resolve));
    try {
      console.log(`Logging in to ${oauthProvider.name}...`);
      const credentials = await oauthProvider.login({
        // Show the authorization URL (and optional instructions) to the user.
        onAuth: (info) => {
          console.log(`\nOpen this URL in your browser:\n${info.url}`);
          if (info.instructions) console.log(info.instructions);
          console.log();
        },
        // Forward provider prompts (e.g. paste-the-code) to readline.
        onPrompt: async (p) => {
          return await prompt(
            `${p.message}${p.placeholder ? ` (${p.placeholder})` : ""}:`
          );
        },
        onProgress: (msg) => console.log(msg)
      });
      // Reuse the existing provider entry if present, else create a minimal one.
      let providerConfig;
      try {
        providerConfig = await configManager.getProvider(options.name);
      } catch {
        providerConfig = { name: options.name };
      }
      // Store tokens plus any extra provider-specific credential fields.
      providerConfig.oauth = {
        refresh: credentials.refresh,
        access: credentials.access,
        expires: credentials.expires,
        ...Object.fromEntries(
          Object.entries(credentials).filter(
            ([k]) => !["refresh", "access", "expires"].includes(k)
          )
        )
      };
      await configManager.addProvider(providerConfig);
      console.log(`\nProvider "${options.name}" logged in and credentials saved to config.`);
    } finally {
      // Always release the readline interface, even if login fails.
      rl.close();
    }
  } catch (error) {
    // CLI-style error reporting: message (+ context) to stderr, then exit.
    if (error instanceof PAIError) {
      console.error(`Error: ${error.message}`);
      if (error.context) {
        console.error(`Context: ${JSON.stringify(error.context)}`);
      }
      process.exit(error.exitCode);
    } else {
      console.error(
        `Error: ${error instanceof Error ? error.message : String(error)}`
      );
      process.exit(2);
    }
  }
}
|
|
2356
|
+
|
|
2357
|
+
// src/help.ts
|
|
2358
|
+
// src/help.ts
// Static help/example text appended to commander's generated help output.
// The \uXXXX escapes are Chinese descriptions escaped by the bundler.
// NOTE(review): intra-line alignment spacing is reconstructed — the
// extracted source lost leading whitespace; verify against the original.

// Examples shown after the root `pai --help` output.
var MAIN_EXAMPLES = `
Examples:
  $ pai chat "Hello, how are you?"          # \u7B80\u5355\u5BF9\u8BDD
  $ echo "Explain this" | pai chat          # stdin \u8F93\u5165
  $ pai chat "Write a story" --stream       # \u6D41\u5F0F\u8F93\u51FA
  $ pai model config --add --name openai --provider openai --set apiKey=sk-...
  $ pai model list                          # \u67E5\u770B\u5DF2\u914D\u7F6E provider
  $ pai model default --name openai         # \u8BBE\u7F6E\u9ED8\u8BA4 provider`;
// Extended help shown only with --help --verbose (see installVerboseHelp).
var MAIN_VERBOSE = `
Prerequisites:
  \u4F7F\u7528\u524D\u9700\u5148\u914D\u7F6E\u81F3\u5C11\u4E00\u4E2A provider:
    pai model config --add --name <name> --provider <type> --set apiKey=<key>
  \u6216\u4F7F\u7528 OAuth \u767B\u5F55:
    pai model login --name github-copilot

Config:
  \u9ED8\u8BA4\u914D\u7F6E\u6587\u4EF6: ~/.config/pai/default.json
  \u53EF\u901A\u8FC7 --config <path> \u6216 PAI_CONFIG \u73AF\u5883\u53D8\u91CF\u8986\u76D6

Exit Codes:
  0  \u6210\u529F
  1  \u53C2\u6570/\u7528\u6CD5\u9519\u8BEF
  2  \u672C\u5730\u8FD0\u884C\u65F6\u9519\u8BEF
  3  \u5916\u90E8 API/Provider \u9519\u8BEF
  4  IO/\u6587\u4EF6\u9519\u8BEF`;
// Examples for `pai chat --help`.
var CHAT_EXAMPLES = `
Examples:
  $ pai chat "What is the capital of France?"
  $ echo "Summarize this" | pai chat            # stdin \u8F93\u5165
  $ cat doc.txt | pai chat "Summarize this document"
  $ pai chat "Hello" --session chat.jsonl       # \u591A\u8F6E\u5BF9\u8BDD
  $ pai chat "Describe this" --image photo.jpg  # \u56FE\u7247\u5206\u6790
  $ pai chat --dry-run --provider openai        # \u67E5\u770B\u914D\u7F6E\u4E0D\u8C03\u7528 LLM

Stdin:
  \u652F\u6301\u901A\u8FC7\u7BA1\u9053\u4F20\u5165\u7528\u6237\u6D88\u606F\uFF0C\u4E0E --input-file \u548C\u4F4D\u7F6E\u53C2\u6570\u4E92\u65A5\u3002

JSON output (--json):
  \u8FDB\u5EA6\u4FE1\u606F\u4EE5 NDJSON \u8F93\u51FA\u5230 stderr\uFF0CLLM \u56DE\u590D\u8F93\u51FA\u5230 stdout\u3002`;
// Examples for `pai embed --help`.
var EMBED_EXAMPLES = `
Examples:
  $ pai embed "hello world" --provider openai --model text-embedding-3-small
  $ echo "hello" | pai embed                # stdin \u8F93\u5165
  $ pai embed --input-file doc.txt          # \u6587\u4EF6\u8F93\u5165
  $ pai embed --batch '["hello","world"]'   # \u6279\u91CF\u6A21\u5F0F

Stdin:
  \u652F\u6301\u901A\u8FC7\u7BA1\u9053\u4F20\u5165\u6587\u672C\u3002\u4E0E\u4F4D\u7F6E\u53C2\u6570\u548C --input-file \u4E92\u65A5\u3002

JSON output (--json):
  \u5355\u6761: {"embedding":[...],"model":"...","usage":{...}}
  \u6279\u91CF: {"embeddings":[[...],[...]],"model":"...","usage":{...}}`;
// Examples for `pai model list --help`.
var MODEL_LIST_EXAMPLES = `
Examples:
  $ pai model list          # \u5DF2\u914D\u7F6E provider
  $ pai model list --all    # \u6240\u6709\u652F\u6301\u7684 provider
  $ pai model list --json   # JSON \u8F93\u51FA`;
// Examples for `pai model config --help`.
var MODEL_CONFIG_EXAMPLES = `
Examples:
  $ pai model config --add --name openai --provider openai --set apiKey=sk-...
  $ pai model config --update --name openai --set defaultModel=gpt-4o
  $ pai model config --show --name openai   # \u67E5\u770B\u914D\u7F6E\uFF08\u654F\u611F\u4FE1\u606F\u8131\u654F\uFF09
  $ pai model config --delete --name openai

Note:
  --add \u5BF9\u540C\u540D provider \u6267\u884C upsert\uFF08\u5E42\u7B49\u64CD\u4F5C\uFF09\u3002`;
// Examples for `pai model default --help`.
var MODEL_DEFAULT_EXAMPLES = `
Examples:
  $ pai model default                 # \u67E5\u770B\u5F53\u524D\u9ED8\u8BA4
  $ pai model default --name openai   # \u8BBE\u7F6E\u9ED8\u8BA4 provider
  $ pai model default --embed-provider openai --embed-model text-embedding-3-small`;
// Examples for `pai model login --help`.
var MODEL_LOGIN_EXAMPLES = `
Examples:
  $ pai model login --name github-copilot   # OAuth \u767B\u5F55
  $ pai model login --name anthropic        # Anthropic OAuth

Supported: github-copilot, anthropic, google-gemini-cli, google-antigravity, openai-codex`;
|
|
2435
|
+
/**
 * Attach the root example text to the program and register the
 * --verbose extended-help mechanism.
 * @param {object} program2 - The root commander program.
 */
function installHelp(program2) {
  program2.addHelpText("after", MAIN_EXAMPLES);
  installVerboseHelp(program2);
}
|
|
2439
|
+
/**
 * Append per-subcommand example text to a command's help output.
 * Unknown names are ignored silently.
 *
 * @param {object} cmd - The commander (sub)command to decorate.
 * @param {string} name - Subcommand key (chat, embed, list, config,
 *   default, login).
 */
function addSubcommandExamples(cmd, name) {
  // Map of subcommand key -> example help block.
  const exampleTexts = {
    chat: CHAT_EXAMPLES,
    embed: EMBED_EXAMPLES,
    list: MODEL_LIST_EXAMPLES,
    config: MODEL_CONFIG_EXAMPLES,
    default: MODEL_DEFAULT_EXAMPLES,
    login: MODEL_LOGIN_EXAMPLES
  };
  const helpText = exampleTexts[name];
  if (helpText) {
    cmd.addHelpText("after", helpText);
  }
}
|
|
2453
|
+
/**
 * Register a --verbose flag that, when combined with --help, appends the
 * long-form help (MAIN_VERBOSE) after all other help text.
 *
 * @param {object} program2 - The root commander program.
 */
function installVerboseHelp(program2) {
  program2.option("--verbose", "(\u4E0E --help \u4E00\u8D77\u4F7F\u7528) \u663E\u793A\u5B8C\u6574\u5E2E\u52A9\u4FE1\u606F");
  // Flip a marker on the program when --verbose is parsed...
  program2.on("option:verbose", () => {
    program2.__verboseHelp = true;
  });
  // ...and only emit the extended text when that marker is set.
  program2.addHelpText("afterAll", () => (program2.__verboseHelp ? MAIN_VERBOSE : ""));
}
|
|
2465
|
+
|
|
2466
|
+
// src/index.ts
|
|
2467
|
+
// src/index.ts — module-level CLI bootstrap.
// Treat EPIPE on stdout/stderr as a normal shutdown (e.g. `pai ... | head`);
// rethrow anything else.
process.stdout.on("error", (err) => {
  if (err.code === "EPIPE") process.exit(0);
  throw err;
});
process.stderr.on("error", (err) => {
  if (err.code === "EPIPE") process.exit(0);
  throw err;
});
// ESM has no __dirname; derive it from import.meta.url.
var __dirname = dirname2(fileURLToPath(import.meta.url));
// Own package version, read from the packaged package.json next to dist/.
var packageJson = JSON.parse(
  readFileSync(join2(__dirname, "../package.json"), "utf8")
);
// Best-effort detection of the installed pi-ai version for the banner;
// falls back to "unknown" when the file cannot be read.
var piAiVersion = "unknown";
try {
  const piAiPkg = JSON.parse(
    readFileSync(join2(__dirname, "../node_modules/@mariozechner/pi-ai/package.json"), "utf8")
  );
  piAiVersion = piAiPkg.version;
} catch {
}
var versionString = `pai ${packageJson.version} (pi-ai ${piAiVersion}, Node ${process.version})`;
// Root commander program; output routed explicitly to stdout/stderr so the
// EPIPE handlers above apply.
var program = new Command();
program.name("pai").description("PAI - A Unix-style CLI tool for interacting with LLMs").version(versionString).showHelpAfterError(true).configureOutput({
  writeErr: (str) => process.stderr.write(str),
  writeOut: (str) => process.stdout.write(str)
});
// Throw instead of calling process.exit so errors can be handled centrally.
program.exitOverride();
installHelp(program);
// `pai chat` subcommand and its options.
var chatCmd = program.command("chat").description("Chat with an LLM").argument("[prompt]", "User message (or use stdin/--input-file)").option("--config <path>", "Config file path").option("--session <path>", "Session file path (JSONL)").option("--system <text>", "System instruction").option("--system-file <path>", "System instruction from file").option("--input-file <path>", "User input from file").option("--image <path...>", "Image file(s) to include").option("--provider <name>", "Provider name").option("--model <name>", "Model name").option("--temperature <number>", "Temperature (0-2)", parseFloat).option("--max-tokens <number>", "Max tokens", parseInt).option("--stream", "Enable streaming output").option("--no-append", "Do not append to session file").option("--json", "Output progress as NDJSON").option("--quiet", "Suppress progress output").option("--log <path>", "Log file path (Markdown)").option("--max-turns <number>", "Max tool-call turns (default: 100)", parseInt).option("--dry-run", "Show resolved config without calling LLM").action(async (prompt, options) => {
  await handleChatCommand(prompt, options);
});
addSubcommandExamples(chatCmd, "chat");
|
|
2499
|
+
var embedCmd = program.command("embed").description("Generate text embeddings").argument("[text]", "Text to embed (or use stdin/--input-file)").option("--provider <name>", "Provider name").option("--model <name>", "Embedding model name").option("--config <path>", "Config file path").option("--json", "Output as JSON").option("--quiet", "Suppress progress output").option("--batch", "Enable batch embedding mode (input is JSON string array)").option("--input-file <path>", "Read input from file").action(async (text, options) => {
|
|
2500
|
+
await handleEmbedCommand(text, options);
|
|
2501
|
+
});
|
|
2502
|
+
addSubcommandExamples(embedCmd, "embed");
|
|
2503
|
+
var modelCommand = program.command("model").description("Manage model configurations");
|
|
2504
|
+
modelCommand.command("list").description("List providers and models").option("--config <path>", "Config file path").option("--all", "Show all supported providers").option("--json", "Output as JSON").action(async (options) => {
|
|
2505
|
+
await handleModelList(options);
|
|
2506
|
+
});
|
|
2507
|
+
addSubcommandExamples(modelCommand.commands.find((c) => c.name() === "list"), "list");
|
|
2508
|
+
modelCommand.command("config").description("Configure providers").option("--config <path>", "Config file path").option("--add", "Add or update provider").option("--update", "Update fields on an existing provider").option("--delete", "Delete provider").option("--show", "Show provider configuration").option("--name <name>", "Provider name").option("--provider <type>", "Provider type").option("--set <key=value...>", "Set configuration values").option("--default", "Set as default provider (with --add or --update)").option("--json", "Output as JSON").action(async (options) => {
|
|
2509
|
+
await handleModelConfig(options);
|
|
2510
|
+
});
|
|
2511
|
+
addSubcommandExamples(modelCommand.commands.find((c) => c.name() === "config"), "config");
|
|
2512
|
+
modelCommand.command("login").description("Login to an OAuth provider (github-copilot, anthropic, google-gemini-cli, etc.)").option("--config <path>", "Config file path").option("--name <name>", "Provider name").action(async (options) => {
|
|
2513
|
+
await handleModelLogin(options);
|
|
2514
|
+
});
|
|
2515
|
+
addSubcommandExamples(modelCommand.commands.find((c) => c.name() === "login"), "login");
|
|
2516
|
+
modelCommand.command("default").description("View or set the default provider").option("--config <path>", "Config file path").option("--name <name>", "Provider name to set as default").option("--embed-provider <name>", "Set default embed provider").option("--embed-model <model>", "Set default embed model").option("--json", "Output as JSON").action(async (options) => {
|
|
2517
|
+
await handleModelDefault(options);
|
|
2518
|
+
});
|
|
2519
|
+
addSubcommandExamples(modelCommand.commands.find((c) => c.name() === "default"), "default");
|
|
2520
|
+
program.on("command:*", () => {
|
|
2521
|
+
process.stderr.write(`Invalid command: ${program.args.join(" ")}
|
|
2522
|
+
See --help for available commands.
|
|
2523
|
+
`);
|
|
2524
|
+
process.exit(2);
|
|
2525
|
+
});
|
|
2526
|
+
(async () => {
|
|
2527
|
+
try {
|
|
2528
|
+
await program.parseAsync(process.argv);
|
|
2529
|
+
} catch (err) {
|
|
2530
|
+
if (err && typeof err === "object" && "exitCode" in err) {
|
|
2531
|
+
const exitCode = err.exitCode;
|
|
2532
|
+
process.exitCode = exitCode === 1 ? 2 : exitCode;
|
|
2533
|
+
} else {
|
|
2534
|
+
process.stderr.write(`Error: ${err instanceof Error ? err.message : String(err)}
|
|
2535
|
+
`);
|
|
2536
|
+
process.exitCode = 2;
|
|
2537
|
+
}
|
|
2538
|
+
}
|
|
2539
|
+
})();
|
|
2540
|
+
//# sourceMappingURL=index.js.map
|