@tyvm/knowhow 0.0.10 → 0.0.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +4 -1
- package/src/agents/base/base.ts +45 -0
- package/src/agents/researcher/researcher.ts +2 -1
- package/src/agents/tools/index.ts +1 -0
- package/src/agents/tools/list.ts +34 -0
- package/src/agents/tools/stringReplace.ts +51 -0
- package/src/chat.ts +2 -0
- package/src/clients/gemini.ts +2 -2
- package/src/clients/index.ts +11 -3
- package/src/clients/openai.ts +4 -4
- package/src/clients/xai.ts +4 -0
- package/src/config.ts +12 -3
- package/src/conversion.ts +3 -1
- package/src/embeddings.ts +3 -3
- package/src/hashes.ts +5 -0
- package/src/index.ts +13 -3
- package/src/services/EmbeddingService.ts +9 -3
- package/src/services/Mcp.ts +31 -14
- package/src/types.ts +16 -8
- package/ts_build/src/agents/base/base.d.ts +1 -0
- package/ts_build/src/agents/base/base.js +24 -0
- package/ts_build/src/agents/base/base.js.map +1 -1
- package/ts_build/src/agents/researcher/researcher.js +2 -0
- package/ts_build/src/agents/researcher/researcher.js.map +1 -1
- package/ts_build/src/agents/tools/index.d.ts +1 -0
- package/ts_build/src/agents/tools/index.js +1 -0
- package/ts_build/src/agents/tools/index.js.map +1 -1
- package/ts_build/src/agents/tools/list.js +33 -0
- package/ts_build/src/agents/tools/list.js.map +1 -1
- package/ts_build/src/agents/tools/stringReplace.d.ts +1 -0
- package/ts_build/src/agents/tools/stringReplace.js +62 -0
- package/ts_build/src/agents/tools/stringReplace.js.map +1 -0
- package/ts_build/src/chat.js +2 -0
- package/ts_build/src/chat.js.map +1 -1
- package/ts_build/src/clients/gemini.js +1 -1
- package/ts_build/src/clients/gemini.js.map +1 -1
- package/ts_build/src/clients/index.js +6 -2
- package/ts_build/src/clients/index.js.map +1 -1
- package/ts_build/src/clients/openai.js +3 -3
- package/ts_build/src/clients/openai.js.map +1 -1
- package/ts_build/src/clients/xai.js +4 -0
- package/ts_build/src/clients/xai.js.map +1 -1
- package/ts_build/src/commands/chat-ui.d.ts +1 -0
- package/ts_build/src/commands/chat-ui.js +14 -0
- package/ts_build/src/commands/chat-ui.js.map +1 -0
- package/ts_build/src/config.js +5 -2
- package/ts_build/src/config.js.map +1 -1
- package/ts_build/src/conversion.js +3 -1
- package/ts_build/src/conversion.js.map +1 -1
- package/ts_build/src/demo/chat-ui-demo.d.ts +3 -0
- package/ts_build/src/demo/chat-ui-demo.js +20 -0
- package/ts_build/src/demo/chat-ui-demo.js.map +1 -0
- package/ts_build/src/embeddings.js +2 -2
- package/ts_build/src/embeddings.js.map +1 -1
- package/ts_build/src/hashes.js +7 -0
- package/ts_build/src/hashes.js.map +1 -1
- package/ts_build/src/index.js +2 -2
- package/ts_build/src/index.js.map +1 -1
- package/ts_build/src/services/EmbeddingService.js +2 -2
- package/ts_build/src/services/EmbeddingService.js.map +1 -1
- package/ts_build/src/services/Mcp.d.ts +1 -0
- package/ts_build/src/services/Mcp.js +29 -12
- package/ts_build/src/services/Mcp.js.map +1 -1
- package/ts_build/src/services/MessagePreprocessor.d.ts +26 -0
- package/ts_build/src/services/MessagePreprocessor.js +190 -0
- package/ts_build/src/services/MessagePreprocessor.js.map +1 -0
- package/ts_build/src/services/__tests__/MessagePreprocessor.test.d.ts +1 -0
- package/ts_build/src/services/__tests__/MessagePreprocessor.test.js +117 -0
- package/ts_build/src/services/__tests__/MessagePreprocessor.test.js.map +1 -0
- package/ts_build/src/types.d.ts +11 -4
- package/ts_build/src/types.js +16 -9
- package/ts_build/src/types.js.map +1 -1
- package/ts_build/src/ui/InkChatUI.d.ts +1 -0
- package/ts_build/src/ui/InkChatUI.js +792 -0
- package/ts_build/src/ui/InkChatUI.js.map +1 -0
- package/ts_build/src/ui/components/ChatInterface.d.ts +15 -0
- package/ts_build/src/ui/components/ChatInterface.js +39 -0
- package/ts_build/src/ui/components/ChatInterface.js.map +1 -0
- package/ts_build/src/ui/components/ChatMessage.d.ts +8 -0
- package/ts_build/src/ui/components/ChatMessage.js +7 -0
- package/ts_build/src/ui/components/ChatMessage.js.map +1 -0
- package/ts_build/src/ui/components/CommandPalette.d.ts +8 -0
- package/ts_build/src/ui/components/CommandPalette.js +23 -0
- package/ts_build/src/ui/components/CommandPalette.js.map +1 -0
- package/ts_build/src/ui/components/InputBar.d.ts +8 -0
- package/ts_build/src/ui/components/InputBar.js +8 -0
- package/ts_build/src/ui/components/InputBar.js.map +1 -0
- package/ts_build/src/ui/components/Sidebar.d.ts +9 -0
- package/ts_build/src/ui/components/Sidebar.js +7 -0
- package/ts_build/src/ui/components/Sidebar.js.map +1 -0
- package/ts_build/src/ui/components/StatusBar.d.ts +10 -0
- package/ts_build/src/ui/components/StatusBar.js +8 -0
- package/ts_build/src/ui/components/StatusBar.js.map +1 -0
- package/ts_build/src/ui/demo.d.ts +3 -0
- package/ts_build/src/ui/demo.js +26 -0
- package/ts_build/src/ui/demo.js.map +1 -0
- package/ts_build/src/ui/index.d.ts +13 -0
- package/ts_build/src/ui/index.js +16 -0
- package/ts_build/src/ui/index.js.map +1 -0
- package/ts_build/tests/integration/OpenAI-MessagePreprocessor.test.d.ts +1 -0
- package/ts_build/tests/integration/OpenAI-MessagePreprocessor.test.js +148 -0
- package/ts_build/tests/integration/OpenAI-MessagePreprocessor.test.js.map +1 -0
- package/ts_build/tests/services/MessagePreprocessor.test.d.ts +1 -0
- package/ts_build/tests/services/MessagePreprocessor.test.js +117 -0
- package/ts_build/tests/services/MessagePreprocessor.test.js.map +1 -0
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@tyvm/knowhow",
-  "version": "0.0.10",
+  "version": "0.0.12",
   "description": "ai cli with plugins and agents",
   "main": "ts_build/src/index.js",
   "bin": {
@@ -45,6 +45,7 @@
   "@modelcontextprotocol/sdk": "^1.13.3",
   "@notionhq/client": "^2.2.14",
   "@octokit/rest": "^20.0.2",
+  "@types/react": "^19.1.8",
   "asana": "^3.0.16",
   "axios": "^1.5.0",
   "cheerio": "^1.0.0",
@@ -52,6 +53,7 @@
   "express": "^4.19.2",
   "figma-js": "^1.16.1-0",
   "gitignore-to-glob": "^0.3.0",
+  "ink": "^6.0.1",
   "jira-client": "^8.2.2",
   "marked": "^10.0.0",
   "marked-terminal": "^6.2.0",
@@ -68,6 +70,7 @@
   "progress-estimator": "^0.3.1",
   "puppeteer": "^24.4.0",
   "puppeteer-extra-plugin-stealth": "^2.11.2",
+  "react": "^19.1.0",
   "source-map-support": "^0.5.21",
   "typescript": "^4.6.3",
   "ws": "^8.18.1",

package/src/agents/base/base.ts
CHANGED
@@ -333,6 +333,40 @@ export abstract class BaseAgent implements IAgent {
     await this.healthCheck();
   }

+  isRequiredToolMissing() {
+    const requiredToolAvailable = this.getEnabledToolNames().some(
+      (t) =>
+        this.requiredToolNames.includes(t) ||
+        this.requiredToolNames.includes(mcpToolName(t))
+    );
+
+    if (requiredToolAvailable) {
+      return false;
+    }
+
+    console.log(
+      "Required tool not available, checking for finalAnswer",
+      this.getEnabledToolNames(),
+      this.requiredToolNames
+    );
+
+    // Otherwise we're missing the required tool, lets use finalAnswer if we have it
+    const finalAnswer = "finalAnswer";
+    const requiredFinalAnswer = this.requiredToolNames.includes(finalAnswer);
+    const hasFinalAnswer = this.getEnabledToolNames().includes(finalAnswer);
+
+    // We have the final answer tool, but it wasn't required
+    if (hasFinalAnswer && !requiredFinalAnswer) {
+      console.warn(
+        "Required tool not available, setting finalAnswer as required tool"
+      );
+      this.requiredToolNames.push("finalAnswer");
+      return false;
+    }
+
+    return true;
+  }
+
   setNotHealthy() {
     this.lastHealthCheckTime = 0;
   }
@@ -391,6 +425,7 @@ export abstract class BaseAgent implements IAgent {

     messages = this.formatInputMessages(messages);
     this.updateCurrentThread(messages);
+    const isMissingTool = this.isRequiredToolMissing();

     const startIndex = 0;
     const endIndex = messages.length;
@@ -454,6 +489,16 @@ export abstract class BaseAgent implements IAgent {
       return firstMessage.content;
     }

+    // infinite loop if we cannot exit
+    if (isMissingTool) {
+      const error = `Required tool: ${JSON.stringify(
+        this.requiredToolNames
+      )} not available, options are ${this.getEnabledToolNames().join(", ")}`;
+      console.error(error);
+      this.agentEvents.emit(this.eventTypes.done, error);
+      return error;
+    }
+
     // Early exit: killed, agent was requested to wrap up
     if (this.pendingUserMessages.length === 0 && this.status === "killed") {
       console.log("Agent killed, stopping execution");

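The isRequiredToolMissing guard added above is what keeps the agent's main loop from running forever when a required tool never becomes available. A minimal standalone sketch of the same decision logic, using plain string arrays in place of the agent's internal state and omitting the mcpToolName normalization shown in the diff (names below are illustrative, not the package's API):

function isRequiredToolMissing(
  enabledToolNames: string[],
  requiredToolNames: string[]
): boolean {
  // Satisfied as soon as any enabled tool matches a required tool name.
  if (enabledToolNames.some((t) => requiredToolNames.includes(t))) {
    return false;
  }
  // Fall back to finalAnswer when it is enabled but not yet required,
  // so the run can still terminate with an answer.
  if (
    enabledToolNames.includes("finalAnswer") &&
    !requiredToolNames.includes("finalAnswer")
  ) {
    requiredToolNames.push("finalAnswer");
    return false;
  }
  // Truly missing: the caller logs the error, emits the done event, and returns early.
  return true;
}

// e.g. a required "patchFile" tool that was disabled still lets the run exit cleanly:
isRequiredToolMissing(["finalAnswer", "readFile"], ["patchFile"]); // false
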
package/src/agents/researcher/researcher.ts
CHANGED
@@ -10,8 +10,9 @@ export class ResearcherAgent extends BaseAgent {
     super();
     this.setModel(Models.google.Gemini_20_Flash);
     this.setProvider("google");
-
     this.disableTool("patchFile");
+    this.disableTool("writeFile");
+    this.disableTool("writeChunk");
   }

   async getInitialMessages(userInput: string) {

package/src/agents/tools/list.ts
CHANGED
@@ -511,4 +511,38 @@ export const includedTools = [
   ...asana.definitions,
   ...github.definitions,
   ...language.definitions,
+  {
+    type: "function",
+    function: {
+      name: "stringReplace",
+      description:
+        "Replace exact string matches in multiple files. Performs global replacement of all occurrences of the find string with the replace string.",
+      parameters: {
+        type: "object",
+        positional: true,
+        properties: {
+          findString: {
+            type: "string",
+            description: "The exact string to find and replace",
+          },
+          replaceString: {
+            type: "string",
+            description: "The string to replace the found string with",
+          },
+          filePaths: {
+            type: "array",
+            items: {
+              type: "string",
+            },
+            description: "Array of file paths where the replacement should be performed",
+          },
+        },
+        required: ["findString", "replaceString", "filePaths"],
+      },
+      returns: {
+        type: "string",
+        description: "A summary of the replacement results for each file",
+      },
+    },
+  },
 ] as Tool[];

package/src/agents/tools/stringReplace.ts
ADDED
@@ -0,0 +1,51 @@
+import * as fs from "fs";
+import { fileExists } from "../../utils";
+
+export async function stringReplace(
+  findString: string,
+  replaceString: string,
+  filePaths: string[]
+): Promise<string> {
+  if (!findString || replaceString === undefined || !filePaths || filePaths.length === 0) {
+    throw new Error("findString, replaceString, and filePaths are all required parameters");
+  }
+
+  const results: string[] = [];
+  let totalReplacements = 0;
+
+  for (const filePath of filePaths) {
+    try {
+      const exists = await fileExists(filePath);
+      if (!exists) {
+        results.push(`❌ File not found: ${filePath}`);
+        continue;
+      }
+
+      const content = fs.readFileSync(filePath, "utf8");
+      const originalContent = content;
+
+      // Count occurrences before replacement
+      const matches = content.split(findString).length - 1;
+
+      if (matches === 0) {
+        results.push(`ℹ️ No matches found in: ${filePath}`);
+        continue;
+      }
+
+      // Perform the replacement
+      const newContent = content.replace(new RegExp(findString.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), 'g'), replaceString);
+
+      // Write the modified content back to the file
+      fs.writeFileSync(filePath, newContent);
+
+      totalReplacements += matches;
+      results.push(`✅ Replaced ${matches} occurrence(s) in: ${filePath}`);
+
+    } catch (error) {
+      results.push(`❌ Error processing ${filePath}: ${error.message}`);
+    }
+  }
+
+  const summary = `\n📊 Summary: ${totalReplacements} total replacements made across ${filePaths.length} file(s)`;
+  return results.join("\n") + summary;
+}

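A hedged usage sketch of the new stringReplace helper; the import path and target files below are assumptions for illustration, not taken from the package:

import { stringReplace } from "./src/agents/tools/stringReplace"; // path assumed

// Replace every exact occurrence of "0.0.10" with "0.0.12" in a couple of
// files and print the per-file report plus the trailing 📊 summary line.
async function main() {
  const report = await stringReplace("0.0.10", "0.0.12", [
    "package.json", // illustrative targets
    "README.md",
  ]);
  console.log(report);
}

main().catch(console.error);

Because the helper escapes the find string before building its RegExp, the search is a literal match rather than a pattern, which matches the "exact string" wording in the tool definition above.
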
package/src/chat.ts
CHANGED
@@ -218,6 +218,8 @@ export async function chatLoop<E extends GptQuestionEmbedding>(
         agents
       );
       activeAgent = Agents.getAgent(selected) as BaseAgent;
+      model = activeAgent.getModel();
+      provider = activeAgent.getProvider() as keyof typeof Clients.clients;
       break;
     case ChatFlags.agent:
       Flags.flip(ChatFlags.agent);

package/src/clients/gemini.ts
CHANGED
@@ -10,7 +10,7 @@ import {
   UsageMetadata,
 } from "@google/genai";
 import { wait } from "../utils";
-import { Models } from "../types";
+import { EmbeddingModels, Models } from "../types";

 import {
   GenericClient,
@@ -393,7 +393,7 @@ export class GenericGeminiClient extends GoogleGenAI implements GenericClient {
     [Models.google.Veo_2]: {
       video_generation: 0.35,
     },
-    [
+    [EmbeddingModels.google.Gemini_Embedding]: {
       input: 0, // Free of charge
       output: 0, // Free of charge
     },

package/src/clients/index.ts
CHANGED
@@ -9,7 +9,7 @@ import { GenericOpenAiClient } from "./openai";
 import { GenericAnthropicClient } from "./anthropic";
 import { GenericGeminiClient } from "./gemini";
 import { HttpClient } from "./http";
-import { Models } from "../types";
+import { EmbeddingModels, Models } from "../types";
 import { getConfig } from "../config";
 import { GenericXAIClient } from "./xai";

@@ -34,11 +34,19 @@ export class AIClient {
   };

   clientModels = {
-    ...(envCheck("OPENAI_KEY") && {
+    ...(envCheck("OPENAI_KEY") && {
+      openai: Object.values(Models.openai).concat(
+        Object.values(EmbeddingModels.openai)
+      ),
+    }),
     ...(envCheck("ANTHROPIC_API_KEY") && {
       anthropic: Object.values(Models.anthropic),
     }),
-    ...(envCheck("GEMINI_API_KEY") && {
+    ...(envCheck("GEMINI_API_KEY") && {
+      google: Object.values(Models.google).concat(
+        Object.values(EmbeddingModels.google)
+      ),
+    }),
     ...(envCheck("XAI_API_KEY") && { xai: Object.values(Models.xai) }),
   };

package/src/clients/openai.ts
CHANGED
@@ -13,7 +13,7 @@ import {
   ChatCompletionMessageToolCall,
 } from "openai/resources/chat";

-import { Models, OpenAiReasoningModels } from "../types";
+import { EmbeddingModels, Models, OpenAiReasoningModels } from "../types";

 const config = getConfigSync();

@@ -177,17 +177,17 @@ export class GenericOpenAiClient extends OpenAI implements GenericClient {
       cached_input: 0.375,
       output: 6.0,
     },
-    [
+    [EmbeddingModels.openai.EmbeddingAda2]: {
       input: 0.1,
       cached_input: 0,
       output: 0,
     },
-    [
+    [EmbeddingModels.openai.EmbeddingLarge3]: {
       input: 0.13,
       cached_input: 0,
       output: 0,
     },
-    [
+    [EmbeddingModels.openai.EmbeddingLarge3]: {
       input: 0.02,
       cached_input: 0,
       output: 0,

package/src/clients/xai.ts
CHANGED

package/src/config.ts
CHANGED
@@ -4,7 +4,13 @@ import * as os from "os";
 import gitignoreToGlob from "gitignore-to-glob";
 import { Prompts } from "./prompts";
 import { promisify } from "util";
-import {
+import {
+  Config,
+  Language,
+  AssistantConfig,
+  Models,
+  EmbeddingModels,
+} from "./types";
 import { mkdir, writeFile, readFile, fileExists } from "./utils";

 const defaultConfig = {
@@ -46,7 +52,7 @@ const defaultConfig = {
       chunkSize: 2000,
     },
   ],
-  embeddingModel:
+  embeddingModel: EmbeddingModels.openai.EmbeddingAda2,

   agents: [
     {
@@ -106,7 +112,7 @@ async function ensureGlobalConfigDir() {
   for (const folder of globalTemplateFolders) {
     const folderPath = path.join(globalConfigDir, folder);
     await mkdir(folderPath, { recursive: true });
-    fs.chmodSync(folderPath,
+    fs.chmodSync(folderPath, 0o744);
   }

   for (const file of Object.keys(globalTemplateFiles)) {
@@ -180,6 +186,9 @@ export function getConfigSync() {
 }

 export async function getConfig() {
+  if (!fs.existsSync(".knowhow/knowhow.json")) {
+    return {} as Config;
+  }
   const config = JSON.parse(await readFile(".knowhow/knowhow.json", "utf8"));
   return config as Config;
 }

package/src/conversion.ts
CHANGED
@@ -16,7 +16,9 @@ export async function processAudio(
   if (exists && reusePreviousTranscript) {
     console.log(`Transcription ${outputPath} already exists, skipping`);
     const fileContent = await readFile(outputPath, "utf8");
-    return outputPath.endsWith("txt")
+    return outputPath.endsWith("txt")
+      ? fileContent.split("\n")
+      : JSON.parse(fileContent);
   }

   const chunks = await Downloader.chunk(

package/src/embeddings.ts
CHANGED
@@ -1,7 +1,7 @@
 import glob from "glob";
 import * as path from "path";
 import { getConfig, loadPrompt } from "./config";
-import { Config, Hashes, Embeddable, EmbeddingBase, Models } from "./types";
+import { Config, Hashes, Embeddable, EmbeddingBase, Models, EmbeddingModels } from "./types";
 import {
   readFile,
   writeFile,
@@ -168,7 +168,7 @@ export async function embed(
     console.log("Embedding", chunkId);
     const providerEmbeddings = await Clients.createEmbedding("", {
       input: textOfChunk,
-      model: model ||
+      model: model || EmbeddingModels.openai.EmbeddingAda2,
     });

     vector = providerEmbeddings.data[0].embedding;
@@ -392,7 +392,7 @@ export function pruneMetadata(embeddings: Embeddable[], characterLimit = 5000) {
 export async function queryEmbedding<E>(
   query: string,
   embeddings: Embeddable<E>[],
-  model =
+  model = EmbeddingModels.openai.EmbeddingAda2
 ) {
   const providerEmbeddings = await Clients.createEmbedding("", {
     input: takeFirstNWords(query, 5000),

package/src/hashes.ts
CHANGED
@@ -1,3 +1,4 @@
+import fs from "fs";
 import * as crypto from "crypto";
 import { Hashes } from "./types";
 import { readFile, writeFile } from "./utils";
@@ -22,6 +23,10 @@ export async function checkNoFilesChanged(
   hashes: any
 ) {
   for (const file of files) {
+    if (!fs.existsSync(file)) {
+      return false;
+    }
+
     // get the hash of the file
     const fileContent = await convertToText(file);
     const fileHash = crypto.createHash("md5").update(fileContent).digest("hex");

package/src/index.ts
CHANGED
@@ -13,7 +13,13 @@ import { promisify } from "util";
 import glob from "glob";

 import { Prompts } from "./prompts";
-import {
+import {
+  Config,
+  Hashes,
+  Embeddable,
+  GenerationSource,
+  EmbeddingModels,
+} from "./types";
 import { readFile, writeFile, fileExists } from "./utils";
 import {
   getConfig,
@@ -54,7 +60,8 @@ export async function embed() {
   const config = await getConfig();
   const ignorePattern = await getIgnorePattern();

-  const defaultModel =
+  const defaultModel =
+    config.embeddingModel || EmbeddingModels.openai.EmbeddingAda2;
   for (const source of config.embedSources) {
     await embedSource(defaultModel, source, ignorePattern);
   }
@@ -257,7 +264,10 @@ export async function handleMultiOutputGeneration(
   }

   const outputFileName = outputName || name;
-  const outputFile = path.join(
+  const outputFile = path.join(
+    outputFolder,
+    outputFileName + "." + outputExt
+  );
   console.log({ dir, inputPath, nestedFolder, outputFile });

   const toCheck = [file, outputFile];

package/src/services/EmbeddingService.ts
CHANGED
@@ -1,4 +1,10 @@
-import {
+import {
+  Config,
+  Embeddable,
+  EmbeddingBase,
+  EmbeddingModels,
+  Models,
+} from "../types";
 import {
   readFile,
   writeFile,
@@ -262,7 +268,7 @@ export class EmbeddingService {
     console.log("Embedding", chunkId);
     const providerEmbeddings = await this.createEmbedding({
       input: textOfChunk,
-      model: model ||
+      model: model || EmbeddingModels.openai.EmbeddingAda2,
     });

     vector = providerEmbeddings.data[0].embedding;
@@ -305,7 +311,7 @@ export class EmbeddingService {
   async queryEmbedding<E>(
     query: string,
     embeddings: Embeddable<E>[],
-    model =
+    model = EmbeddingModels.openai.EmbeddingAda2
   ): Promise<EmbeddingBase<E>[]> {
     // Implementation of queryEmbedding method
     const providerEmbeddings = await this.createEmbedding({

package/src/services/Mcp.ts
CHANGED
@@ -39,6 +39,7 @@ export class McpService {
   config: McpConfig[] = [];
   tools: Tool[] = [];
   mcpPrefix = "mcp";
+  toolAliases: Record<string, string> = {};

   async createStdioClients(mcpServers: McpConfig[] = []) {
     if (this.clients.length) {
@@ -47,11 +48,24 @@ export class McpService {

     this.config = mcpServers;
     this.transports = mcpServers.map((mcp) => {
-      const
+      const commandString = mcp.command
+        ? `${mcp.command} ${mcp.args?.join(" ")}`
+        : "";
+      const logFormat = `${mcp.name}: Command: ${commandString}, URL: ${mcp.url}`;

       console.log("Creating transport for", logFormat);
       if (mcp.command) {
-
+        const stdioParams: StdioServerParameters = {
+          command: mcp.command,
+          args: mcp.args,
+          env: mcp.env
+            ? {
+                ...process.env,
+                ...mcp.env,
+              }
+            : undefined,
+        };
+        return new StdioClientTransport(stdioParams);
       }
       if (mcp?.params?.socket) {
         return new MCPWebSocketTransport(mcp.params.socket);
@@ -139,13 +153,7 @@ export class McpService {
   }

   parseToolName(toolName: string) {
-
-
-    if (split.length < 2) {
-      return null;
-    }
-
-    return split.slice(2).join("_");
+    return this.toolAliases[toolName] || toolName;
   }

   getToolClientIndex(toolName: string) {
@@ -238,10 +246,14 @@ export class McpService {
       const config = this.config[i];
       const client = this.clients[i];
       const clientTools = await client.listTools();
-
-
-
-
+
+      for (const tool of clientTools.tools) {
+        const transformed = this.toOpenAiTool(i, tool as any as McpTool);
+        if (transformed.function.name !== tool.name) {
+          this.toolAliases[transformed.function.name] = tool.name;
+        }
+        tools.push(transformed);
+      }
     }

     this.tools = tools;
@@ -249,10 +261,15 @@ export class McpService {
   }

   toOpenAiTool(index: number, tool: McpTool) {
+    const mcpName = this.config[index].name;
+    const prefix = mcpName
+      ? `${this.mcpPrefix}_${index}_${mcpName}`
+      : `${this.mcpPrefix}_${index}`;
+
     const transformed: Tool = {
       type: "function",
       function: {
-        name: `${
+        name: `${prefix}_${tool.name}`,
         description: tool.description,
         parameters: {
           type: "object",

package/src/types.ts
CHANGED
@@ -136,6 +136,7 @@ export const Models = {
     Haiku3: "claude-3-haiku-20240307",
   },
   xai: {
+    Grok4: "grok-4-0709",
     Grok3Beta: "grok-3-beta",
     Grok3MiniBeta: "grok-3-mini-beta",
     Grok3FastBeta: "grok-3-fast-beta",
@@ -164,9 +165,6 @@ export const Models = {
     GPT_4o_Search: "gpt-4o-search-preview-2025-03-11",
     Computer_Use: "computer-use-preview-2025-03-11",
     Codex_Mini: "codex-mini-latest",
-    EmbeddingAda2: "text-embedding-ada-002",
-    EmbeddingLarge3: "text-embedding-3-large",
-    EmbeddingSmall3: "text-embedding-3-small",
   },
   google: {
     Gemini_25_Flash_Preview: "gemini-2.5-flash-preview-05-20",
@@ -178,13 +176,23 @@ export const Models = {
     Gemini_15_Flash: "gemini-1.5-flash",
     Gemini_15_Flash_8B: "gemini-1.5-flash-8b",
     Gemini_15_Pro: "gemini-1.5-pro",
-    Gemini_Embedding: "gemini-embedding-exp",
     Imagen_3: "imagen-3.0-generate-002",
     Veo_2: "veo-2.0-generate-001",
     Gemini_20_Flash_Live: "gemini-2.0-flash-live-001",
   },
 };

+export const EmbeddingModels = {
+  openai: {
+    EmbeddingAda2: "text-embedding-ada-002",
+    EmbeddingLarge3: "text-embedding-3-large",
+    EmbeddingSmall3: "text-embedding-3-small",
+  },
+  google: {
+    Gemini_Embedding: "gemini-embedding-exp",
+  },
+};
+
 export const Providers = Object.keys(Models).reduce((obj, key) => {
   obj[key] = key;
   return obj;
@@ -199,9 +207,9 @@ export const OpenAiReasoningModels = [
 ];

 export const OpenAiEmbeddingModels = [
-
-
-
+  EmbeddingModels.openai.EmbeddingAda2,
+  EmbeddingModels.openai.EmbeddingLarge3,
+  EmbeddingModels.openai.EmbeddingSmall3,
 ];
 export const OpenAiResponseOnlyModels = [Models.openai.Codex_Mini];

@@ -222,4 +230,4 @@ export const GoogleImageModels = [

 export const GoogleVideoModels = [Models.google.Veo_2];

-export const GoogleEmbeddingModels = [
+export const GoogleEmbeddingModels = [EmbeddingModels.google.Gemini_Embedding];

package/ts_build/src/agents/base/base.d.ts
CHANGED
@@ -75,6 +75,7 @@ export declare abstract class BaseAgent implements IAgent {
     formatOutputMessages(messages: Message[]): Message[];
     healthCheck(): Promise<boolean>;
     selectHealthyModel(): Promise<void>;
+    isRequiredToolMissing(): boolean;
     setNotHealthy(): void;
     pause(): void;
     unpause(): void;

package/ts_build/src/agents/base/base.js
CHANGED
@@ -252,6 +252,23 @@ class BaseAgent {
         }
         await this.healthCheck();
     }
+    isRequiredToolMissing() {
+        const requiredToolAvailable = this.getEnabledToolNames().some((t) => this.requiredToolNames.includes(t) ||
+            this.requiredToolNames.includes((0, utils_1.mcpToolName)(t)));
+        if (requiredToolAvailable) {
+            return false;
+        }
+        console.log("Required tool not available, checking for finalAnswer", this.getEnabledToolNames(), this.requiredToolNames);
+        const finalAnswer = "finalAnswer";
+        const requiredFinalAnswer = this.requiredToolNames.includes(finalAnswer);
+        const hasFinalAnswer = this.getEnabledToolNames().includes(finalAnswer);
+        if (hasFinalAnswer && !requiredFinalAnswer) {
+            console.warn("Required tool not available, setting finalAnswer as required tool");
+            this.requiredToolNames.push("finalAnswer");
+            return false;
+        }
+        return true;
+    }
     setNotHealthy() {
         this.lastHealthCheckTime = 0;
     }
@@ -300,6 +317,7 @@ class BaseAgent {
         }
         messages = this.formatInputMessages(messages);
         this.updateCurrentThread(messages);
+        const isMissingTool = this.isRequiredToolMissing();
         const startIndex = 0;
         const endIndex = messages.length;
         const compressThreshold = 10000;
@@ -341,6 +359,12 @@ class BaseAgent {
             this.agentEvents.emit(this.eventTypes.done, firstMessage.content);
             return firstMessage.content;
         }
+        if (isMissingTool) {
+            const error = `Required tool: ${JSON.stringify(this.requiredToolNames)} not available, options are ${this.getEnabledToolNames().join(", ")}`;
+            console.error(error);
+            this.agentEvents.emit(this.eventTypes.done, error);
+            return error;
+        }
         if (this.pendingUserMessages.length === 0 && this.status === "killed") {
             console.log("Agent killed, stopping execution");
             this.status = "killed";