@tyvm/knowhow 0.0.36 → 0.0.38

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103)
  1. package/package.json +1 -1
  2. package/src/agents/base/base.ts +8 -0
  3. package/src/agents/tools/aiClient.ts +36 -0
  4. package/src/agents/tools/lintFile.ts +1 -1
  5. package/src/agents/tools/list.ts +34 -0
  6. package/src/ai.ts +5 -4
  7. package/src/auth/browserLogin.ts +283 -0
  8. package/src/auth/errors.ts +6 -0
  9. package/src/auth/spinner.ts +23 -0
  10. package/src/chat/CliChatService.ts +25 -6
  11. package/src/chat/modules/AgentModule.ts +1 -2
  12. package/src/chat/modules/AskModule.ts +1 -2
  13. package/src/chat/types.ts +14 -4
  14. package/src/chat-old.ts +446 -0
  15. package/src/chat.ts +48 -433
  16. package/src/cli.ts +9 -14
  17. package/src/clients/index.ts +35 -2
  18. package/src/embeddings.ts +1 -1
  19. package/src/index.ts +1 -8
  20. package/src/login.ts +14 -1
  21. package/src/microphone.ts +0 -1
  22. package/src/plugins/downloader/downloader.ts +4 -2
  23. package/src/services/KnowhowClient.ts +1 -1
  24. package/src/services/index.ts +1 -2
  25. package/tests/manual/browser-login/README.md +189 -0
  26. package/tests/manual/browser-login/test_browser_login_basic.ts +115 -0
  27. package/tests/manual/browser-login/test_cli_integration.ts +169 -0
  28. package/tests/manual/browser-login/test_cross_platform_browser.ts +186 -0
  29. package/tests/manual/browser-login/test_error_scenarios.ts +223 -0
  30. package/tests/manual/cli/no-env.sh +267 -0
  31. package/ts_build/src/agents/base/base.js +4 -0
  32. package/ts_build/src/agents/base/base.js.map +1 -1
  33. package/ts_build/src/agents/tools/aiClient.d.ts +2 -0
  34. package/ts_build/src/agents/tools/aiClient.js +21 -1
  35. package/ts_build/src/agents/tools/aiClient.js.map +1 -1
  36. package/ts_build/src/agents/tools/lintFile.js +1 -1
  37. package/ts_build/src/agents/tools/lintFile.js.map +1 -1
  38. package/ts_build/src/agents/tools/list.js +32 -0
  39. package/ts_build/src/agents/tools/list.js.map +1 -1
  40. package/ts_build/src/ai.d.ts +1 -1
  41. package/ts_build/src/ai.js +2 -1
  42. package/ts_build/src/ai.js.map +1 -1
  43. package/ts_build/src/auth/browserLogin.d.ts +11 -0
  44. package/ts_build/src/auth/browserLogin.js +197 -0
  45. package/ts_build/src/auth/browserLogin.js.map +1 -0
  46. package/ts_build/src/auth/errors.d.ts +4 -0
  47. package/ts_build/src/auth/errors.js +13 -0
  48. package/ts_build/src/auth/errors.js.map +1 -0
  49. package/ts_build/src/auth/spinner.d.ts +7 -0
  50. package/ts_build/src/auth/spinner.js +23 -0
  51. package/ts_build/src/auth/spinner.js.map +1 -0
  52. package/ts_build/src/chat/CliChatService.d.ts +4 -3
  53. package/ts_build/src/chat/CliChatService.js +18 -4
  54. package/ts_build/src/chat/CliChatService.js.map +1 -1
  55. package/ts_build/src/chat/modules/AgentModule.d.ts +1 -1
  56. package/ts_build/src/chat/modules/AgentModule.js +1 -2
  57. package/ts_build/src/chat/modules/AgentModule.js.map +1 -1
  58. package/ts_build/src/chat/modules/AskModule.js +1 -2
  59. package/ts_build/src/chat/modules/AskModule.js.map +1 -1
  60. package/ts_build/src/chat/types.d.ts +5 -3
  61. package/ts_build/src/chat-old.d.ts +13 -0
  62. package/ts_build/src/chat-old.js +340 -0
  63. package/ts_build/src/chat-old.js.map +1 -0
  64. package/ts_build/src/chat.d.ts +3 -13
  65. package/ts_build/src/chat.js +38 -331
  66. package/ts_build/src/chat.js.map +1 -1
  67. package/ts_build/src/chat2.d.ts +1 -1
  68. package/ts_build/src/chat2.js +2 -2
  69. package/ts_build/src/chat2.js.map +1 -1
  70. package/ts_build/src/cli.js +7 -11
  71. package/ts_build/src/cli.js.map +1 -1
  72. package/ts_build/src/clients/index.d.ts +2 -2
  73. package/ts_build/src/clients/index.js +16 -1
  74. package/ts_build/src/clients/index.js.map +1 -1
  75. package/ts_build/src/embeddings.js.map +1 -1
  76. package/ts_build/src/index.d.ts +1 -2
  77. package/ts_build/src/index.js +2 -9
  78. package/ts_build/src/index.js.map +1 -1
  79. package/ts_build/src/login.d.ts +1 -1
  80. package/ts_build/src/login.js +14 -0
  81. package/ts_build/src/login.js.map +1 -1
  82. package/ts_build/src/microphone.js.map +1 -1
  83. package/ts_build/src/plugins/downloader/downloader.d.ts +1 -3
  84. package/ts_build/src/plugins/downloader/downloader.js +4 -4
  85. package/ts_build/src/plugins/downloader/downloader.js.map +1 -1
  86. package/ts_build/src/services/KnowhowClient.js +1 -1
  87. package/ts_build/src/services/KnowhowClient.js.map +1 -1
  88. package/ts_build/src/services/index.js +1 -2
  89. package/ts_build/src/services/index.js.map +1 -1
  90. package/ts_build/tests/manual/browser-login/test_browser_login_basic.d.ts +2 -0
  91. package/ts_build/tests/manual/browser-login/test_browser_login_basic.js +108 -0
  92. package/ts_build/tests/manual/browser-login/test_browser_login_basic.js.map +1 -0
  93. package/ts_build/tests/manual/browser-login/test_cli_integration.d.ts +2 -0
  94. package/ts_build/tests/manual/browser-login/test_cli_integration.js +153 -0
  95. package/ts_build/tests/manual/browser-login/test_cli_integration.js.map +1 -0
  96. package/ts_build/tests/manual/browser-login/test_cross_platform_browser.d.ts +2 -0
  97. package/ts_build/tests/manual/browser-login/test_cross_platform_browser.js +159 -0
  98. package/ts_build/tests/manual/browser-login/test_cross_platform_browser.js.map +1 -0
  99. package/ts_build/tests/manual/browser-login/test_error_scenarios.d.ts +2 -0
  100. package/ts_build/tests/manual/browser-login/test_error_scenarios.js +197 -0
  101. package/ts_build/tests/manual/browser-login/test_error_scenarios.js.map +1 -0
  102. package/src/agents/vim/vim.ts +0 -152
  103. package/src/chat2.ts +0 -62
package/src/chat.ts CHANGED
@@ -1,447 +1,62 @@
- import { ChatCompletionMessageParam } from "openai/resources/chat";
- import Ora from "ora";
- import editor from "@inquirer/editor";
- import { openai } from "./ai";
- import { cosineSimilarity } from "./utils";
- import {
-   EmbeddingBase,
-   GptQuestionEmbedding,
-   Embeddable,
-   ChatInteraction,
- } from "./types";
- import { Marked } from "./utils";
- import { ask } from "./utils";
- import { Plugins } from "./plugins/plugins";
- import { queryEmbedding, getConfiguredEmbeddingMap } from "./embeddings";
- import { services } from "./services/";
- import { FlagsService } from "./services/flags";
- import { IAgent } from "./agents/interface";
- import { Message } from "./clients";
- import { recordAudio, voiceToText } from "./microphone";
- import { Models } from "./ai";
- import { BaseAgent } from "./agents";
- import { getConfig } from "./config";
- import { TokenCompressor } from "./processors/TokenCompressor";
- import { ToolResponseCache } from "./processors/ToolResponseCache";
- import { CustomVariables, XmlToolCallProcessor, HarmonyToolProcessor } from "./processors";
+ #!/usr/bin/env node

- enum ChatFlags {
-   agent = "agent",
-   agents = "agents",
-   debug = "debug",
-   multi = "multi",
-   model = "model",
-   search = "search",
-   clear = "clear",
-   provider = "provider",
-   voice = "voice",
- }
-
- const Flags = new FlagsService(
-   [ChatFlags.agent, ChatFlags.debug, ChatFlags.multi, ChatFlags.voice],
-   true
- );
-
- const taskRegistry = new Map<string, BaseAgent>();
+ /**
+  * New Modular Chat Interface - Simplified and cleaner than original chat.ts
+  */

- export async function askEmbedding<E>(promptText: string) {
-   const options = ["next", "exit", "embeddings", "use"];
-   console.log(`Commands: ${options.join(", ")}`);
-   let input = await ask(promptText + ": ", options);
-   let answer: EmbeddingBase<any> | undefined;
-   let results = new Array<EmbeddingBase>();
-   let embedMap = await getConfiguredEmbeddingMap();
-   const config = await getConfig();
-   const files = Object.keys(embedMap);
+ import { CliChatService } from "./chat/CliChatService.js";
+ import { InternalChatModule } from "./chat/modules/InternalChatModule.js";
+ import { getConfig } from "./config.js";

-   while (input !== "exit") {
-     const embeddings = Object.values(embedMap).flat();
-
-     switch (input) {
-       case "next":
-         answer = results.shift();
-         break;
-       case "embeddings":
-         console.log(files);
-         break;
-       case "use":
-         const searchOptions = ["all", ...files];
-         console.log(searchOptions);
-         const embeddingName = await ask("Embedding to search: ", searchOptions);
-         if (embeddingName === "all") {
-           embedMap = await getConfiguredEmbeddingMap();
-           break;
-         }
-
-         embedMap = { ...{ [embeddingName]: embedMap[embeddingName] } };
-         break;
-       default:
-         results = await queryEmbedding(
-           input,
-           embeddings,
-           config.embeddingModel
-         );
-         answer = results.shift();
-         break;
-     }
-     if (answer) {
-       console.log(
-         Marked.parse(
-           "### TEXT \n" +
-             answer.text +
-             "\n### METADATA \n" +
-             JSON.stringify(answer.metadata, null, 2)
-         )
+ async function main() {
+   try {
+     // Load configuration and plugins
+     let config;
+     try {
+       config = await getConfig();
+     } catch (configError) {
+       console.warn(
+         "Warning: Could not load config, using default plugins:",
+         configError
        );
+       config = {
+         plugins: [
+           "embeddings",
+           "language",
+           "vim",
+           "github",
+           "asana",
+           "jira",
+           "linear",
+           "download",
+           "figma",
+           "url",
+         ],
+       };
      }

-     input = await ask(promptText + ": ");
-   }
- }
-
- const ChatModelDefaults = {
-   openai: Models.openai.GPT_4o,
-   anthropic: Models.anthropic.Sonnet4,
-   google: Models.google.Gemini_25_Flash_Preview,
-   xai: Models.xai.Grok3Beta,
- };
- export async function askAI<E extends EmbeddingBase>(
-   query: string,
-   provider = "openai",
-   model = ChatModelDefaults[provider]
- ) {
-   const gptPrompt = `
+     // Create chat service with plugins
+     const chatService = new CliChatService(config.plugins);

-   The user has asked:
-   ${query}
+     // Load internal chat module (includes all core functionality)
+     const internalModule = new InternalChatModule();
+     await internalModule.initialize(chatService);

-   Output Format in Markdown
-   `;
-   if (Flags.enabled("debugger")) {
-     console.log(gptPrompt);
+     // Start the chat loop
+     await chatService.startChatLoop();
+   } catch (error) {
+     console.error("Error starting chat:", error);
+     process.exit(1);
    }
-
-   const thread = [
-     {
-       role: "system",
-       content:
-         "Helpful Codebase assistant. Answer users questions using the embedding data that is provided with the user's question. You have limited access to the codebase based off of how similar the codebase is to the user's question. You may reference file paths by using the IDs present in the embedding data, but be sure to remove the chunk from the end of the filepaths.",
-     },
-     { role: "user", content: gptPrompt },
-   ] as Message[];
-
-   const { Clients } = services();
-   const response = await Clients.createCompletion(provider, {
-     messages: thread,
-     model,
-   });
-
-   return response.choices[0].message.content;
- }
-
- export async function getInput(
-   question: string,
-   options = [],
-   chatHistory: ChatInteraction[] = []
- ): Promise<string> {
-   const multiLine = Flags.enabled(ChatFlags.multi);
-   const voice = Flags.enabled(ChatFlags.voice);
-
-   let value = "";
-   if (voice) {
-     value = await voiceToText();
-   } else if (multiLine) {
-     value = await editor({ message: question });
-     Flags.disable(ChatFlags.multi);
-   } else {
-     const history = chatHistory.map((c) => c.input).reverse();
-     value = await ask(question, options, history);
-   }
-
-   return value.trim();
  }

- export async function formatChatInput(
-   input: string,
-   plugins: string[] = [],
-   chatHistory: ChatInteraction[] = []
- ) {
-   const pluginText = await Plugins.callMany(plugins, input);
-   const historyMessage = `<PreviousChats>
-   This information is provided as historical context and is likely not related to the current task:
-   ${JSON.stringify(chatHistory)}
-   </PreviousChats>`;
-   const fullPrompt = `
-   ${historyMessage} \n
-   <PluginContext> ${pluginText} </PluginContext>
-   <CurrentTask>${input}</CurrentTask>
-   `;
-   return fullPrompt;
- }
-
- export async function chatLoop<E extends GptQuestionEmbedding>(
-   aiName: string,
-   embeddings: Embeddable<E>[],
-   plugins: string[] = []
- ) {
-   const { Agents, Clients } = services();
-   let activeAgent = Agents.getAgent("Developer") as BaseAgent;
-   let provider = "openai" as keyof typeof Clients.clients;
-   let model = ChatModelDefaults[provider];
-   const providers = Object.keys(Clients.clients);
-   const commands = [
-     "agent",
-     "agents",
-     "clear",
-     "debugger",
-     "exit",
-     "model",
-     "multi",
-     "provider",
-     "search",
-     "voice",
-   ];
-   console.log("Commands: ", commands.join(", "));
-   const promptText = () =>
-     Flags.enabled(ChatFlags.agent)
-       ? `\nAsk ${aiName} ${activeAgent.name}: `
-       : `\nAsk ${aiName}: `;
-
-   let chatHistory = new Array<ChatInteraction>();
-   let input = await getInput(promptText(), commands, chatHistory);
-
-   let results = "";
-   while (input !== "exit") {
-     try {
-       switch (input) {
-         case ChatFlags.agents:
-           Flags.enable(ChatFlags.agent);
-           const agents = Agents.listAgents();
-           console.log(agents);
-           const selected = await ask(
-             "Which agent would you like to use: ",
-             agents
-           );
-           activeAgent = Agents.getAgent(selected) as BaseAgent;
-           model = activeAgent.getModel();
-           provider = activeAgent.getProvider() as keyof typeof Clients.clients;
-           break;
-         case ChatFlags.agent:
-           Flags.flip(ChatFlags.agent);
-           break;
-         case ChatFlags.debug:
-           Flags.flip(ChatFlags.debug);
-           break;
-         case ChatFlags.multi:
-           Flags.flip(ChatFlags.multi);
-           break;
-         case ChatFlags.voice:
-           Flags.flip(ChatFlags.voice);
-           break;
-         case ChatFlags.search:
-           await askEmbedding("searching");
-           break;
-         case ChatFlags.clear:
-           chatHistory = [];
-           break;
-         case ChatFlags.provider:
-           console.log(providers);
-           provider = await ask(
-             `\n\nCurrent Provider: ${provider}\nCurrent Model: ${model}\n\nWhich provider would you like to use: `,
-             providers
-           );
-           model =
-             ChatModelDefaults[provider] ||
-             (await Clients.getRegisteredModels(provider))[0];
-
-           if (Flags.enabled("agent")) {
-             activeAgent.setProvider(provider);
-             activeAgent.setModel(model);
-           }
-
-           break;
-         case ChatFlags.model:
-           const models = Clients.getRegisteredModels(provider);
-           console.log(models);
-           const selectedModel = await ask(
-             `\n\nCurrent Provider: ${provider}\nCurrent Model: ${model}\n\nWhich model would you like to use: `,
-             models
-           );
-           model = selectedModel;
+ // Check if this file is being run directly
+ const isMainModule =
+   (process.argv[1] && process.argv[1].endsWith("chat2.ts")) ||
+   (process.argv[1] && process.argv[1].endsWith("chat2.js"));

-           if (Flags.enabled("agent")) {
-             activeAgent.setProvider(provider);
-             activeAgent.setModel(model);
-           }
-           break;
-         case "attach":
-           if (taskRegistry.size > 0) {
-             const options = Array.from(taskRegistry.keys());
-             const selectedInitialMessage = await ask(
-               "Select an agent to attach to:",
-               options
-             );
-             activeAgent = taskRegistry.get(selectedInitialMessage)!;
-             console.log(
-               `Attached to agent with task: "${selectedInitialMessage}"`
-             );
-             await startAgent(activeAgent, null, true);
-           } else {
-             console.log("No detached agents available.");
-           }
-         case "":
-           break;
-         default:
-           console.log("Thinking...");
-           console.log(input);
-           const interaction = {
-             input,
-             output: "",
-           } as ChatInteraction;
-           if (Flags.enabled("agent")) {
-             taskRegistry.set(input, activeAgent);
-             await startAgent(activeAgent, {
-               initialInput: input,
-               plugins,
-               chatHistory,
-               interaction,
-             });
-           } else {
-             const formattedPrompt = await formatChatInput(
-               input,
-               plugins,
-               chatHistory
-             );
-             results = await askAI(formattedPrompt, provider, model);
-             interaction.output = results;
-             console.log(Marked.parse(results || "No response from the AI"));
-           }
-           console.log("\n\n");
-           chatHistory.push(interaction);
-           break;
-       }
-     } catch (e) {
-       console.log(e);
-     } finally {
-       input = await getInput(promptText(), commands, chatHistory);
-     }
-   }
+ if (isMainModule) {
+   main().catch(console.error);
  }

- export async function startAgent(
-   activeAgent: BaseAgent,
-   newTask?: {
-     initialInput: string;
-     plugins: string[];
-     chatHistory: ChatInteraction[];
-     interaction: ChatInteraction;
-   },
-   attach = false
- ) {
-   let done = false;
-   let output = "Done";
-
-   if (newTask) {
-     const { initialInput, plugins, chatHistory, interaction } = newTask;
-     await activeAgent.newTask();
-     const formattedPrompt = await formatChatInput(
-       initialInput,
-       plugins,
-       chatHistory
-     );
-     activeAgent.call(formattedPrompt);
-
-     // Compress tokens of tool responses
-     activeAgent.messageProcessor.setProcessors("pre_call", [
-       new ToolResponseCache(activeAgent.tools).createProcessor(),
-
-       new TokenCompressor(activeAgent.tools).createProcessor((msg) =>
-         Boolean(msg.role === "tool" && msg.tool_call_id)
-       ),
-       new CustomVariables(activeAgent.tools).createProcessor(),
-     ]);
-
-     // Process XML and Harmony tool calls in assistant responses
-     activeAgent.messageProcessor.setProcessors("post_call", [
-       new XmlToolCallProcessor().createProcessor(),
-       new HarmonyToolProcessor().createProcessor(),
-     ]);
-
-     if (
-       !activeAgent.agentEvents.listenerCount(activeAgent.eventTypes.toolUsed)
-     ) {
-       activeAgent.agentEvents.on(
-         activeAgent.eventTypes.toolUsed,
-         (responseMsg) => {
-           console.log(` 🔨 Tool used: ${JSON.stringify(responseMsg, null, 2)}`);
-         }
-       );
-     }
-
-     activeAgent.agentEvents.once(activeAgent.eventTypes.done, (doneMsg) => {
-       console.log("Agent has finished.");
-       done = true;
-       taskRegistry.delete(initialInput);
-       output = doneMsg || "No response from the AI";
-       interaction.output = output;
-       console.log(Marked.parse(output));
-     });
-   }
-
-   // Define available commands
-   const commands = ["pause", "unpause", "kill", "detach"];
-   const history = [];
-
-   let input = await getInput(
-     `Enter command or message for ${activeAgent.name}: `,
-     commands,
-     history
-   );
-
-   history.push(input);
-
-   const donePromise = new Promise<string>((resolve) => {
-     activeAgent.agentEvents.on(activeAgent.eventTypes.done, () => {
-       done = true;
-       resolve("done");
-     });
-   });
-
-   while (!done) {
-     switch (input) {
-       case "":
-         break;
-       case "done":
-         output = "Exited agent interaction.";
-         break;
-       case "pause":
-         await activeAgent.pause();
-         console.log("Agent paused.");
-         break;
-       case "unpause":
-         await activeAgent.unpause();
-         console.log("Agent unpaused.");
-         break;
-       case "kill":
-         await activeAgent.kill();
-         console.log("Agent terminated.");
-         break;
-       case "detach":
-         return "Detached from agent";
-         break;
-       default:
-         activeAgent.addPendingUserMessage({ role: "user", content: input });
-     }
-
-     input = await Promise.race([
-       getInput(
-         `Enter command or message for ${activeAgent.name}: `,
-         commands,
-         history
-       ),
-       donePromise,
-     ]);
-   }
-
-   return output;
- }
+ export { main as startChat };
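For orientation (this note is not part of the registry diff): the legacy interactive loop moves to src/chat-old.ts in this release, while src/chat.ts now only wires up the modular CliChatService. A minimal sketch of the two entrypoints as cli.ts uses them, assuming an ESM module with top-level await and the relative import paths shown in the diff:

```ts
// Sketch only: mirrors the imports cli.ts uses after this refactor.
import { startChat } from "./chat";   // new modular chat loop (CliChatService + InternalChatModule)
import { askAI } from "./chat-old";   // legacy one-shot helper, retained for the `ask` command

// One-shot question; provider defaults to "openai" and the model falls back to ChatModelDefaults.
const answer = await askAI("Where is browser login implemented?");
console.log(answer);

// Interactive session, as `knowhow chat` now starts it.
await startChat();
```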
package/src/cli.ts CHANGED
@@ -4,7 +4,7 @@ import * as fs from "fs";
  import * as path from "path";
  import * as os from "os";
  import { Command } from "commander";
- import { generate, embed, upload, chat } from "./index";
+ import { generate, embed, upload } from "./index";
  import { init } from "./config";

  import { download, purge } from ".";
@@ -14,8 +14,8 @@ import { services } from "./services";
  import { login } from "./login";
  import { worker } from "./worker";
  import { agents } from "./agents";
- import { startChat2 } from "./chat2";
- import { askAI } from "./chat";
+ import { startChat } from "./chat";
+ import { askAI } from "./chat-old";
  import { getConfiguredEmbeddingMap, queryEmbedding } from "./embeddings";
  import { getConfig } from "./config";
  import { marked } from "marked";
@@ -67,6 +67,7 @@ async function readStdin(): Promise<string> {
  async function main() {
    const program = new Command();
    const config = await getConfig();
+   const chatService = new CliChatService(config.plugins);

    program
      .name("knowhow")
@@ -86,7 +87,7 @@ async function main() {
    program
      .command("login")
      .description("Login to knowhow")
-     .option("--jwt", "should use JWT login", "true")
+     .option("--jwt", "Use manual JWT input instead of browser login")
      .action(async (opts) => {
        await login(opts.jwt);
      });
@@ -129,16 +130,9 @@

    program
      .command("chat")
-     .description("Start chat interface")
-     .action(async () => {
-       await chat();
-     });
-
-   program
-     .command("chat2")
      .description("Start new chat interface")
      .action(async () => {
-       await startChat2();
+       await startChat();
      });

    program
@@ -214,7 +208,9 @@

      input = readPromptFile(options.promptFile, input);

-     await new AskModule().processAIQuery(input, {
+     const askModule = new AskModule();
+     await askModule.initialize(chatService);
+     await askModule.processAIQuery(input, {
        plugins: config.plugins,
        currentModel: options.model,
        currentProvider: options.provider,
@@ -230,7 +226,6 @@
      .description("Ask the agent to configure knowhow")
      .action(async (options) => {
        try {
-         const chatService = new CliChatService(config.plugins);
          const setupModule = new SetupModule();
          await setupModule.initialize(chatService);
          await setupModule.handleSetupCommand([]);
package/src/clients/index.ts CHANGED
@@ -158,9 +158,18 @@ export class AIClient {
        new Set(currentModels.concat(models))
      );

+     const embeddingModels = this.embeddingModels[provider] || [];
+     this.embeddingModels[provider] = Array.from<string>(
+       new Set(embeddingModels.concat(models.filter((m) => m.includes("embed"))))
+     );
+
      // We will assume if you register models, it's for completions
      this.completionModels[provider] = Array.from<string>(
-       new Set(currentCompletionModels.concat(models))
+       new Set(
+         currentCompletionModels.concat(
+           models.filter((m) => !m.includes("embed"))
+         )
+       )
      );
    }

@@ -200,15 +209,37 @@

    findModel(modelPrefix: string) {
      for (const provider of Object.keys(this.clientModels)) {
-       const models = this.clientModels[provider];
+       const models = this.clientModels[provider] as string[];
        const foundModel = models.find((m) => m.startsWith(modelPrefix));
        if (foundModel) {
          return { provider, model: foundModel };
        }
+
+       // Handle the case when model prefix is gpt-5 and the provider is knowhow, and the actual model is openai/gpt-5
+       const inferredFound = models.find((m) => {
+         const split = m.split("/");
+         if (split.length < 2) return false;
+         const inferredModel = split.slice(1).join("/");
+         return (
+           m === modelPrefix ||
+           inferredModel === modelPrefix ||
+           inferredModel.startsWith(modelPrefix)
+         );
+       });
+       if (inferredFound) {
+         return { provider, model: inferredFound };
+       }
      }
      return undefined;
    }

+   // detects these formats
+   // "openai", "gpt-5"
+   // "knowhow", "openai/gpt-5"
+   // "", "openai/gpt-5"
+   // "", openai/gpt-5
+   // "", "knowhow/openai/gpt-5"
+   //
    detectProviderModel(provider: string, model?: string) {
      if (this.providerHasModel(provider, model)) {
        return { provider, model };
@@ -237,6 +268,8 @@
        return foundByModel;
      }

+     console.log({ provider, model, all: this.listAllModels() });
+
      return { provider, model };
    }

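The findModel change above teaches model lookup about namespaced ids (a routing provider such as "knowhow" can register "openai/gpt-5" and still match a bare "gpt-5" prefix). A standalone sketch of the added matching rule; the helper name and shape here are illustrative, not part of the package API:

```ts
// Illustrative reproduction of the namespace-aware prefix match added to AIClient.findModel.
type ClientModels = Record<string, string[]>;

function resolveModel(clientModels: ClientModels, modelPrefix: string) {
  for (const provider of Object.keys(clientModels)) {
    const models = clientModels[provider];

    // Original behavior: plain prefix match against the registered model id.
    const direct = models.find((m) => m.startsWith(modelPrefix));
    if (direct) return { provider, model: direct };

    // New behavior: strip a leading namespace (e.g. "openai/") and match the remainder.
    const inferred = models.find((m) => {
      const split = m.split("/");
      if (split.length < 2) return false;
      const inferredModel = split.slice(1).join("/");
      return (
        m === modelPrefix ||
        inferredModel === modelPrefix ||
        inferredModel.startsWith(modelPrefix)
      );
    });
    if (inferred) return { provider, model: inferred };
  }
  return undefined;
}

// resolveModel({ knowhow: ["openai/gpt-5"] }, "gpt-5")
//   => { provider: "knowhow", model: "openai/gpt-5" }
```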
package/src/embeddings.ts CHANGED
@@ -17,7 +17,7 @@ import {
    cosineSimilarity,
    takeFirstNWords,
  } from "./utils";
- import { summarizeTexts, openai, chunkText } from "./ai";
+ import { summarizeTexts, chunkText } from "./ai";
  import { Plugins } from "./plugins/plugins";
  import { md5Hash } from "./hashes";
  import { convertToText } from "./conversion";
package/src/index.ts CHANGED
@@ -39,7 +39,6 @@ import {
  } from "./embeddings";

  import { abort } from "process";
- import { chatLoop } from "./chat";
  import { convertToText } from "./conversion";
  import { Plugins } from "./plugins/plugins";
  import { knowhowMcpClient } from "./services/Mcp";
@@ -48,11 +47,11 @@ import { Models } from "./types";

  export * as clients from "./clients";
  export * as agents from "./agents";
- export * as ai from "./ai";
  export * as services from "./services";
  export * as embeddings from "./embeddings";
  export * as types from "./types";
  export * as processors from "./processors";
+ export * as ai from "./ai";

  export async function embed() {
    // load config
@@ -315,12 +314,6 @@ export async function handleSingleOutputGeneration(
    await saveAllFileHashes(filesToCheck, promptHash);
  }

- export async function chat() {
-   const config = await getConfig();
-   const embeddings = await getConfiguredEmbeddings();
-   await chatLoop("knowhow", embeddings, config.plugins);
- }
-
  export async function download() {
    const config = await getConfig();
    const { AwsS3, GitHub, knowhowApiClient } = services();