centaurus-cli 2.9.2 → 2.9.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli-adapter.d.ts +6 -3
- package/dist/cli-adapter.d.ts.map +1 -1
- package/dist/cli-adapter.js +244 -74
- package/dist/cli-adapter.js.map +1 -1
- package/dist/config/models.d.ts.map +1 -1
- package/dist/config/models.js +2 -0
- package/dist/config/models.js.map +1 -1
- package/dist/config/slash-commands.d.ts +3 -0
- package/dist/config/slash-commands.d.ts.map +1 -1
- package/dist/config/slash-commands.js +35 -1
- package/dist/config/slash-commands.js.map +1 -1
- package/dist/config/types.d.ts +2 -0
- package/dist/config/types.d.ts.map +1 -1
- package/dist/config/types.js +1 -0
- package/dist/config/types.js.map +1 -1
- package/dist/services/ai-autocomplete-agent.d.ts +39 -0
- package/dist/services/ai-autocomplete-agent.d.ts.map +1 -0
- package/dist/services/ai-autocomplete-agent.js +189 -0
- package/dist/services/ai-autocomplete-agent.js.map +1 -0
- package/dist/services/ai-service-client.d.ts +25 -0
- package/dist/services/ai-service-client.d.ts.map +1 -1
- package/dist/services/ai-service-client.js +162 -1
- package/dist/services/ai-service-client.js.map +1 -1
- package/dist/services/auth-handler.js +1 -1
- package/dist/services/auth-handler.js.map +1 -1
- package/dist/services/local-chat-storage.d.ts +21 -0
- package/dist/services/local-chat-storage.d.ts.map +1 -1
- package/dist/services/local-chat-storage.js +138 -43
- package/dist/services/local-chat-storage.js.map +1 -1
- package/dist/services/ollama-service.d.ts +197 -0
- package/dist/services/ollama-service.d.ts.map +1 -0
- package/dist/services/ollama-service.js +324 -0
- package/dist/services/ollama-service.js.map +1 -0
- package/dist/ui/components/App.d.ts +2 -2
- package/dist/ui/components/App.d.ts.map +1 -1
- package/dist/ui/components/App.js +45 -2
- package/dist/ui/components/App.js.map +1 -1
- package/dist/ui/components/InputBox.d.ts +2 -0
- package/dist/ui/components/InputBox.d.ts.map +1 -1
- package/dist/ui/components/InputBox.js +321 -19
- package/dist/ui/components/InputBox.js.map +1 -1
- package/dist/ui/components/MultiLineInput.d.ts.map +1 -1
- package/dist/ui/components/MultiLineInput.js +68 -2
- package/dist/ui/components/MultiLineInput.js.map +1 -1
- package/dist/ui/components/ToolExecutionMessage.d.ts.map +1 -1
- package/dist/ui/components/ToolExecutionMessage.js +5 -1
- package/dist/ui/components/ToolExecutionMessage.js.map +1 -1
- package/dist/utils/command-history.d.ts +12 -2
- package/dist/utils/command-history.d.ts.map +1 -1
- package/dist/utils/command-history.js +57 -13
- package/dist/utils/command-history.js.map +1 -1
- package/dist/utils/input-classifier.js +1 -1
- package/dist/utils/input-classifier.js.map +1 -1
- package/package.json +1 -1

package/dist/config/slash-commands.d.ts
CHANGED
@@ -9,6 +9,9 @@ export declare const CHAT_SUBCOMMANDS: SlashCommand[];
 export declare const ADD_COMMAND_SUBCOMMANDS: SlashCommand[];
 export declare const BACKGROUND_TASK_SUBCOMMANDS: SlashCommand[];
 export declare const SYNC_SUBCOMMANDS: SlashCommand[];
+export declare const MODELS_SUBCOMMANDS: SlashCommand[];
+export declare const SETTINGS_SUBCOMMANDS: SlashCommand[];
+export declare const SETTINGS_AUTOSUGGEST_OPTIONS: SlashCommand[];
 /**
  * Filter commands based on query string
  * @param query The search query (without the leading /)

package/dist/config/slash-commands.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"slash-commands.d.ts","sourceRoot":"","sources":["../../src/config/slash-commands.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,YAAY;IACzB,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,CAAC,EAAE,MAAM,EAAE,CAAC;CACtB;AAED,eAAO,MAAM,cAAc,EAAE,YAAY,
+
{"version":3,"file":"slash-commands.d.ts","sourceRoot":"","sources":["../../src/config/slash-commands.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,YAAY;IACzB,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,CAAC,EAAE,MAAM,EAAE,CAAC;CACtB;AAED,eAAO,MAAM,cAAc,EAAE,YAAY,EAoBxC,CAAC;AAGF,eAAO,MAAM,eAAe,EAAE,YAAY,EAOzC,CAAC;AAGF,eAAO,MAAM,gBAAgB,EAAE,YAAY,EAM1C,CAAC;AAGF,eAAO,MAAM,uBAAuB,EAAE,YAAY,EAIjD,CAAC;AAGF,eAAO,MAAM,2BAA2B,EAAE,YAAY,EAGrD,CAAC;AAGF,eAAO,MAAM,gBAAgB,EAAE,YAAY,EAG1C,CAAC;AAGF,eAAO,MAAM,kBAAkB,EAAE,YAAY,EAG5C,CAAC;AAGF,eAAO,MAAM,oBAAoB,EAAE,YAAY,EAE9C,CAAC;AAGF,eAAO,MAAM,4BAA4B,EAAE,YAAY,EAGtD,CAAC;AAEF;;;;GAIG;AACH,wBAAgB,cAAc,CAAC,KAAK,EAAE,MAAM,GAAG,YAAY,EAAE,CAsH5D"}

package/dist/config/slash-commands.js
CHANGED
@@ -6,7 +6,7 @@ export const SLASH_COMMANDS = [
     { name: 'sync', description: 'Sync data to/from cloud (upload/restore subcommands)' },
     { name: 'clean-ui', description: 'Refresh UI display to fix visual glitches', aliases: ['refresh-ui', 'redraw'] },
     { name: 'config', description: 'View current configuration' },
-    { name: '
+    { name: 'models', description: 'Select AI models (local Ollama or cloud)', aliases: ['model'] },
     { name: 'plan', description: 'Toggle plan mode for complex implementations' },
     { name: 'mcp', description: 'Manage configured MCP servers and tools' },
     { name: 'add-command', description: 'Manage custom terminal commands for auto-detect', aliases: ['add-command-auto-detect'] },
@@ -14,6 +14,7 @@ export const SLASH_COMMANDS = [
     { name: 'background-task', description: 'Manage background shell tasks', aliases: ['bkg', 'bg-task'] },
     { name: 'copy-chat-context', description: 'Copy chat history as readable text to clipboard' },
     { name: 'session-limits', description: 'View session quota usage and limits' },
+    { name: 'settings', description: 'Configure CLI settings' },
     { name: 'sign-in', description: 'Sign in with Google (if not already signed in)' },
     { name: 'logout', description: 'Sign out, clear session, and exit CLI' },
     { name: 'exit', description: 'Exit the application' },
@@ -51,6 +52,20 @@ export const SYNC_SUBCOMMANDS = [
     { name: 'upload', description: 'Upload local chat history and config to cloud (overwrites cloud data)' },
     { name: 'restore', description: 'Download cloud data and restore locally (overwrites local data)' },
 ];
+// Models subcommands (local vs cloud)
+export const MODELS_SUBCOMMANDS = [
+    { name: 'cloud', description: 'Select powerful models from leading providers' },
+    { name: 'local', description: 'Select from locally installed Ollama models' },
+];
+// Settings subcommands
+export const SETTINGS_SUBCOMMANDS = [
+    { name: 'auto-suggest', description: 'AI-powered command suggestions (on/off)' },
+];
+// Settings auto-suggest options
+export const SETTINGS_AUTOSUGGEST_OPTIONS = [
+    { name: 'on', description: 'Enable AI auto-suggestions after 5 seconds of inactivity' },
+    { name: 'off', description: 'Disable AI auto-suggestions' },
+];
 /**
  * Filter commands based on query string
  * @param query The search query (without the leading /)
@@ -102,6 +117,25 @@ export function filterCommands(query) {
         return SYNC_SUBCOMMANDS.filter(cmd => cmd.name.toLowerCase().startsWith(subQuery) ||
             cmd.description.toLowerCase().includes(subQuery));
     }
+    // Check if this is a models subcommand query (e.g., "models local" or "model local")
+    if (normalized.startsWith('models ') || normalized.startsWith('model ')) {
+        const prefixLen = normalized.startsWith('models ') ? 7 : 6;
+        const subQuery = normalized.slice(prefixLen);
+        return MODELS_SUBCOMMANDS.filter(cmd => cmd.name.toLowerCase().startsWith(subQuery) ||
+            cmd.description.toLowerCase().includes(subQuery));
+    }
+    // Check if this is a settings auto-suggest option query (e.g., "settings auto-suggest on")
+    if (normalized.startsWith('settings auto-suggest ')) {
+        const subQuery = normalized.slice(22); // Remove "settings auto-suggest "
+        return SETTINGS_AUTOSUGGEST_OPTIONS.filter(cmd => cmd.name.toLowerCase().startsWith(subQuery) ||
+            cmd.description.toLowerCase().includes(subQuery));
+    }
+    // Check if this is a settings subcommand query (e.g., "settings auto-suggest")
+    if (normalized.startsWith('settings ')) {
+        const subQuery = normalized.slice(9); // Remove "settings "
+        return SETTINGS_SUBCOMMANDS.filter(cmd => cmd.name.toLowerCase().startsWith(subQuery) ||
+            cmd.description.toLowerCase().includes(subQuery));
+    }
     // Filter commands that match
     const matches = SLASH_COMMANDS.filter(cmd => {
         // Match if command name starts with query
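
For reference, the new branches in filterCommands above route subcommand queries such as "/models lo" or "/settings auto-suggest o" to the new constant arrays instead of the top-level SLASH_COMMANDS list. A minimal sketch (the import path is illustrative; filterCommands takes the query without the leading slash):

import { filterCommands } from 'centaurus-cli/dist/config/slash-commands.js';

filterCommands('models lo');               // → [{ name: 'local', ... }] from MODELS_SUBCOMMANDS
filterCommands('model cloud');             // the 'model' alias path uses prefixLen 6
filterCommands('settings auto-suggest o'); // → the 'on' and 'off' entries
filterCommands('settings au');             // → the 'auto-suggest' entry from SETTINGS_SUBCOMMANDS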

package/dist/config/slash-commands.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"slash-commands.js","sourceRoot":"","sources":["../../src/config/slash-commands.ts"],"names":[],"mappings":"AAMA,MAAM,CAAC,MAAM,cAAc,GAAmB;IAC1C,EAAE,IAAI,EAAE,MAAM,EAAE,WAAW,EAAE,wBAAwB,EAAE;IACvD,EAAE,IAAI,EAAE,MAAM,EAAE,WAAW,EAAE,2DAA2D,EAAE;IAC1F,EAAE,IAAI,EAAE,MAAM,EAAE,WAAW,EAAE,8CAA8C,EAAE;IAC7E,EAAE,IAAI,EAAE,OAAO,EAAE,WAAW,EAAE,yCAAyC,EAAE;IACzE,EAAE,IAAI,EAAE,MAAM,EAAE,WAAW,EAAE,sDAAsD,EAAE;IACrF,EAAE,IAAI,EAAE,UAAU,EAAE,WAAW,EAAE,2CAA2C,EAAE,OAAO,EAAE,CAAC,YAAY,EAAE,QAAQ,CAAC,EAAE;IACjH,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,4BAA4B,EAAE;IAC7D,EAAE,IAAI,EAAE,
+
{"version":3,"file":"slash-commands.js","sourceRoot":"","sources":["../../src/config/slash-commands.ts"],"names":[],"mappings":"AAMA,MAAM,CAAC,MAAM,cAAc,GAAmB;IAC1C,EAAE,IAAI,EAAE,MAAM,EAAE,WAAW,EAAE,wBAAwB,EAAE;IACvD,EAAE,IAAI,EAAE,MAAM,EAAE,WAAW,EAAE,2DAA2D,EAAE;IAC1F,EAAE,IAAI,EAAE,MAAM,EAAE,WAAW,EAAE,8CAA8C,EAAE;IAC7E,EAAE,IAAI,EAAE,OAAO,EAAE,WAAW,EAAE,yCAAyC,EAAE;IACzE,EAAE,IAAI,EAAE,MAAM,EAAE,WAAW,EAAE,sDAAsD,EAAE;IACrF,EAAE,IAAI,EAAE,UAAU,EAAE,WAAW,EAAE,2CAA2C,EAAE,OAAO,EAAE,CAAC,YAAY,EAAE,QAAQ,CAAC,EAAE;IACjH,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,4BAA4B,EAAE;IAC7D,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,0CAA0C,EAAE,OAAO,EAAE,CAAC,OAAO,CAAC,EAAE;IAC/F,EAAE,IAAI,EAAE,MAAM,EAAE,WAAW,EAAE,8CAA8C,EAAE;IAC7E,EAAE,IAAI,EAAE,KAAK,EAAE,WAAW,EAAE,yCAAyC,EAAE;IACvE,EAAE,IAAI,EAAE,aAAa,EAAE,WAAW,EAAE,iDAAiD,EAAE,OAAO,EAAE,CAAC,yBAAyB,CAAC,EAAE;IAC7H,EAAE,IAAI,EAAE,MAAM,EAAE,WAAW,EAAE,yCAAyC,EAAE;IACxE,EAAE,IAAI,EAAE,iBAAiB,EAAE,WAAW,EAAE,+BAA+B,EAAE,OAAO,EAAE,CAAC,KAAK,EAAE,SAAS,CAAC,EAAE;IACtG,EAAE,IAAI,EAAE,mBAAmB,EAAE,WAAW,EAAE,iDAAiD,EAAE;IAC7F,EAAE,IAAI,EAAE,gBAAgB,EAAE,WAAW,EAAE,qCAAqC,EAAE;IAC9E,EAAE,IAAI,EAAE,UAAU,EAAE,WAAW,EAAE,wBAAwB,EAAE;IAC3D,EAAE,IAAI,EAAE,SAAS,EAAE,WAAW,EAAE,gDAAgD,EAAE;IAClF,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,uCAAuC,EAAE;IACxE,EAAE,IAAI,EAAE,MAAM,EAAE,WAAW,EAAE,sBAAsB,EAAE;CACxD,CAAC;AAEF,kBAAkB;AAClB,MAAM,CAAC,MAAM,eAAe,GAAmB;IAC3C,EAAE,IAAI,EAAE,MAAM,EAAE,WAAW,EAAE,uCAAuC,EAAE;IACtE,EAAE,IAAI,EAAE,SAAS,EAAE,WAAW,EAAE,qBAAqB,EAAE;IACvD,EAAE,IAAI,EAAE,KAAK,EAAE,WAAW,EAAE,sBAAsB,EAAE;IACpD,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,sBAAsB,EAAE;IACvD,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,0BAA0B,EAAE;IAC3D,EAAE,IAAI,EAAE,SAAS,EAAE,WAAW,EAAE,2BAA2B,EAAE;CAChE,CAAC;AAEF,mBAAmB;AACnB,MAAM,CAAC,MAAM,gBAAgB,GAAmB;IAC5C,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,gCAAgC,EAAE;IACjE,EAAE,IAAI,EAAE,MAAM,EAAE,WAAW,EAAE,sBAAsB,EAAE;IACrD,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,qBAAqB,EAAE;IACtD,EAAE,IAAI,EAAE,KAAK,EAAE,WAAW,EAAE,0BAA0B,EAAE;IACxD,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,qBAAqB,EAAE;CACzD,CAAC;AAEF,oEAAoE;AACpE,MAAM,CAAC,MAAM,uBAAuB,GAAmB;IACnD,EAAE,IAAI,EAAE,KAAK,EAAE,WAAW,EAAE,8DAA8D,EAAE;IAC5F,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,oEAAoE,EAAE;IACrG,EAAE,IAAI,EAAE,MAAM,EAAE,WAAW,EAAE,+BAA+B,EAAE;CACjE,CAAC;AAEF,8BAA8B;AAC9B,MAAM,CAAC,MAAM,2BAA2B,GAAmB;IACvD,EAAE,IAAI,EAAE,MAAM,EAAE,WAAW,EAAE,qDAAqD,EAAE;IACpF,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,kCAAkC,EAAE;CACtE,CAAC;AAEF,mBAAmB;AACnB,MAAM,CAAC,MAAM,gBAAgB,GAAmB;IAC5C,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,uEAAuE,EAAE;IACxG,EAAE,IAAI,EAAE,SAAS,EAAE,WAAW,EAAE,iEAAiE,EAAE;CACtG,CAAC;AAEF,sCAAsC;AACtC,MAAM,CAAC,MAAM,kBAAkB,GAAmB;IAC9C,EAAE,IAAI,EAAE,OAAO,EAAE,WAAW,EAAE,+CAA+C,EAAE;IAC/E,EAAE,IAAI,EAAE,OAAO,EAAE,WAAW,EAAE,6CAA6C,EAAE;CAChF,CAAC;AAEF,uBAAuB;AACvB,MAAM,CAAC,MAAM,oBAAoB,GAAmB;IAChD,EAAE,IAAI,EAAE,cAAc,EAAE,WAAW,EAAE,yCAAyC,EAAE;CACnF,CAAC;AAEF,gCAAgC;AAChC,MAAM,CAAC,MAAM,4BAA4B,GAAmB;IACxD,EAAE,IAAI,EAAE,IAAI,EAAE,WAAW,EAAE,0DAA0D,EAAE;IACvF,EAAE,IAAI,EAAE,KAAK,EAAE,WAAW,EAAE,6BAA6B,EAAE;CAC9D,CAAC;AAEF;;;;GAIG;AACH,MAAM,UAAU,cAAc,CAAC,KAAa;IACxC,IAAI,CAAC,KAAK,EAAE,CAAC;QACT,iCAAiC;QACjC,OAAO,cAAc,CAAC;IAC1B,CAAC;IAED,MAAM,UAAU,GAAG,KAAK,CAAC,WAAW,EAAE,CAAC;IAEvC,8DAA8D;IAC9D,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE,CAAC;QAChC,MAAM,QAAQ,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,gBAAgB;QACtD,OAAO,eAAe,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAChC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC;YAC3C,GAAG,CAAC,WAAW,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CACnD,CAAC;IACN,CAAC;IAED,iEAAiE;IACjE,IAAI,UAAU,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE,CAAC;QACjC,MAAM
,QAAQ,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,iBAAiB;QACvD,OAAO,gBAAgB,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CACjC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC;YAC3C,GAAG,CAAC,WAAW,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CACnD,CAAC;IACN,CAAC;IAED,6EAA6E;IAC7E,IAAI,UAAU,CAAC,UAAU,CAAC,cAAc,CAAC,IAAI,UAAU,CAAC,UAAU,CAAC,0BAA0B,CAAC,EAAE,CAAC;QAC7F,MAAM,SAAS,GAAG,UAAU,CAAC,UAAU,CAAC,0BAA0B,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;QAC9E,MAAM,QAAQ,GAAG,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC;QAC7C,OAAO,uBAAuB,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CACxC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC;YAC3C,GAAG,CAAC,WAAW,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CACnD,CAAC;IACN,CAAC;IAED,qFAAqF;IACrF,IAAI,UAAU,CAAC,UAAU,CAAC,kBAAkB,CAAC,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,IAAI,UAAU,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC;QAClH,IAAI,QAAQ,GAAG,EAAE,CAAC;QAClB,IAAI,UAAU,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE,CAAC;YAC5C,QAAQ,GAAG,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;QACpC,CAAC;aAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE,CAAC;YACvC,QAAQ,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACnC,CAAC;aAAM,IAAI,UAAU,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC;YAC3C,QAAQ,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACnC,CAAC;QACD,OAAO,2BAA2B,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAC5C,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC;YAC3C,GAAG,CAAC,WAAW,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CACnD,CAAC;IACN,CAAC;IAED,iEAAiE;IACjE,IAAI,UAAU,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE,CAAC;QACjC,MAAM,QAAQ,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,iBAAiB;QACvD,OAAO,gBAAgB,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CACjC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC;YAC3C,GAAG,CAAC,WAAW,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CACnD,CAAC;IACN,CAAC;IAED,qFAAqF;IACrF,IAAI,UAAU,CAAC,UAAU,CAAC,SAAS,CAAC,IAAI,UAAU,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;QACtE,MAAM,SAAS,GAAG,UAAU,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC;QAC7C,OAAO,kBAAkB,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CACnC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC;YAC3C,GAAG,CAAC,WAAW,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CACnD,CAAC;IACN,CAAC;IAED,2FAA2F;IAC3F,IAAI,UAAU,CAAC,UAAU,CAAC,wBAAwB,CAAC,EAAE,CAAC;QAClD,MAAM,QAAQ,GAAG,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,kCAAkC;QACzE,OAAO,4BAA4B,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAC7C,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC;YAC3C,GAAG,CAAC,WAAW,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CACnD,CAAC;IACN,CAAC;IAED,+EAA+E;IAC/E,IAAI,UAAU,CAAC,UAAU,CAAC,WAAW,CAAC,EAAE,CAAC;QACrC,MAAM,QAAQ,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,qBAAqB;QAC3D,OAAO,oBAAoB,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CACrC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC;YAC3C,GAAG,CAAC,WAAW,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CACnD,CAAC;IACN,CAAC;IAED,6BAA6B;IAC7B,MAAM,OAAO,GAAG,cAAc,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE;QACxC,0CAA0C;QAC1C,IAAI,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC;YAChD,OAAO,IAAI,CAAC;QAChB,CAAC;QAED,mCAAmC;QACnC,IAAI,GAAG,CAAC,WAAW,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE,CAAC;YACrD,OAAO,IAAI,CAAC;QAChB,CAAC;QAED,OAAO,KAAK,CAAC;IACjB,CAAC,CAAC,CAAC;IAEH,gDAAgD;IAChD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;QAClB,MAAM,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;QAC5D,MAAM,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;QAE5D,IAAI,OAAO,IAAI,CAAC,OAAO;YAAE,OAAO,CAAC,CAAC,CAAC;QACnC,IAAI,CAAC,OAAO,IAAI,OAAO;YAAE,OAAO,CAAC,CAAC;QAElC,uDAAuD;QACvD,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,IAAI,C
AAC,CAAC;IACxC,CAAC,CAAC,CAAC;IAEH,sBAAsB;IACtB,OAAO,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;AAChC,CAAC"}

package/dist/config/types.d.ts
CHANGED
@@ -23,11 +23,13 @@ export interface SubshellConfig {
 export interface Config {
     model: string;
     modelName?: string;
+    isLocalModel?: boolean;
     autoApprove?: boolean;
     subshell?: SubshellConfig;
     enhancedQuality?: boolean;
     externalThinking?: boolean;
     autonomousMode?: boolean;
+    aiAutoSuggest?: boolean;
 }
 export declare const DEFAULT_CONFIG: Partial<Config>;
 //# sourceMappingURL=types.d.ts.map
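
Both new optional fields are read back by ai-service-client.js (further down in this diff) from ~/.centaurus/config.json to decide whether chat traffic should route to a local Ollama model. A hypothetical config value with the new fields set (a sketch only; values are illustrative, not taken from the package):

// Sketch of a Config value after choosing "/models local".
const exampleConfig: Partial<Config> = {
    model: 'llama3.2:latest',   // a locally installed Ollama model
    isLocalModel: true,         // new: streamChat routes to Ollama when true
    aiAutoSuggest: false,       // new: AI auto-suggest stays off unless enabled via /settings
};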

package/dist/config/types.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/config/types.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,cAAc;IAC7B,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,GAAG,CAAC,EAAE;QACJ,OAAO,CAAC,EAAE,OAAO,CAAC;QAClB,iBAAiB,CAAC,EAAE,UAAU,GAAG,KAAK,CAAC;QACvC,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB,CAAC;IACF,GAAG,CAAC,EAAE;QACJ,OAAO,CAAC,EAAE,OAAO,CAAC;QAClB,mBAAmB,CAAC,EAAE,MAAM,CAAC;QAC7B,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB,CAAC;IACF,MAAM,CAAC,EAAE;QACP,OAAO,CAAC,EAAE,OAAO,CAAC;QAClB,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB,CAAC;IACF,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED,MAAM,WAAW,MAAM;IACrB,KAAK,EAAE,MAAM,CAAC;IACd,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,QAAQ,CAAC,EAAE,cAAc,CAAC;IAC1B,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,cAAc,CAAC,EAAE,OAAO,CAAC;
+
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/config/types.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,cAAc;IAC7B,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,GAAG,CAAC,EAAE;QACJ,OAAO,CAAC,EAAE,OAAO,CAAC;QAClB,iBAAiB,CAAC,EAAE,UAAU,GAAG,KAAK,CAAC;QACvC,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB,CAAC;IACF,GAAG,CAAC,EAAE;QACJ,OAAO,CAAC,EAAE,OAAO,CAAC;QAClB,mBAAmB,CAAC,EAAE,MAAM,CAAC;QAC7B,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB,CAAC;IACF,MAAM,CAAC,EAAE;QACP,OAAO,CAAC,EAAE,OAAO,CAAC;QAClB,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB,CAAC;IACF,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED,MAAM,WAAW,MAAM;IACrB,KAAK,EAAE,MAAM,CAAC;IACd,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,QAAQ,CAAC,EAAE,cAAc,CAAC;IAC1B,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,aAAa,CAAC,EAAE,OAAO,CAAC;CACzB;AAED,eAAO,MAAM,cAAc,EAAE,OAAO,CAAC,MAAM,CA2B1C,CAAC"}

package/dist/config/types.js
CHANGED
@@ -4,6 +4,7 @@ export const DEFAULT_CONFIG = {
     enhancedQuality: true, // Enable enhanced quality features by default
     externalThinking: false, // Disable external thinking by default (internal reasoning)
     autonomousMode: false, // Disable autonomous mode by default (for backward compatibility)
+    aiAutoSuggest: false, // Disable AI auto-suggest by default
     subshell: {
         enabled: true,
         ssh: {

package/dist/config/types.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/config/types.ts"],"names":[],"mappings":"
+
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/config/types.ts"],"names":[],"mappings":"AAmCA,MAAM,CAAC,MAAM,cAAc,GAAoB;IAC7C,KAAK,EAAE,kBAAkB;IACzB,WAAW,EAAE,KAAK;IAClB,eAAe,EAAE,IAAI,EAAE,8CAA8C;IACrE,gBAAgB,EAAE,KAAK,EAAE,4DAA4D;IACrF,cAAc,EAAE,KAAK,EAAE,kEAAkE;IACzF,aAAa,EAAE,KAAK,EAAE,qCAAqC;IAC3D,QAAQ,EAAE;QACR,OAAO,EAAE,IAAI;QACb,GAAG,EAAE;YACH,OAAO,EAAE,IAAI;YACb,iBAAiB,EAAE,KAAK;YACxB,OAAO,EAAE,KAAK;SACf;QACD,GAAG,EAAE;YACH,OAAO,EAAE,IAAI;YACb,OAAO,EAAE,KAAK;SACf;QACD,MAAM,EAAE;YACN,OAAO,EAAE,IAAI;YACb,OAAO,EAAE,KAAK;SACf;QACD,cAAc,EAAE,KAAK;QACrB,iBAAiB,EAAE,KAAK;QACxB,iBAAiB,EAAE,CAAC;QACpB,gBAAgB,EAAE,IAAI;KACvB;CACF,CAAC"}

package/dist/services/ai-autocomplete-agent.d.ts
ADDED
@@ -0,0 +1,39 @@
+/**
+ * AI Autocomplete Agent Service
+ *
+ * A lightweight AI agent using gemini-2.0-flash-lite that predicts commands
+ * the user is trying to type. Triggered after 5 seconds of input inactivity.
+ */
+export interface AutocompleteContext {
+    os: string;
+    platform: string;
+    cwd: string;
+    directoryHistory: string[];
+    sessionCommands: string[];
+    files: string[];
+    currentInput: string;
+}
+declare class AIAutocompleteAgentClass {
+    private pendingRequest;
+    /**
+     * Predict the full command the user is trying to type
+     * Returns null if no confident prediction can be made
+     */
+    predictCommand(context: AutocompleteContext, signal?: AbortSignal): Promise<string | null>;
+    /**
+     * Abort any pending prediction request
+     */
+    abort(): void;
+    /**
+     * Call the AI model to get a prediction
+     */
+    private callAI;
+    /**
+     * Merge two abort signals into one
+     */
+    private mergeSignals;
+}
+export declare const AIAutocompleteAgent: AIAutocompleteAgentClass;
+export declare const AI_AUTOCOMPLETE_DEBOUNCE_MS = 5000;
+export {};
+//# sourceMappingURL=ai-autocomplete-agent.d.ts.map
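
A minimal usage sketch of the new agent's public surface. The context values below are illustrative; in the CLI they come from InputBox and the command-history utilities, both of which change in this release:

import { AIAutocompleteAgent, type AutocompleteContext } from 'centaurus-cli/dist/services/ai-autocomplete-agent.js';

const context: AutocompleteContext = {
    os: process.platform,
    platform: process.platform,
    cwd: process.cwd(),
    directoryHistory: ['git status', 'git push'],  // illustrative
    sessionCommands: ['npm run build'],            // illustrative
    files: ['package.json', 'README.md'],          // illustrative
    currentInput: 'git st',
};

// Resolves to a completion such as 'git status', or null when the model answers
// NONE, the request is aborted, or the reply does not extend the typed input.
const prediction = await AIAutocompleteAgent.predictCommand(context);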

package/dist/services/ai-autocomplete-agent.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"ai-autocomplete-agent.d.ts","sourceRoot":"","sources":["../../src/services/ai-autocomplete-agent.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAYH,MAAM,WAAW,mBAAmB;IAChC,EAAE,EAAE,MAAM,CAAC;IACX,QAAQ,EAAE,MAAM,CAAC;IACjB,GAAG,EAAE,MAAM,CAAC;IACZ,gBAAgB,EAAE,MAAM,EAAE,CAAC;IAC3B,eAAe,EAAE,MAAM,EAAE,CAAC;IAC1B,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,YAAY,EAAE,MAAM,CAAC;CACxB;AAwBD,cAAM,wBAAwB;IAC1B,OAAO,CAAC,cAAc,CAAgC;IAEtD;;;OAGG;IACG,cAAc,CAAC,OAAO,EAAE,mBAAmB,EAAE,MAAM,CAAC,EAAE,WAAW,GAAG,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC;IA6ChG;;OAEG;IACH,KAAK,IAAI,IAAI;IAOb;;OAEG;YACW,MAAM;IAwGpB;;OAEG;IACH,OAAO,CAAC,YAAY;CAcvB;AAGD,eAAO,MAAM,mBAAmB,0BAAiC,CAAC;AAGlE,eAAO,MAAM,2BAA2B,OAAc,CAAC"}

package/dist/services/ai-autocomplete-agent.js
ADDED
@@ -0,0 +1,189 @@
+/**
+ * AI Autocomplete Agent Service
+ *
+ * A lightweight AI agent using gemini-2.0-flash-lite that predicts commands
+ * the user is trying to type. Triggered after 5 seconds of input inactivity.
+ */
+import { aiServiceClient } from './ai-service-client.js';
+import { quickLog } from '../utils/conversation-logger.js';
+// ==================== Constants ====================
+const MODEL = 'gemini-2.0-flash-lite'; // Fast, cheap model for prediction
+const DEBOUNCE_MS = 5000; // 5 seconds of inactivity before triggering
+// ==================== System Prompt ====================
+const AUTOCOMPLETE_SYSTEM_PROMPT = `You are a command prediction assistant. Your job is to predict the FULL command the user is trying to type based on their current input and context.
+
+RULES:
+1. Respond with ONLY the predicted full command - no explanations, no quotes, no markdown
+2. If you cannot confidently predict a command, respond with exactly "NONE"
+3. The prediction should be a complete, valid command that would work in the given OS and directory
+4. Consider the user's command history in this directory and session when making predictions
+5. USE THE FILE LIST: If the user is typing a command that operates on a file (like 'python', 'cat', 'ls'), try to complete the filename from the provided list.
+6. Be conservative - only predict when you have high confidence
+
+EXAMPLES:
+User typing: "git st" → git status
+User typing: "npm i" → npm install
+User typing: "cd .." → cd ..
+User typing: "py te" (files: [test.py, main.py]) → python test.py
+User typing: "cat rea" (files: [README.md]) → cat README.md
+User typing: "xyz123" → NONE`;
+// ==================== AIAutocompleteAgent Class ====================
+class AIAutocompleteAgentClass {
+    pendingRequest = null;
+    /**
+     * Predict the full command the user is trying to type
+     * Returns null if no confident prediction can be made
+     */
+    async predictCommand(context, signal) {
+        // Don't predict for empty or very short inputs
+        if (!context.currentInput || context.currentInput.trim().length < 2) {
+            return null;
+        }
+        // Don't predict for slash commands
+        if (context.currentInput.startsWith('/')) {
+            return null;
+        }
+        // Don't predict for file tags
+        if (context.currentInput.includes('@')) {
+            return null;
+        }
+        // Cancel any pending request
+        this.abort();
+        // Create new abort controller
+        const abortController = new AbortController();
+        this.pendingRequest = abortController;
+        // Merge signals if external signal provided
+        const effectiveSignal = signal
+            ? this.mergeSignals(signal, abortController.signal)
+            : abortController.signal;
+        try {
+            const prediction = await this.callAI(context, effectiveSignal);
+            return prediction;
+        }
+        catch (error) {
+            if (error.name === 'AbortError' || error.message === 'AbortError') {
+                // Request was cancelled, this is expected
+                return null;
+            }
+            quickLog(`[${new Date().toISOString()}] [AIAutocompleteAgent] Prediction error: ${error.message}\n`);
+            return null;
+        }
+        finally {
+            if (this.pendingRequest === abortController) {
+                this.pendingRequest = null;
+            }
+        }
+    }
+    /**
+     * Abort any pending prediction request
+     */
+    abort() {
+        if (this.pendingRequest) {
+            this.pendingRequest.abort();
+            this.pendingRequest = null;
+        }
+    }
+    /**
+     * Call the AI model to get a prediction
+     */
+    async callAI(context, signal) {
+        // Build context string
+        const contextParts = [
+            `OS: ${context.os}`,
+            `Platform: ${context.platform}`,
+            `Current Directory: ${context.cwd}`,
+        ];
+        if (context.files && context.files.length > 0) {
+            const fileList = context.files.slice(0, 50); // Hard limit just in case
+            contextParts.push(`Files in current directory:\n${fileList.join('\n')}`);
+        }
+        if (context.directoryHistory.length > 0) {
+            const recentDirHistory = context.directoryHistory.slice(0, 10); // Take top 10 most frequent/recent
+            contextParts.push(`Recent commands in this directory:\n${recentDirHistory.join('\n')}`);
+        }
+        if (context.sessionCommands.length > 0) {
+            const recentSession = context.sessionCommands.slice(-10);
+            contextParts.push(`Commands run in this session:\n${recentSession.join('\n')}`);
+        }
+        const userPrompt = `${contextParts.join('\n\n')}
+
+User is currently typing: "${context.currentInput}"
+
+What is the full command they are trying to type?`;
+        // Build messages
+        const messages = [
+            { role: 'user', content: AUTOCOMPLETE_SYSTEM_PROMPT + '\n\n' + userPrompt }
+        ];
+        // Log detailed context and prompt for debugging
+        quickLog(`[${new Date().toISOString()}] [AIAutocompleteAgent] Context: ${JSON.stringify(context, null, 2)}\n`);
+        quickLog(`[${new Date().toISOString()}] [AIAutocompleteAgent] Full Prompt:\n${JSON.stringify(messages, null, 2)}\n`);
+        // Stream AI response
+        let responseText = '';
+        try {
+            for await (const chunk of aiServiceClient.streamChat(MODEL, messages, [], // No tools needed
+            {
+                cwd: context.cwd,
+                platform: context.platform,
+                shell: process.platform === 'win32' ? 'powershell' : 'bash',
+                os: (context.os === 'win32' ? 'windows' : context.os === 'darwin' ? 'macos' : 'linux'),
+                homeDir: ''
+            }, undefined, undefined, signal)) {
+                if (signal.aborted) {
+                    throw new Error('AbortError');
+                }
+                if (chunk.type === 'text') {
+                    responseText += chunk.content;
+                }
+                // Early exit if response is getting too long (should be a single command)
+                if (responseText.length > 200) {
+                    break;
+                }
+            }
+        }
+        catch (error) {
+            if (error.message === 'AbortError' || error.name === 'AbortError') {
+                const abortError = new Error('AbortError');
+                abortError.name = 'AbortError';
+                throw abortError;
+            }
+            throw error;
+        }
+        // Parse response
+        const prediction = responseText.trim();
+        quickLog(`[${new Date().toISOString()}] [AIAutocompleteAgent] Raw AI Response: "${prediction}"\n`);
+        // Check for NONE or invalid predictions
+        if (!prediction || prediction.toUpperCase() === 'NONE' || prediction.length < 2) {
+            return null;
+        }
+        // Don't return the prediction if it's the same as the current input
+        if (prediction.toLowerCase() === context.currentInput.toLowerCase()) {
+            return null;
+        }
+        // Don't return if prediction doesn't start with the current input (should be a completion)
+        if (!prediction.toLowerCase().startsWith(context.currentInput.toLowerCase())) {
+            return null;
+        }
+        quickLog(`[${new Date().toISOString()}] [AIAutocompleteAgent] Valid Prediction: "${prediction}" for input "${context.currentInput}"\n`);
+        return prediction;
+    }
+    /**
+     * Merge two abort signals into one
+     */
+    mergeSignals(signal1, signal2) {
+        const controller = new AbortController();
+        const abort = () => controller.abort();
+        if (signal1.aborted || signal2.aborted) {
+            controller.abort();
+        }
+        else {
+            signal1.addEventListener('abort', abort, { once: true });
+            signal2.addEventListener('abort', abort, { once: true });
+        }
+        return controller.signal;
+    }
+}
+// Export singleton
+export const AIAutocompleteAgent = new AIAutocompleteAgentClass();
+// Export debounce constant for use in InputBox
+export const AI_AUTOCOMPLETE_DEBOUNCE_MS = DEBOUNCE_MS;
+//# sourceMappingURL=ai-autocomplete-agent.js.map
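
The exported singleton and AI_AUTOCOMPLETE_DEBOUNCE_MS are meant to be consumed by InputBox; a rough sketch of the expected debounce/abort pattern (this is not the shipped InputBox code, just the wiring the comments above describe):

import { AIAutocompleteAgent, AI_AUTOCOMPLETE_DEBOUNCE_MS, type AutocompleteContext } from './ai-autocomplete-agent.js';

let timer: ReturnType<typeof setTimeout> | undefined;

function onInputChanged(buildContext: () => AutocompleteContext) {
    AIAutocompleteAgent.abort();          // drop any in-flight prediction
    if (timer) clearTimeout(timer);
    timer = setTimeout(async () => {      // fire only after 5s of inactivity
        const suggestion = await AIAutocompleteAgent.predictCommand(buildContext());
        if (suggestion) {
            // render the suggestion (e.g., as ghost text the user can accept)
        }
    }, AI_AUTOCOMPLETE_DEBOUNCE_MS);
}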

package/dist/services/ai-autocomplete-agent.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"ai-autocomplete-agent.js","sourceRoot":"","sources":["../../src/services/ai-autocomplete-agent.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EAAE,eAAe,EAAwB,MAAM,wBAAwB,CAAC;AAC/E,OAAO,EAAE,QAAQ,EAAE,MAAM,iCAAiC,CAAC;AAE3D,sDAAsD;AAEtD,MAAM,KAAK,GAAG,uBAAuB,CAAC,CAAE,mCAAmC;AAC3E,MAAM,WAAW,GAAG,IAAI,CAAC,CAAe,4CAA4C;AAcpF,0DAA0D;AAE1D,MAAM,0BAA0B,GAAG;;;;;;;;;;;;;;;;6BAgBN,CAAC;AAE9B,sEAAsE;AAEtE,MAAM,wBAAwB;IAClB,cAAc,GAA2B,IAAI,CAAC;IAEtD;;;OAGG;IACH,KAAK,CAAC,cAAc,CAAC,OAA4B,EAAE,MAAoB;QACnE,+CAA+C;QAC/C,IAAI,CAAC,OAAO,CAAC,YAAY,IAAI,OAAO,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAClE,OAAO,IAAI,CAAC;QAChB,CAAC;QAED,mCAAmC;QACnC,IAAI,OAAO,CAAC,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC;YACvC,OAAO,IAAI,CAAC;QAChB,CAAC;QAED,8BAA8B;QAC9B,IAAI,OAAO,CAAC,YAAY,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC;YACrC,OAAO,IAAI,CAAC;QAChB,CAAC;QAED,6BAA6B;QAC7B,IAAI,CAAC,KAAK,EAAE,CAAC;QAEb,8BAA8B;QAC9B,MAAM,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;QAC9C,IAAI,CAAC,cAAc,GAAG,eAAe,CAAC;QAEtC,4CAA4C;QAC5C,MAAM,eAAe,GAAG,MAAM;YAC1B,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC,MAAM,EAAE,eAAe,CAAC,MAAM,CAAC;YACnD,CAAC,CAAC,eAAe,CAAC,MAAM,CAAC;QAE7B,IAAI,CAAC;YACD,MAAM,UAAU,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;YAC/D,OAAO,UAAU,CAAC;QACtB,CAAC;QAAC,OAAO,KAAU,EAAE,CAAC;YAClB,IAAI,KAAK,CAAC,IAAI,KAAK,YAAY,IAAI,KAAK,CAAC,OAAO,KAAK,YAAY,EAAE,CAAC;gBAChE,0CAA0C;gBAC1C,OAAO,IAAI,CAAC;YAChB,CAAC;YACD,QAAQ,CAAC,IAAI,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,6CAA6C,KAAK,CAAC,OAAO,IAAI,CAAC,CAAC;YACrG,OAAO,IAAI,CAAC;QAChB,CAAC;gBAAS,CAAC;YACP,IAAI,IAAI,CAAC,cAAc,KAAK,eAAe,EAAE,CAAC;gBAC1C,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC;YAC/B,CAAC;QACL,CAAC;IACL,CAAC;IAED;;OAEG;IACH,KAAK;QACD,IAAI,IAAI,CAAC,cAAc,EAAE,CAAC;YACtB,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,CAAC;YAC5B,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC;QAC/B,CAAC;IACL,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,MAAM,CAAC,OAA4B,EAAE,MAAmB;QAClE,uBAAuB;QACvB,MAAM,YAAY,GAAa;YAC3B,OAAO,OAAO,CAAC,EAAE,EAAE;YACnB,aAAa,OAAO,CAAC,QAAQ,EAAE;YAC/B,sBAAsB,OAAO,CAAC,GAAG,EAAE;SACtC,CAAC;QAEF,IAAI,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAC5C,MAAM,QAAQ,GAAG,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,0BAA0B;YACvE,YAAY,CAAC,IAAI,CAAC,gCAAgC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QAC7E,CAAC;QAED,IAAI,OAAO,CAAC,gBAAgB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACtC,MAAM,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,mCAAmC;YACnG,YAAY,CAAC,IAAI,CAAC,uCAAuC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QAC5F,CAAC;QAED,IAAI,OAAO,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACrC,MAAM,aAAa,GAAG,OAAO,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC;YACzD,YAAY,CAAC,IAAI,CAAC,kCAAkC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QACpF,CAAC;QAED,MAAM,UAAU,GAAG,GAAG,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC;;6BAE1B,OAAO,CAAC,YAAY;;kDAEC,CAAC;QAE3C,iBAAiB;QACjB,MAAM,QAAQ,GAAgB;YAC1B,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,0BAA0B,GAAG,MAAM,GAAG,UAAU,EAAE;SAC9E,CAAC;QAEF,gDAAgD;QAChD,QAAQ,CAAC,IAAI,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,oCAAoC,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC;QAC/G,QAAQ,CAAC,IAAI,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,yCAAyC,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC;QAErH,qBAAqB;QACrB,IAAI,YAAY,GAAG,EAAE,CAAC;QAEtB,IAAI,CAAC;YACD,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,eAAe,CAAC,UAAU,CAChD,KAAK,EACL,QAAQ,EACR,EAAE,EAAG,kBAAkB;YACvB;gBACI,GAAG,EAAE,OAAO,CAAC,GAAG;gBAChB,QAAQ,EAAE,OAAO,CAAC,QAAQ;gBAC1B,KAAK,EAAE,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,MAAM;gBAC3D,EAAE,EAAE,CAAC,OAAO,CAAC,EAAE,KAAK,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC
,CAAC,OAAO,CAAC,EAAE,KAAK,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAkC;gBACvH,OAAO,EAAE,EAAE;aACd,EACD,SAAS,EACT,SAAS,EACT,MAAM,CACT,EAAE,CAAC;gBACA,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;oBACjB,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;gBAClC,CAAC;gBAED,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;oBACxB,YAAY,IAAI,KAAK,CAAC,OAAO,CAAC;gBAClC,CAAC;gBAED,0EAA0E;gBAC1E,IAAI,YAAY,CAAC,MAAM,GAAG,GAAG,EAAE,CAAC;oBAC5B,MAAM;gBACV,CAAC;YACL,CAAC;QACL,CAAC;QAAC,OAAO,KAAU,EAAE,CAAC;YAClB,IAAI,KAAK,CAAC,OAAO,KAAK,YAAY,IAAI,KAAK,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;gBAChE,MAAM,UAAU,GAAG,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;gBAC3C,UAAU,CAAC,IAAI,GAAG,YAAY,CAAC;gBAC/B,MAAM,UAAU,CAAC;YACrB,CAAC;YACD,MAAM,KAAK,CAAC;QAChB,CAAC;QAED,iBAAiB;QACjB,MAAM,UAAU,GAAG,YAAY,CAAC,IAAI,EAAE,CAAC;QAEvC,QAAQ,CAAC,IAAI,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,6CAA6C,UAAU,KAAK,CAAC,CAAC;QAEnG,wCAAwC;QACxC,IAAI,CAAC,UAAU,IAAI,UAAU,CAAC,WAAW,EAAE,KAAK,MAAM,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAC9E,OAAO,IAAI,CAAC;QAChB,CAAC;QAED,oEAAoE;QACpE,IAAI,UAAU,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC,YAAY,CAAC,WAAW,EAAE,EAAE,CAAC;YAClE,OAAO,IAAI,CAAC;QAChB,CAAC;QAED,2FAA2F;QAC3F,IAAI,CAAC,UAAU,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,YAAY,CAAC,WAAW,EAAE,CAAC,EAAE,CAAC;YAC3E,OAAO,IAAI,CAAC;QAChB,CAAC;QAED,QAAQ,CAAC,IAAI,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,8CAA8C,UAAU,gBAAgB,OAAO,CAAC,YAAY,KAAK,CAAC,CAAC;QAExI,OAAO,UAAU,CAAC;IACtB,CAAC;IAED;;OAEG;IACK,YAAY,CAAC,OAAoB,EAAE,OAAoB;QAC3D,MAAM,UAAU,GAAG,IAAI,eAAe,EAAE,CAAC;QAEzC,MAAM,KAAK,GAAG,GAAG,EAAE,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;QAEvC,IAAI,OAAO,CAAC,OAAO,IAAI,OAAO,CAAC,OAAO,EAAE,CAAC;YACrC,UAAU,CAAC,KAAK,EAAE,CAAC;QACvB,CAAC;aAAM,CAAC;YACJ,OAAO,CAAC,gBAAgB,CAAC,OAAO,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;YACzD,OAAO,CAAC,gBAAgB,CAAC,OAAO,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;QAC7D,CAAC;QAED,OAAO,UAAU,CAAC,MAAM,CAAC;IAC7B,CAAC;CACJ;AAED,mBAAmB;AACnB,MAAM,CAAC,MAAM,mBAAmB,GAAG,IAAI,wBAAwB,EAAE,CAAC;AAElE,+CAA+C;AAC/C,MAAM,CAAC,MAAM,2BAA2B,GAAG,WAAW,CAAC"}

package/dist/services/ai-service-client.d.ts
CHANGED
@@ -91,6 +91,31 @@ export declare class AIServiceClient {
      * This is a workaround since sessionToken is private
      */
     private getSessionToken;
+    /**
+     * Check if the current configuration is using a local Ollama model
+     */
+    isUsingLocalModel(): boolean;
+    /**
+     * Get the current model name from config
+     */
+    private getCurrentModel;
+    /**
+     * Convert internal message format to Ollama format (with tool support)
+     */
+    private convertToOllamaMessages;
+    /**
+     * Convert ToolSchema array to Ollama tool format
+     */
+    private convertToolsToOllamaFormat;
+    /**
+     * Stream chat request to local Ollama instance with tool calling support
+     *
+     * @param model - The local Ollama model to use (e.g., 'llama3.2:latest')
+     * @param messages - Conversation history
+     * @param tools - Available tools (will be converted to Ollama format)
+     * @yields Stream chunks (text, tool_call, or done events)
+     */
+    streamLocalChat(model: string, messages: Message[], tools?: ToolSchema[]): AsyncGenerator<StreamChunk, void, unknown>;
     /**
      * Parse Server-Sent Events stream from response body
      *
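
Existing callers of streamChat need no changes: when the config marks a local model, the client reroutes internally (see the streamChat hunk below). The new generator can also be consumed directly; a sketch assuming messages and tools are already built, with a hypothetical runTool handler:

import { aiServiceClient } from 'centaurus-cli/dist/services/ai-service-client.js';

// 'llama3.2:latest' is the example model name from the JSDoc above.
for await (const chunk of aiServiceClient.streamLocalChat('llama3.2:latest', messages, tools)) {
    if (chunk.type === 'text') process.stdout.write(chunk.content);
    else if (chunk.type === 'tool_call') await runTool(chunk.toolCall); // hypothetical handler
    else if (chunk.type === 'error') console.error(chunk.message);
    else if (chunk.type === 'done') break;
}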

package/dist/services/ai-service-client.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"ai-service-client.d.ts","sourceRoot":"","sources":["../../src/services/ai-service-client.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAGH,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;
+
{"version":3,"file":"ai-service-client.d.ts","sourceRoot":"","sources":["../../src/services/ai-service-client.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAGH,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAS5D;;GAEG;AACH,MAAM,WAAW,OAAO;IACtB,IAAI,EAAE,QAAQ,GAAG,MAAM,GAAG,WAAW,GAAG,MAAM,CAAC;IAC/C,OAAO,EAAE,MAAM,CAAC;IAChB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,UAAU,CAAC,EAAE,QAAQ,EAAE,CAAC;IACxB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,iBAAiB,CAAC,EAAE,MAAM,CAAC;CAC5B;AAED;;GAEG;AACH,MAAM,WAAW,QAAQ;IACvB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC/B,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED;;GAEG;AACH,MAAM,WAAW,SAAS;IACxB,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,WAAW,CAAC;IAClB,QAAQ,EAAE,QAAQ,CAAC;CACpB;AAED,MAAM,WAAW,SAAS;IACxB,IAAI,EAAE,MAAM,CAAC;CACd;AAED,MAAM,WAAW,UAAU;IACzB,IAAI,EAAE,OAAO,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,EAAE,MAAM,CAAC;CACd;AAED,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,SAAS,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,sBAAsB;IACrC,IAAI,EAAE,oBAAoB,CAAC;IAC3B,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,MAAM,WAAW,GAAG,SAAS,GAAG,aAAa,GAAG,SAAS,GAAG,UAAU,GAAG,YAAY,GAAG,sBAAsB,CAAC;AAgBrH;;GAEG;AACH,qBAAa,eAAe;IAC1B,OAAO,CAAC,OAAO,CAAS;IACxB,OAAO,CAAC,UAAU,CAAa;IAC/B,OAAO,CAAC,UAAU,CAAgB;;IAQlC;;;OAGG;IACH,OAAO,CAAC,UAAU;IAWlB;;;;;;;;;OASG;IACI,UAAU,CACf,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,OAAO,EAAE,EACnB,KAAK,EAAE,UAAU,EAAE,EACnB,kBAAkB,CAAC,EAAE,kBAAkB,EACvC,IAAI,CAAC,EAAE,MAAM,EACb,cAAc,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,EACpC,WAAW,CAAC,EAAE,WAAW,GACxB,cAAc,CAAC,WAAW,EAAE,IAAI,EAAE,OAAO,CAAC;IAsL7C;;OAEG;IACH,OAAO,CAAC,gBAAgB;IAKxB;;OAEG;IACH,OAAO,CAAC,KAAK;IAIb;;;OAGG;IACH,OAAO,CAAC,eAAe;IAiBvB;;OAEG;IACH,iBAAiB,IAAI,OAAO;IAc5B;;OAEG;IACH,OAAO,CAAC,eAAe;IAcvB;;OAEG;IACH,OAAO,CAAC,uBAAuB;IAiC/B;;OAEG;IACH,OAAO,CAAC,0BAA0B;IAelC;;;;;;;OAOG;IACI,eAAe,CACpB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,OAAO,EAAE,EACnB,KAAK,GAAE,UAAU,EAAO,GACvB,cAAc,CAAC,WAAW,EAAE,IAAI,EAAE,OAAO,CAAC;IA6D7C;;;;;OAKG;YACY,cAAc;CA0E9B;AAGD,eAAO,MAAM,eAAe,iBAAwB,CAAC"}

package/dist/services/ai-service-client.js
CHANGED
@@ -10,6 +10,8 @@ import { join } from 'path';
 import { homedir } from 'os';
 import { IS_DEV_BUILD, DEV_BACKEND_URL, PRODUCTION_BACKEND_URL } from '../config/build-config.js';
 import { logWarning } from '../utils/logger.js';
+import { ollamaService, OllamaService } from './ollama-service.js';
+import { quickLog } from '../utils/conversation-logger.js';
 /**
  * AI Service Client for streaming chat requests to backend
  */
@@ -46,7 +48,23 @@ export class AIServiceClient {
      * @yields Stream chunks (text, tool calls, done, or error events)
      */
     async *streamChat(model, messages, tools, environmentContext, mode, thinkingConfig, abortSignal) {
-        //
+        // Check if using a local Ollama model
+        if (this.isUsingLocalModel()) {
+            const localModel = this.getCurrentModel();
+            const supportsTools = OllamaService.modelSupportsTools(localModel);
+            quickLog(`[${new Date().toISOString()}] [AIServiceClient] Routing to local Ollama model: ${localModel}, supportsTools: ${supportsTools}\n`);
+            // Show warning only if tools are provided but model doesn't support them
+            if (tools.length > 0 && !supportsTools) {
+                yield {
+                    type: 'text',
+                    content: `⚠️ Note: Model "${localModel}" does not support tool calling. Running in text-only mode.\n\n`,
+                };
+            }
+            // Route to local Ollama with tools
+            yield* this.streamLocalChat(localModel, messages, tools);
+            return;
+        }
+        // Build request payload for cloud backend
         const payload = {
             model,
             messages,
@@ -223,6 +241,149 @@ export class AIServiceClient {
         }
         return '';
     }
+    /**
+     * Check if the current configuration is using a local Ollama model
+     */
+    isUsingLocalModel() {
+        const configPath = join(homedir(), '.centaurus', 'config.json');
+        try {
+            if (existsSync(configPath)) {
+                const data = readFileSync(configPath, 'utf-8');
+                const config = JSON.parse(data);
+                return config.isLocalModel === true;
+            }
+        }
+        catch (error) {
+            // Return false if unable to read
+        }
+        return false;
+    }
+    /**
+     * Get the current model name from config
+     */
+    getCurrentModel() {
+        const configPath = join(homedir(), '.centaurus', 'config.json');
+        try {
+            if (existsSync(configPath)) {
+                const data = readFileSync(configPath, 'utf-8');
+                const config = JSON.parse(data);
+                return config.model || 'gemini-2.5-flash';
+            }
+        }
+        catch (error) {
+            // Return default if unable to read
+        }
+        return 'gemini-2.5-flash';
+    }
+    /**
+     * Convert internal message format to Ollama format (with tool support)
+     */
+    convertToOllamaMessages(messages) {
+        return messages.map(msg => {
+            if (msg.role === 'tool') {
+                // Tool results - convert to Ollama's tool message format
+                return {
+                    role: 'tool',
+                    content: msg.content,
+                    tool_name: msg.tool_call_id || 'unknown', // Use tool_call_id as tool_name
+                };
+            }
+            if (msg.role === 'assistant' && msg.tool_calls?.length) {
+                // Assistant message with tool calls
+                return {
+                    role: 'assistant',
+                    content: msg.content || '',
+                    tool_calls: msg.tool_calls.map(tc => ({
+                        function: {
+                            name: tc.name,
+                            arguments: tc.arguments,
+                        }
+                    })),
+                };
+            }
+            // Regular message
+            return {
+                role: msg.role,
+                content: msg.content,
+            };
+        });
+    }
+    /**
+     * Convert ToolSchema array to Ollama tool format
+     */
+    convertToolsToOllamaFormat(tools) {
+        return tools.map(tool => ({
+            type: 'function',
+            function: {
+                name: tool.name,
+                description: tool.description,
+                parameters: {
+                    type: 'object',
+                    properties: tool.parameters.properties,
+                    required: tool.parameters.required,
+                },
+            },
+        }));
+    }
+    /**
+     * Stream chat request to local Ollama instance with tool calling support
+     *
+     * @param model - The local Ollama model to use (e.g., 'llama3.2:latest')
+     * @param messages - Conversation history
+     * @param tools - Available tools (will be converted to Ollama format)
+     * @yields Stream chunks (text, tool_call, or done events)
+     */
+    async *streamLocalChat(model, messages, tools = []) {
+        try {
+            const supportsTools = OllamaService.modelSupportsTools(model);
+            const effectiveTools = supportsTools ? tools : [];
+            quickLog(`[${new Date().toISOString()}] [AIServiceClient] Starting local chat with model: ${model}, tools: ${effectiveTools.length}, supportsTools: ${supportsTools}\n`);
+            // Convert messages to Ollama format
+            const ollamaMessages = this.convertToOllamaMessages(messages);
+            // Convert tools to Ollama format if model supports them
+            const ollamaTools = effectiveTools.length > 0
+                ? this.convertToolsToOllamaFormat(effectiveTools)
+                : undefined;
+            // Send request to Ollama with tools
+            const response = await ollamaService.sendChatMessage(model, ollamaMessages, ollamaTools);
+            // Check for tool calls in response
+            if (response.message?.tool_calls?.length) {
+                quickLog(`[${new Date().toISOString()}] [AIServiceClient] Received ${response.message.tool_calls.length} tool calls from Ollama\n`);
+                // Yield each tool call
+                for (const toolCall of response.message.tool_calls) {
+                    yield {
+                        type: 'tool_call',
+                        toolCall: {
+                            id: `ollama-${Date.now()}-${toolCall.function.name}`, // Generate unique ID
+                            name: toolCall.function.name,
+                            arguments: toolCall.function.arguments,
+                        },
+                    };
+                }
+                // Signal completion (tool execution will happen in cli-adapter)
+                yield { type: 'done' };
+                return;
+            }
+            // No tool calls - yield text response
+            if (response.message.content) {
+                yield {
+                    type: 'text',
+                    content: response.message.content,
+                };
+            }
+            // Signal completion
+            yield { type: 'done' };
+            quickLog(`[${new Date().toISOString()}] [AIServiceClient] Local chat completed\n`);
+        }
+        catch (error) {
+            quickLog(`[${new Date().toISOString()}] [AIServiceClient] Local chat error: ${error.message}\n`);
+            yield {
+                type: 'error',
+                message: error.message || 'Failed to communicate with Ollama',
+                code: 'OLLAMA_ERROR',
+            };
+        }
+    }
     /**
      * Parse Server-Sent Events stream from response body
      *