@kispace-io/extension-ai-system 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +20 -0
- package/src/agents/agent-registry.ts +65 -0
- package/src/agents/index.ts +4 -0
- package/src/agents/message-processor.ts +50 -0
- package/src/agents/prompt-builder.ts +167 -0
- package/src/ai-system-extension.ts +104 -0
- package/src/aisystem.json +154 -0
- package/src/chat-provider-contributions.ts +95 -0
- package/src/core/constants.ts +23 -0
- package/src/core/index.ts +6 -0
- package/src/core/interfaces.ts +137 -0
- package/src/core/types.ts +126 -0
- package/src/general-assistant-prompt.txt +14 -0
- package/src/i18n.json +11 -0
- package/src/index.ts +13 -0
- package/src/prompt-enhancer-contributions.ts +29 -0
- package/src/providers/index.ts +5 -0
- package/src/providers/ollama-provider.ts +13 -0
- package/src/providers/openai-provider.ts +12 -0
- package/src/providers/provider-factory.ts +36 -0
- package/src/providers/provider.ts +156 -0
- package/src/providers/streaming/ollama-parser.ts +114 -0
- package/src/providers/streaming/sse-parser.ts +152 -0
- package/src/providers/streaming/stream-parser.ts +16 -0
- package/src/register.ts +16 -0
- package/src/service/ai-service.ts +744 -0
- package/src/service/token-usage-tracker.ts +139 -0
- package/src/tools/index.ts +4 -0
- package/src/tools/tool-call-accumulator.ts +81 -0
- package/src/tools/tool-executor.ts +174 -0
- package/src/tools/tool-registry.ts +70 -0
- package/src/translation.ts +3 -0
- package/src/utils/token-estimator.ts +87 -0
- package/src/utils/tool-detector.ts +144 -0
- package/src/view/agent-group-manager.ts +146 -0
- package/src/view/components/ai-agent-response-card.ts +198 -0
- package/src/view/components/ai-agent-response-group.ts +220 -0
- package/src/view/components/ai-chat-input.ts +131 -0
- package/src/view/components/ai-chat-message.ts +615 -0
- package/src/view/components/ai-empty-state.ts +52 -0
- package/src/view/components/ai-loading-indicator.ts +91 -0
- package/src/view/components/index.ts +7 -0
- package/src/view/components/k-ai-config-editor.ts +828 -0
- package/src/view/index.ts +6 -0
- package/src/view/k-aiview.ts +901 -0
- package/src/view/k-token-usage.ts +220 -0
- package/src/view/provider-manager.ts +196 -0
- package/src/view/session-manager.ts +255 -0
- package/src/view/stream-manager.ts +123 -0
- package/src/workflows/conditional-workflow.ts +98 -0
- package/src/workflows/index.ts +6 -0
- package/src/workflows/parallel-workflow.ts +45 -0
- package/src/workflows/sequential-workflow.ts +95 -0
- package/src/workflows/workflow-engine.ts +63 -0
- package/src/workflows/workflow-strategy.ts +21 -0
- package/tsconfig.json +12 -0
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
import { StreamParser } from "./stream-parser";
|
|
2
|
+
import type { StreamChunk, TokenUsage } from "@kispace-io/core";
|
|
3
|
+
|
|
4
|
+
export class OllamaParser extends StreamParser {
|
|
5
|
+
private usage: TokenUsage | null = null;
|
|
6
|
+
|
|
7
|
+
async *parse(reader: ReadableStreamDefaultReader<Uint8Array>): AsyncGenerator<StreamChunk> {
|
|
8
|
+
let buffer = '';
|
|
9
|
+
this.usage = null;
|
|
10
|
+
|
|
11
|
+
try {
|
|
12
|
+
while (true) {
|
|
13
|
+
const { done, value } = await reader.read();
|
|
14
|
+
|
|
15
|
+
if (done) {
|
|
16
|
+
break;
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
buffer += this.decoder.decode(value, { stream: true });
|
|
20
|
+
const lines = buffer.split('\n');
|
|
21
|
+
buffer = lines.pop() || '';
|
|
22
|
+
|
|
23
|
+
for (const line of this.processLines(lines)) {
|
|
24
|
+
try {
|
|
25
|
+
const json = JSON.parse(line);
|
|
26
|
+
|
|
27
|
+
if (json.error) {
|
|
28
|
+
yield {
|
|
29
|
+
type: 'error',
|
|
30
|
+
content: json.error,
|
|
31
|
+
metadata: json
|
|
32
|
+
};
|
|
33
|
+
continue;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
if (json.done) {
|
|
37
|
+
this.extractUsage(json);
|
|
38
|
+
const doneChunk: StreamChunk = { type: 'done', content: '' };
|
|
39
|
+
if (this.usage) {
|
|
40
|
+
doneChunk.metadata = { usage: this.usage };
|
|
41
|
+
}
|
|
42
|
+
yield doneChunk;
|
|
43
|
+
continue;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
if (json.message?.content) {
|
|
47
|
+
yield {
|
|
48
|
+
type: 'token',
|
|
49
|
+
content: json.message.content,
|
|
50
|
+
message: {
|
|
51
|
+
role: json.message.role || 'assistant',
|
|
52
|
+
content: json.message.content
|
|
53
|
+
}
|
|
54
|
+
};
|
|
55
|
+
} else if (json.response) {
|
|
56
|
+
yield {
|
|
57
|
+
type: 'token',
|
|
58
|
+
content: json.response,
|
|
59
|
+
message: {
|
|
60
|
+
role: 'assistant',
|
|
61
|
+
content: json.response
|
|
62
|
+
}
|
|
63
|
+
};
|
|
64
|
+
}
|
|
65
|
+
} catch (e) {
|
|
66
|
+
continue;
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
if (buffer.trim()) {
|
|
72
|
+
try {
|
|
73
|
+
const json = JSON.parse(buffer);
|
|
74
|
+
if (json.done) {
|
|
75
|
+
this.extractUsage(json);
|
|
76
|
+
}
|
|
77
|
+
if (json.message?.content) {
|
|
78
|
+
yield {
|
|
79
|
+
type: 'token',
|
|
80
|
+
content: json.message.content,
|
|
81
|
+
message: {
|
|
82
|
+
role: json.message.role || 'assistant',
|
|
83
|
+
content: json.message.content
|
|
84
|
+
}
|
|
85
|
+
};
|
|
86
|
+
}
|
|
87
|
+
} catch (e) {
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
const doneChunk: StreamChunk = { type: 'done', content: '' };
|
|
92
|
+
if (this.usage) {
|
|
93
|
+
doneChunk.metadata = { usage: this.usage };
|
|
94
|
+
}
|
|
95
|
+
yield doneChunk;
|
|
96
|
+
} finally {
|
|
97
|
+
reader.releaseLock();
|
|
98
|
+
}
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
private extractUsage(json: any): void {
|
|
102
|
+
if (json.prompt_eval_count !== undefined || json.eval_count !== undefined) {
|
|
103
|
+
const promptTokens = json.prompt_eval_count || 0;
|
|
104
|
+
const completionTokens = json.eval_count || 0;
|
|
105
|
+
this.usage = {
|
|
106
|
+
promptTokens,
|
|
107
|
+
completionTokens,
|
|
108
|
+
totalTokens: promptTokens + completionTokens,
|
|
109
|
+
estimated: false
|
|
110
|
+
};
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
|
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
import { StreamParser } from "./stream-parser";
|
|
2
|
+
import type { StreamChunk, ToolCall, TokenUsage } from "@kispace-io/core";
|
|
3
|
+
|
|
4
|
+
export class SSEParser extends StreamParser {
|
|
5
|
+
private usage: TokenUsage | null = null;
|
|
6
|
+
|
|
7
|
+
async *parse(reader: ReadableStreamDefaultReader<Uint8Array>): AsyncGenerator<StreamChunk> {
|
|
8
|
+
let buffer = '';
|
|
9
|
+
this.usage = null;
|
|
10
|
+
|
|
11
|
+
try {
|
|
12
|
+
while (true) {
|
|
13
|
+
const { done, value } = await reader.read();
|
|
14
|
+
|
|
15
|
+
if (done) {
|
|
16
|
+
break;
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
buffer += this.decoder.decode(value, { stream: true });
|
|
20
|
+
const lines = buffer.split('\n');
|
|
21
|
+
buffer = lines.pop() || '';
|
|
22
|
+
|
|
23
|
+
for (const line of this.processLines(lines)) {
|
|
24
|
+
if (line.startsWith('data: ')) {
|
|
25
|
+
const data = line.slice(6).trim();
|
|
26
|
+
|
|
27
|
+
if (data === '[DONE]') {
|
|
28
|
+
const doneChunk: StreamChunk = { type: 'done', content: '' };
|
|
29
|
+
if (this.usage) {
|
|
30
|
+
doneChunk.metadata = { usage: this.usage };
|
|
31
|
+
}
|
|
32
|
+
yield doneChunk;
|
|
33
|
+
continue;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
try {
|
|
37
|
+
const json = JSON.parse(data);
|
|
38
|
+
|
|
39
|
+
if (json.error) {
|
|
40
|
+
yield {
|
|
41
|
+
type: 'error',
|
|
42
|
+
content: json.error.message || 'Unknown error',
|
|
43
|
+
metadata: json.error
|
|
44
|
+
};
|
|
45
|
+
continue;
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
this.extractUsage(json);
|
|
49
|
+
|
|
50
|
+
const chunk = this.parseChunk(json);
|
|
51
|
+
if (chunk) {
|
|
52
|
+
yield chunk;
|
|
53
|
+
}
|
|
54
|
+
} catch (e) {
|
|
55
|
+
continue;
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
if (buffer.trim()) {
|
|
62
|
+
if (buffer.startsWith('data: ')) {
|
|
63
|
+
const data = buffer.slice(6).trim();
|
|
64
|
+
if (data !== '[DONE]') {
|
|
65
|
+
try {
|
|
66
|
+
const json = JSON.parse(data);
|
|
67
|
+
this.extractUsage(json);
|
|
68
|
+
const chunk = this.parseChunk(json);
|
|
69
|
+
if (chunk) {
|
|
70
|
+
yield chunk;
|
|
71
|
+
}
|
|
72
|
+
} catch (e) {
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
const doneChunk: StreamChunk = { type: 'done', content: '' };
|
|
79
|
+
if (this.usage) {
|
|
80
|
+
doneChunk.metadata = { usage: this.usage };
|
|
81
|
+
}
|
|
82
|
+
yield doneChunk;
|
|
83
|
+
} finally {
|
|
84
|
+
reader.releaseLock();
|
|
85
|
+
}
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
private extractUsage(json: any): void {
|
|
89
|
+
if (json.usage) {
|
|
90
|
+
const usage = json.usage;
|
|
91
|
+
this.usage = {
|
|
92
|
+
promptTokens: usage.prompt_tokens || 0,
|
|
93
|
+
completionTokens: usage.completion_tokens || 0,
|
|
94
|
+
totalTokens: usage.total_tokens || 0,
|
|
95
|
+
estimated: false
|
|
96
|
+
};
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
private parseChunk(json: any): StreamChunk | null {
|
|
101
|
+
const delta = json.choices?.[0]?.delta;
|
|
102
|
+
const choice = json.choices?.[0];
|
|
103
|
+
|
|
104
|
+
if (delta?.content) {
|
|
105
|
+
return {
|
|
106
|
+
type: 'token',
|
|
107
|
+
content: delta.content,
|
|
108
|
+
message: {
|
|
109
|
+
role: delta.role || 'assistant',
|
|
110
|
+
content: choice?.message?.content || delta.content
|
|
111
|
+
}
|
|
112
|
+
};
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
if (choice?.message?.tool_calls) {
|
|
116
|
+
const toolCalls = this.parseToolCalls(choice.message.tool_calls, true);
|
|
117
|
+
if (toolCalls.length > 0) {
|
|
118
|
+
return {
|
|
119
|
+
type: 'token',
|
|
120
|
+
content: '',
|
|
121
|
+
toolCalls
|
|
122
|
+
};
|
|
123
|
+
}
|
|
124
|
+
} else if (delta?.tool_calls || choice?.delta?.tool_calls) {
|
|
125
|
+
const toolCalls = this.parseToolCalls(delta?.tool_calls || choice?.delta?.tool_calls || [], false);
|
|
126
|
+
if (toolCalls.length > 0) {
|
|
127
|
+
return {
|
|
128
|
+
type: 'token',
|
|
129
|
+
content: '',
|
|
130
|
+
toolCalls
|
|
131
|
+
};
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
return null;
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
private parseToolCalls(toolCalls: any[], isComplete: boolean = false): ToolCall[] {
|
|
139
|
+
return toolCalls
|
|
140
|
+
.filter(tc => tc.function !== undefined)
|
|
141
|
+
.map((tc, idx) => ({
|
|
142
|
+
id: tc.id || `call_${tc.index !== undefined ? tc.index : idx}_${Date.now()}`,
|
|
143
|
+
type: "function" as const,
|
|
144
|
+
function: {
|
|
145
|
+
name: tc.function?.name || "",
|
|
146
|
+
arguments: tc.function?.arguments || (isComplete ? "{}" : "")
|
|
147
|
+
},
|
|
148
|
+
_index: tc.index !== undefined ? tc.index : idx
|
|
149
|
+
} as ToolCall & { _index?: number }));
|
|
150
|
+
}
|
|
151
|
+
}
|
|
152
|
+
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import type { StreamChunk } from "@kispace-io/core";
|
|
2
|
+
|
|
3
|
+
export abstract class StreamParser {
|
|
4
|
+
protected decoder = new TextDecoder();
|
|
5
|
+
|
|
6
|
+
abstract parse(reader: ReadableStreamDefaultReader<Uint8Array>): AsyncGenerator<StreamChunk>;
|
|
7
|
+
|
|
8
|
+
protected *processLines(lines: string[]): Generator<string, void> {
|
|
9
|
+
for (const line of lines) {
|
|
10
|
+
if (line.trim() !== '') {
|
|
11
|
+
yield line;
|
|
12
|
+
}
|
|
13
|
+
}
|
|
14
|
+
}
|
|
15
|
+
}
|
|
16
|
+
|
package/src/register.ts
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { extensionRegistry, i18nLazy, contributionRegistry, SYSTEM_LANGUAGE_BUNDLES } from '@kispace-io/core';
|
|
2
|
+
import bundle from './i18n.json';
|
|
3
|
+
|
|
4
|
+
// Contribute this package's translation bundle to the system-wide language
// bundles before any lazy lookups run.
contributionRegistry.registerContribution(SYSTEM_LANGUAGE_BUNDLES, bundle as any);

// Lazy translator scoped to the 'extensions' namespace; values resolve at
// display time so the bundle registered above is available.
const t = i18nLazy('extensions');

// Register the AI System extension. The implementation module is loaded
// lazily via the `loader` dynamic import when the extension is activated.
extensionRegistry.registerExtension({
  id: "system.ai-system",
  name: t('EXT_AI_SYSTEM_NAME'),
  description: t('EXT_AI_SYSTEM_DESC'),
  loader: () => import("./ai-system-extension"),
  icon: "robot",
  // NOTE(review): presumably activated after the in-browser ML extension —
  // confirm the registry resolves dependencies before activation.
  dependencies: ["system.in-browser-ml"],
});
|