@docscode/core 1.0.0 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1166 -0
- package/dist/index.d.cts +517 -0
- package/dist/index.d.ts +517 -0
- package/dist/index.js +1115 -0
- package/package.json +6 -3
- package/scripts/docling_bridge.py +0 -56
- package/src/AICollaborator.ts +0 -209
- package/src/AutocompletePlugin.ts +0 -108
- package/src/ConflictResolver.ts +0 -113
- package/src/KairoPlugin.ts +0 -27
- package/src/LLMAdapter.ts +0 -27
- package/src/MockLLMAdapter.ts +0 -45
- package/src/PromptCache.ts +0 -76
- package/src/StreamBuffer.ts +0 -60
- package/src/SummarizationPlugin.ts +0 -100
- package/src/TransformersAdapter.ts +0 -48
- package/src/adapters/AnthropicAdapter.ts +0 -72
- package/src/adapters/GeminiAdapter.ts +0 -72
- package/src/adapters/OllamaAdapter.ts +0 -81
- package/src/adapters/OpenAIAdapter.ts +0 -87
- package/src/canonical-model.ts +0 -164
- package/src/cli/index.ts +0 -48
- package/src/docling-client.ts +0 -47
- package/src/editors/MonacoAdapter.ts +0 -22
- package/src/editors/ProseMirrorAdapter.ts +0 -17
- package/src/editors/QuillAdapter.ts +0 -27
- package/src/editors/TiptapAdapter.ts +0 -50
- package/src/format-adapter.ts +0 -25
- package/src/index.ts +0 -37
- package/src/kairo.ts +0 -151
- package/src/suggestion-manager.ts +0 -39
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,1166 @@
|
|
|
1
|
+
// esbuild/tsup-generated CommonJS interop helpers. They wrap Object
// primitives so the bundle can expose ESM-style exports from a CJS file.
// Generated code — do not hand-edit; regenerate via the package build.
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define each export on `target` as a lazy, enumerable getter so the
// underlying binding is only evaluated when first accessed.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as live getters, skipping `except`
// and keys already present; enumerability is preserved when a descriptor
// can be read from the source.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a required CJS module so it can be consumed like an ESM namespace
// object (adds a synthetic `default` when the source lacks `__esModule`).
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Mark the export object as ESM (`__esModule: true`) and copy all exports
// onto it; used to build `module.exports` below.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
|
|
30
|
+
// src/index.ts
// Public export surface of @docscode/core. Each entry becomes a lazy
// getter on module.exports (via __export/__toCommonJS), so a class is
// only evaluated when a consumer first touches it.
var index_exports = {};
__export(index_exports, {
  AICollaborator: () => AICollaborator,
  AnthropicAdapter: () => AnthropicAdapter,
  AutocompletePlugin: () => AutocompletePlugin,
  CanonicalDoc: () => CanonicalDoc,
  ConflictResolver: () => ConflictResolver,
  DoclingClient: () => DoclingClient,
  GeminiAdapter: () => GeminiAdapter,
  Kairo: () => Kairo,
  KairoSession: () => KairoSession,
  MergePolicy: () => MergePolicy,
  MockLLMAdapter: () => MockLLMAdapter,
  OllamaAdapter: () => OllamaAdapter,
  OpenAIAdapter: () => OpenAIAdapter,
  PromptCache: () => PromptCache,
  StreamBuffer: () => StreamBuffer,
  SuggestionManager: () => SuggestionManager,
  SummarizationPlugin: () => SummarizationPlugin,
  kairo: () => kairo
});
module.exports = __toCommonJS(index_exports);
|
|
53
|
+
|
|
54
|
+
// ../../node_modules/tsup/assets/cjs_shims.js
// Shim for `import.meta.url` in the CJS build: in Node it derives a
// file:// URL from __filename; in a browser it falls back to the current
// <script>'s src, or document.baseURI when no script element is available.
var getImportMetaUrl = () => typeof document === "undefined" ? new URL(`file:${__filename}`).href : document.currentScript && document.currentScript.tagName.toUpperCase() === "SCRIPT" ? document.currentScript.src : new URL("main.js", document.baseURI).href;
var importMetaUrl = /* @__PURE__ */ getImportMetaUrl();
|
|
57
|
+
|
|
58
|
+
// src/kairo.ts
|
|
59
|
+
var Y4 = __toESM(require("yjs"), 1);
|
|
60
|
+
|
|
61
|
+
// src/PromptCache.ts
|
|
62
|
+
var PromptCache = class {
  // Backing Map; insertion order doubles as LRU order (oldest key first).
  cache;
  // Maximum number of entries retained before eviction kicks in.
  maxSize;
  totalHits = 0;
  totalMisses = 0;
  /**
   * Small LRU cache mapping prompt strings to completion results.
   * @param maxSize maximum entries kept (default 100).
   */
  constructor(maxSize = 100) {
    this.maxSize = maxSize;
    this.cache = new Map();
  }
  /** Retrieve cached result for a prompt. Returns null on miss. */
  get(prompt) {
    const hit = this.cache.get(prompt);
    if (hit === undefined) {
      this.totalMisses += 1;
      return null;
    }
    // Delete + re-insert so this key becomes the most recently used.
    this.cache.delete(prompt);
    hit.hits += 1;
    this.cache.set(prompt, hit);
    this.totalHits += 1;
    return hit.value;
  }
  /** Cache a result for a prompt. Evicts LRU entry if at capacity. */
  set(prompt, value) {
    if (this.cache.has(prompt)) {
      this.cache.delete(prompt);
    } else if (this.cache.size >= this.maxSize) {
      // First key in iteration order is the least recently used.
      const oldest = this.cache.keys().next().value;
      if (oldest !== void 0) {
        this.cache.delete(oldest);
      }
    }
    this.cache.set(prompt, { value, hits: 0, createdAt: Date.now() });
  }
  /** Check if a prompt is cached without counting as a hit. */
  has(prompt) {
    return this.cache.has(prompt);
  }
  /** Remove a specific entry. */
  invalidate(prompt) {
    this.cache.delete(prompt);
  }
  /** Clear all entries and reset hit/miss counters. */
  clear() {
    this.cache.clear();
    this.totalHits = 0;
    this.totalMisses = 0;
  }
  /** Cache statistics (size, capacity, hit rate, raw counters). */
  stats() {
    const lookups = this.totalHits + this.totalMisses;
    const hitRate = lookups === 0 ? 0 : this.totalHits / lookups;
    return {
      size: this.cache.size,
      maxSize: this.maxSize,
      hitRate,
      totalHits: this.totalHits,
      totalMisses: this.totalMisses
    };
  }
};
|
|
120
|
+
|
|
121
|
+
// src/StreamBuffer.ts
|
|
122
|
+
var StreamBuffer = class {
  // Target Y.Text-like instance tokens are written into.
  text;
  // Client ID used as transaction origin and "ai-client-id" attribution.
  clientID;
  // Pending, not-yet-flushed token text.
  buffer = "";
  // Batching window in milliseconds.
  flushInterval;
  // Handle of the scheduled flush, or null when none is pending.
  timer = null;
  // Document index where the next flush will insert.
  currentIndex;
  /**
   * Batches streamed LLM tokens and writes them into a Yjs text type at a
   * fixed interval, so each token does not become its own CRDT operation.
   *
   * @param text Y.Text-like target (needs .insert and .doc.transact).
   * @param clientID transaction origin / attribution value.
   * @param startIndex index at which insertion begins.
   * @param flushInterval batching window in ms (default 50).
   */
  constructor(text, clientID, startIndex, flushInterval = 50) {
    this.text = text;
    this.clientID = clientID;
    this.currentIndex = startIndex;
    this.flushInterval = flushInterval;
  }
  /**
   * Add a new token to the buffer and schedule a flush if none is pending.
   */
  push(token) {
    this.buffer += token;
    if (!this.timer) {
      this.timer = setTimeout(() => this.flush(), this.flushInterval);
    }
  }
  /**
   * Immediately flush the buffer to the Yjs document.
   *
   * Fix: cancel any pending scheduled flush first. Previously flush() only
   * nulled `this.timer` without clearTimeout(), so after a manual flush the
   * stale timeout still fired later (a useless extra flush) and kept the
   * Node event loop alive for up to `flushInterval` ms.
   */
  flush() {
    if (this.timer) {
      clearTimeout(this.timer);
      this.timer = null;
    }
    if (this.buffer.length === 0) {
      return;
    }
    const contentToInsert = this.buffer;
    this.buffer = "";
    this.text.doc?.transact(() => {
      this.text.insert(this.currentIndex, contentToInsert, {
        "ai-generated": true,
        "ai-client-id": this.clientID
      });
      this.currentIndex += contentToInsert.length;
    }, this.clientID);
  }
  /**
   * Stop any pending flushes without writing buffered text.
   */
  stop() {
    if (this.timer) {
      clearTimeout(this.timer);
      this.timer = null;
    }
  }
};
|
|
173
|
+
|
|
174
|
+
// src/suggestion-manager.ts
|
|
175
|
+
var SuggestionManager = class {
  // Backing Y.Doc (or compatible) the suggestions map lives on.
  doc;
  // Shared map of pending suggestions keyed by generated id.
  suggestions;
  /**
   * Tracks pending AI suggestions in the shared "kairo-suggestions" map.
   * @param doc document providing getMap().
   */
  constructor(doc) {
    this.doc = doc;
    this.suggestions = doc.getMap("kairo-suggestions");
  }
  /**
   * Store a new suggestion; returns its generated id.
   * Keeps the original short random-id scheme.
   */
  addSuggestion(suggestion) {
    const id = Math.random().toString(36).substring(7);
    this.suggestions.set(id, {
      ...suggestion,
      id,
      timestamp: Date.now()
    });
    return id;
  }
  /** All currently pending suggestions, as an array. */
  getSuggestions() {
    return [...this.suggestions.values()];
  }
  /**
   * Remove a suggestion by id (no-op for unknown ids).
   * `accept` is accepted for interface compatibility; the original had no
   * accept-side behavior either.
   */
  resolveSuggestion(id, accept) {
    const existing = this.suggestions.get(id);
    if (!existing) {
      return;
    }
    this.suggestions.delete(id);
  }
};
|
|
203
|
+
|
|
204
|
+
// src/AICollaborator.ts
|
|
205
|
+
/**
 * AICollaborator — the AI participant in a shared Yjs document.
 *
 * Owns the Y.Doc, an awareness instance (presence), a PromptCache, a
 * SuggestionManager, and an optional LLM adapter. Every write it performs
 * is tagged with "ai-*" attribution attributes and uses `this.clientID` as
 * the transaction origin so observers can distinguish AI edits.
 */
var AICollaborator = class {
  doc;
  awareness;
  // Numeric origin for all AI transactions (random < 1e6 unless supplied).
  clientID;
  suggestions;
  // LRU prompt -> completion cache (size from options.cacheSize, default 50).
  cache;
  // Presence status: "idle" | "thinking" | "writing" | "reviewing".
  _status = "idle";
  _plugins = [];
  // Optional LLM adapter; streamToDoc/summarize throw when absent.
  _llm;
  // Batch window (ms) forwarded to StreamBuffer (default 50).
  _streamFlushMs;
  /**
   * @param doc shared Y.Doc.
   * @param awareness y-protocols Awareness instance for presence.
   * @param options { clientID?, cacheSize?, llm?, streamFlushMs? }.
   */
  constructor(doc, awareness, options = {}) {
    this.doc = doc;
    this.awareness = awareness;
    this.clientID = options.clientID ?? Math.floor(Math.random() * 1e6);
    this.cache = new PromptCache(options.cacheSize ?? 50);
    this.suggestions = new SuggestionManager(this.doc);
    this._llm = options.llm;
    this._streamFlushMs = options.streamFlushMs ?? 50;
    this._updateAwareness();
    console.log(`[Kairo] AICollaborator initialized \u2014 clientID: ${this.clientID}, model: ${this._llm?.model ?? "none"}`);
  }
  // ─── Status ──────────────────────────────────────────────────────────
  // Every status change is broadcast via awareness so peers see it live.
  setStatus(status) {
    this._status = status;
    this._updateAwareness();
  }
  setThinking() {
    this.setStatus("thinking");
  }
  setWriting() {
    this.setStatus("writing");
  }
  setReviewing() {
    this.setStatus("reviewing");
  }
  setIdle() {
    this.setStatus("idle");
  }
  // Publish current status + static agent metadata on the "kairo"
  // awareness field.
  _updateAwareness() {
    this.awareness.setLocalStateField("kairo", {
      status: this._status,
      metadata: {
        name: "Kairo",
        version: "1.0.0",
        capabilities: ["autocomplete", "summarization", "streaming", "tracked-changes"],
        model: this._llm?.model
      },
      lastUpdate: Date.now()
    });
  }
  // ─── Plugin System ────────────────────────────────────────────────────
  // Registers a plugin and immediately runs its setup(); destroy() tears
  // all registered plugins down.
  registerPlugin(plugin) {
    this._plugins.push(plugin);
    plugin.setup();
  }
  // ─── CRDT Write Operations ────────────────────────────────────────────
  /**
   * Insert text with AI attribution markers.
   * Consuming editors can style/filter AI-generated content using these markers.
   */
  insertWithAttribution(text, index, content) {
    this.doc.transact(() => {
      text.insert(index, content, {
        "ai-generated": true,
        "ai-client-id": this.clientID,
        "ai-timestamp": Date.now(),
        "ai-model": this._llm?.model ?? "unknown"
      });
    }, this.clientID);
  }
  /**
   * Stream LLM tokens directly into the document as CRDT operations.
   *
   * Modes:
   * - 'insert': appends after current position
   * - 'continue': appends to end of document
   * - 'rewrite': clears existing text and rewrites
   *
   * Token cadence: tokens are batched with a 50ms flush interval for
   * smooth real-time rendering without CRDT overload.
   *
   * @returns { tokensWritten, durationMs } — tokensWritten counts
   *   characters pushed, not model tokens.
   * @throws when no LLM adapter was configured, or whatever the adapter's
   *   stream() throws (the buffer is stopped and status reset first).
   */
  async streamToDoc(yText, prompt, options = {}) {
    if (!this._llm) {
      throw new Error("[Kairo] No LLM configured. Pass llm to AICollaborator constructor.");
    }
    const startTime = Date.now();
    const mode = options.mode ?? "continue";
    let startIndex;
    if (mode === "rewrite") {
      // Note: this delete transaction carries no explicit origin.
      this.doc.transact(() => yText.delete(0, yText.length));
      startIndex = 0;
    } else if (mode === "insert" && options.insertAt !== void 0) {
      startIndex = options.insertAt;
    } else {
      startIndex = yText.length;
    }
    const buffer = new StreamBuffer(yText, this.clientID, startIndex, this._streamFlushMs);
    this.setThinking();
    let tokensWritten = 0;
    try {
      const systemPrompt = options.systemPrompt ?? "You are Kairo, an AI writing collaborator embedded in a shared document. Output clean prose only. No preamble, no meta-commentary.";
      this.setWriting();
      for await (const token of this._llm.stream(prompt, {
        systemPrompt,
        signal: options.signal
      })) {
        buffer.push(token);
        tokensWritten += token.length;
        options.onToken?.(token);
      }
      // Push any remainder before the loop's pending timer would have fired.
      buffer.flush();
    } catch (err) {
      buffer.stop();
      this.setIdle();
      throw err;
    }
    buffer.stop();
    this.setIdle();
    return { tokensWritten, durationMs: Date.now() - startTime };
  }
  /**
   * One-shot: summarize a block of text and return the result (no streaming).
   * Results are cached keyed on the full input text.
   */
  async summarize(text, options = {}) {
    if (!this._llm) throw new Error("[Kairo] No LLM configured.");
    const cached = this.cache.get(text);
    if (cached) return cached;
    this.setThinking();
    try {
      const result = await this._llm.complete(
        `Summarize the following document content concisely:

${text}`,
        {
          systemPrompt: "You are a document summarization engine. Output only the summary.",
          maxTokens: options.maxTokens ?? 300
        }
      );
      this.cache.set(text, result);
      return result;
    } finally {
      this.setIdle();
    }
  }
  // ─── Cleanup ──────────────────────────────────────────────────────────
  // Destroys all registered plugins and clears the awareness field; the
  // Y.Doc itself is not destroyed here (see KairoSession.destroy).
  destroy() {
    this._plugins.forEach((p) => p.destroy());
    this.awareness.setLocalStateField("kairo", null);
    console.log(`[Kairo] AICollaborator ${this.clientID} destroyed.`);
  }
};
|
|
356
|
+
|
|
357
|
+
// src/AutocompletePlugin.ts
|
|
358
|
+
var Y = __toESM(require("yjs"), 1);
|
|
359
|
+
|
|
360
|
+
// src/KairoPlugin.ts
|
|
361
|
+
var KairoPlugin = class {
  // Owning AICollaborator instance.
  ai;
  // Whether the plugin currently reacts to document events.
  enabled = true;
  /**
   * Base class for Kairo behaviors (autocomplete, summarization, ...).
   * Subclasses implement setup()/destroy() and are expected to honor
   * `enabled` in their event handlers.
   *
   * @param ai the AICollaborator this plugin is attached to.
   */
  constructor(ai) {
    this.ai = ai;
  }
  /**
   * Enable/Disable the plugin
   */
  setEnabled(isActive) {
    this.enabled = isActive;
  }
};
|
|
374
|
+
|
|
375
|
+
// src/AutocompletePlugin.ts
|
|
376
|
+
/**
 * AutocompletePlugin — continues the user's text when it ends with "...".
 *
 * Observes the shared type named `targetTextName` and, on a non-AI edit
 * that leaves the text ending in "...", replaces the ellipsis with a
 * streamed LLM continuation. Results are cached in the collaborator's
 * PromptCache keyed on the text preceding the trigger.
 */
var AutocompletePlugin = class extends KairoPlugin {
  // Name of the shared type observed for edits (default "content").
  targetTextName;
  // LLM adapter used to stream continuations.
  adapter;
  // Bound deep-observer so observeDeep/unobserveDeep get the same reference.
  _deepObserver;
  constructor(ai, adapter, targetTextName = "content") {
    super(ai);
    this.adapter = adapter;
    this.targetTextName = targetTextName;
    this._deepObserver = (events) => {
      for (const event of events) {
        // Only react to text-level changes, not other nested types.
        if (event instanceof Y.YTextEvent) {
          this._handleUpdate(event);
        }
      }
    };
  }
  // Attach the deep observer. NOTE(review): observes via getArray() even
  // though events handled are YTextEvents — presumably the document nests
  // Y.Text inside this array; confirm against the editor integration.
  setup() {
    try {
      const arr = this.ai.doc.getArray(this.targetTextName);
      arr.observeDeep(this._deepObserver);
    } catch {
      // Best-effort: type may not exist yet or may have a different shape.
    }
    console.log(`[Kairo] AutocompletePlugin initialized \u2014 model: ${this.adapter.model}`);
  }
  // Detach the deep observer; mirrors setup().
  destroy() {
    try {
      const arr = this.ai.doc.getArray(this.targetTextName);
      arr.unobserveDeep(this._deepObserver);
    } catch {
      // Best-effort, same as setup().
    }
  }
  // Ignore our own transactions (origin === ai.clientID); trigger when the
  // text now ends with the "..." marker. Fire-and-forget async call.
  _handleUpdate(event) {
    if (!this.enabled) return;
    if (event.transaction.origin === this.ai.clientID) return;
    const text = event.target;
    const str = text.toString();
    if (str.endsWith("...")) {
      this._generateSuggestion(text);
    }
  }
  // Replace the trailing "..." with a continuation: served from cache when
  // available, otherwise streamed token-by-token via StreamBuffer.
  async _generateSuggestion(text) {
    this.ai.setThinking();
    try {
      const currentText = text.toString();
      // Prompt is everything before the 3-char "..." trigger.
      const prompt = currentText.slice(0, -3);
      const cached = this.ai.cache.get(prompt);
      if (cached) {
        // Cached path: swap trigger for the cached continuation atomically.
        this.ai.doc.transact(() => {
          text.delete(text.length - 3, 3);
          text.insert(text.length, cached, { "ai-generated": true, "ai-client-id": this.ai.clientID });
        }, this.ai.clientID);
        return;
      }
      // Remove the trigger first, then stream the continuation in place.
      this.ai.doc.transact(() => {
        text.delete(text.length - 3, 3);
      }, this.ai.clientID);
      this.ai.setWriting();
      const streamBuffer = new StreamBuffer(text, this.ai.clientID, text.length);
      let fullResult = "";
      const systemPrompt = "You are an inline writing assistant. Continue the text naturally in the same style and voice. Output only the continuation \u2014 no preamble.";
      for await (const token of this.adapter.stream(prompt, { systemPrompt })) {
        fullResult += token;
        streamBuffer.push(token);
      }
      streamBuffer.flush();
      // Cache the full continuation for identical prompts later.
      this.ai.cache.set(prompt, fullResult);
    } catch (error) {
      // Autocomplete is best-effort: log and recover rather than crash.
      console.error("[Kairo] Autocomplete error:", error);
    } finally {
      this.ai.setIdle();
    }
  }
};
|
|
449
|
+
|
|
450
|
+
// src/SummarizationPlugin.ts
|
|
451
|
+
var Y2 = __toESM(require("yjs"), 1);
|
|
452
|
+
/**
 * SummarizationPlugin — replaces a trailing "/summarize" command with an
 * AI summary of the preceding document text.
 *
 * Observes the shared type named `targetTextName`; when a non-AI edit
 * leaves the text ending in "/summarize", the command is swapped for a
 * blockquote summary. Summaries are cached keyed on the summarized text.
 */
var SummarizationPlugin = class extends KairoPlugin {
  // Name of the shared type observed for edits (default "content").
  targetTextName;
  // LLM adapter used for the one-shot completion.
  adapter;
  // Bound deep-observer so observeDeep/unobserveDeep get the same reference.
  _deepObserver;
  constructor(ai, adapter, targetTextName = "content") {
    super(ai);
    this.adapter = adapter;
    this.targetTextName = targetTextName;
    this._deepObserver = (events) => {
      for (const event of events) {
        // Only react to text-level changes.
        if (event instanceof Y2.YTextEvent) {
          this._handleUpdate(event);
        }
      }
    };
  }
  // Attach the deep observer (best-effort; see AutocompletePlugin.setup).
  setup() {
    try {
      const arr = this.ai.doc.getArray(this.targetTextName);
      arr.observeDeep(this._deepObserver);
    } catch {
      // Type may not exist yet; observing is best-effort.
    }
    console.log(`[Kairo] SummarizationPlugin initialized \u2014 model: ${this.adapter.model}`);
  }
  // Detach the deep observer; mirrors setup().
  destroy() {
    try {
      const arr = this.ai.doc.getArray(this.targetTextName);
      arr.unobserveDeep(this._deepObserver);
    } catch {
    }
  }
  // Ignore AI-origin transactions; trigger on the "/summarize" suffix.
  _handleUpdate(event) {
    if (!this.enabled) return;
    if (event.transaction.origin === this.ai.clientID) return;
    const text = event.target;
    const str = text.toString();
    if (str.endsWith("/summarize")) {
      this._performSummarization(text);
    }
  }
  // Summarize everything before the trigger, then atomically replace the
  // trigger with a "> **AI Summary:** ..." blockquote.
  async _performSummarization(text) {
    this.ai.setThinking();
    try {
      const fullText = text.toString();
      const TRIGGER = "/summarize";
      const contentToSummarize = fullText.slice(0, -TRIGGER.length).trim();
      // Guard: refuse trivially short documents (error is caught below).
      if (contentToSummarize.length < 20) {
        throw new Error("Content too short to summarize");
      }
      const cached = this.ai.cache.get(contentToSummarize);
      const summary = cached ?? await this.adapter.complete(
        `Summarize the following document concisely in 2-3 sentences:

${contentToSummarize}`,
        {
          systemPrompt: "You are a document summarizer. Output only the summary \u2014 no preamble.",
          maxTokens: 150
        }
      );
      if (!cached) {
        this.ai.cache.set(contentToSummarize, summary);
      }
      this.ai.setWriting();
      this.ai.doc.transact(() => {
        // Delete the trigger and insert the summary at the same index.
        const idx = text.length - TRIGGER.length;
        text.delete(idx, TRIGGER.length);
        text.insert(idx, `

> **AI Summary:** ${summary.trim()}`, {
          "ai-generated": true,
          "ai-client-id": this.ai.clientID,
          "ai-timestamp": Date.now()
        });
      }, this.ai.clientID);
    } catch (error) {
      // Best-effort: log and recover; the trigger text is left in place.
      console.error("[Kairo] Summarization error:", error);
    } finally {
      this.ai.setIdle();
    }
  }
};
|
|
533
|
+
|
|
534
|
+
// src/ConflictResolver.ts
|
|
535
|
+
var Y3 = __toESM(require("yjs"), 1);
|
|
536
|
+
/**
 * ConflictResolver — observes document transactions and records when a
 * non-AI (human) transaction touches any Y.Text as a "concurrent_edit".
 *
 * NOTE(review): every human edit to a Y.Text is logged, not only edits
 * that overlap an in-flight AI write; `position` is always 0 and
 * `aiClientId` always -1 — these look like placeholders. Confirm intended
 * semantics against src/ConflictResolver.ts.
 */
var ConflictResolver = class {
  /**
   * @param doc Y.Doc to observe.
   * @param aiClientIds transaction origins to treat as AI-authored.
   */
  constructor(doc, aiClientIds = []) {
    this.doc = doc;
    this.aiClientIds = new Set(aiClientIds);
    this._observe();
  }
  doc;
  // Rolling log of conflict events, capped at 500 entries (see _emit).
  conflictLog = [];
  // Subscriber callbacks registered via onConflict().
  handlers = [];
  // Set of origins considered AI participants.
  aiClientIds;
  /** Register a client ID as an AI participant */
  registerAI(clientId) {
    this.aiClientIds.add(clientId);
  }
  /** Listen for conflict events */
  onConflict(handler) {
    this.handlers.push(handler);
  }
  /** Get the conflict log */
  getLog() {
    // Shallow copy so callers cannot mutate the internal log.
    return [...this.conflictLog];
  }
  /** Clear the conflict log */
  clearLog() {
    this.conflictLog = [];
  }
  // Hook "afterTransaction": for each changed Y.Text whose transaction
  // origin is not a registered AI id, emit a concurrent_edit event.
  _observe() {
    this.doc.on("afterTransaction", (tr) => {
      if (!tr.changed.size) return;
      tr.changed.forEach((_changedKeys, type) => {
        if (!(type instanceof Y3.Text)) return;
        const origin = tr.origin;
        const isAI = this.aiClientIds.has(origin);
        if (!isAI) {
          const event = {
            type: "concurrent_edit",
            position: 0,
            humanClientId: origin,
            aiClientId: -1,
            timestamp: Date.now()
          };
          this._emit(event);
        }
      });
    });
  }
  // Append to the capped log and notify handlers; handler exceptions are
  // swallowed so one bad subscriber cannot break the others.
  _emit(event) {
    this.conflictLog.push(event);
    if (this.conflictLog.length > 500) {
      this.conflictLog = this.conflictLog.slice(-500);
    }
    for (const handler of this.handlers) {
      try {
        handler(event);
      } catch {
      }
    }
  }
};
|
|
595
|
+
var MergePolicy = class {
  // Named strategies a consumer can choose when resolving edit conflicts.
  static HUMAN_WINS = "human_wins";
  static LAST_WRITE_WINS = "last_write_wins";
  static AI_WINS = "ai_wins";
  /**
   * True when the attribute map explicitly marks AI-authored content
   * (`"ai-generated": true`); any other value, or a missing map, is false.
   */
  static isAIGenerated(attrs) {
    if (attrs?.["ai-generated"] === true) {
      return true;
    }
    return false;
  }
  /** Classify a run of text as "ai" or "human" from its attributes. */
  static getAuthor(attrs) {
    if (this.isAIGenerated(attrs)) {
      return "ai";
    }
    return "human";
  }
};
|
|
606
|
+
|
|
607
|
+
// src/kairo.ts
|
|
608
|
+
var import_awareness = require("y-protocols/awareness");
|
|
609
|
+
var KairoSession = class {
  // Live Y.Doc backing this session.
  doc;
  // The AI collaborator attached to the document.
  ai;
  // Optional ConflictResolver (undefined when disabled at connect time).
  conflictResolver;
  // Optional format adapter used by export().
  _adapter;
  /**
   * Wraps an open document together with its AI collaborator and optional
   * format adapter / conflict resolver. Produced by Kairo.connect().
   */
  constructor(doc, ai, adapter, conflictResolver) {
    this.doc = doc;
    this.ai = ai;
    this._adapter = adapter;
    this.conflictResolver = conflictResolver;
  }
  /** Export the document back to its original format */
  async export() {
    if (!this._adapter) {
      throw new Error("[Kairo] No format adapter \u2014 cannot export.");
    }
    return this._adapter.write(this.doc);
  }
  /** Get the plain text content of the document */
  getText(key = "content") {
    return this.doc.getText(key).toString();
  }
  /** Get word count */
  wordCount() {
    const words = this.getText().match(/\S+/g);
    return words ? words.length : 0;
  }
  /** Destroy the session and free resources */
  destroy() {
    this.ai.destroy();
    this.doc.destroy();
  }
};
|
|
639
|
+
var Kairo = class {
  // Registered format adapters, keyed by format id ("docx", "pdf", ...).
  formatAdapters = new Map();
  /** Register a format adapter (DOCX, PDF, MD, HTML, GDoc, etc.) */
  registerFormatAdapter(adapter) {
    this.formatAdapters.set(adapter.format, adapter);
    return this;
  }
  /**
   * Connect to a document and start an AI session.
   * Builds (or adapts) a Y.Doc, wires awareness + AICollaborator, registers
   * the requested behavior plugins, and optionally a ConflictResolver.
   */
  async connect(options) {
    const detected = options.format ?? this._detectFormat(options.fileName ?? "");
    const formatAdapter = options.adapter ?? this.formatAdapters.get(detected);
    let doc;
    if (formatAdapter) {
      // Let the adapter construct the document from the raw content.
      doc = await formatAdapter.read(options.content);
    } else {
      // No adapter: treat content as plain text in a fresh document.
      doc = new Y4.Doc();
      const plain = typeof options.content === "string" ? options.content : options.content.toString("utf-8");
      doc.getText("content").insert(0, plain);
    }
    const awareness = new import_awareness.Awareness(doc);
    const ai = new AICollaborator(doc, awareness, {
      llm: options.llm,
      clientID: options.clientId,
      streamFlushMs: options.streamFlushMs
    });
    // Behaviors default to both plugins, but only attach when an LLM exists.
    const behaviors = options.behaviors ?? ["autocomplete", "summarize"];
    if (options.llm) {
      if (behaviors.includes("autocomplete")) {
        ai.registerPlugin(new AutocompletePlugin(ai, options.llm));
      }
      if (behaviors.includes("summarize")) {
        ai.registerPlugin(new SummarizationPlugin(ai, options.llm));
      }
    }
    let conflictResolver;
    // Conflict resolution is opt-out: only an explicit `false` disables it.
    if (options.conflictResolution !== false) {
      conflictResolver = new ConflictResolver(doc, [ai.clientID]);
    }
    return new KairoSession(doc, ai, formatAdapter, conflictResolver);
  }
  /** Open multiple documents concurrently */
  async connectAll(configs) {
    return Promise.all(configs.map((cfg) => this.connect(cfg)));
  }
  /** Map a file name's extension to a known format id ("txt" fallback). */
  _detectFormat(file) {
    if (typeof file !== "string") {
      return "txt";
    }
    const ext = file.split(".").pop()?.toLowerCase() ?? "";
    switch (ext) {
      case "docx":
        return "docx";
      case "pdf":
        return "pdf";
      case "md":
      case "markdown":
        return "markdown";
      case "html":
      case "htm":
        return "html";
      case "txt":
        return "txt";
      default:
        return "txt";
    }
  }
};
var kairo = new Kairo();
|
|
695
|
+
|
|
696
|
+
// src/adapters/OpenAIAdapter.ts
|
|
697
|
+
/**
 * OpenAIAdapter — LLM adapter backed by the OpenAI Chat Completions API.
 * Streams tokens via server-sent events; also exposes one-shot complete()
 * and embed() helpers.
 */
var OpenAIAdapter = class {
  provider = "openai";
  model;
  // Read from options or OPENAI_API_KEY; empty string when neither is set
  // (requests will then fail with an auth error from the API).
  apiKey;
  baseUrl;
  /**
   * @param options { apiKey?, model? (default "gpt-4o-mini"), baseUrl? }.
   */
  constructor(options = {}) {
    this.apiKey = options.apiKey ?? process.env["OPENAI_API_KEY"] ?? "";
    this.model = options.model ?? "gpt-4o-mini";
    this.baseUrl = options.baseUrl ?? "https://api.openai.com/v1";
  }
  /**
   * Stream completion tokens for `prompt` as an async generator.
   * Accepts { systemPrompt?, maxTokens?, temperature?, signal? }.
   * @throws on a non-2xx response (body text included in the message).
   */
  async *stream(prompt, options = {}) {
    const response = await fetch(`${this.baseUrl}/chat/completions`, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${this.apiKey}`,
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        model: this.model,
        // System message is only included when a systemPrompt was given.
        messages: [
          ...options.systemPrompt ? [{ role: "system", content: options.systemPrompt }] : [],
          { role: "user", content: prompt }
        ],
        max_tokens: options.maxTokens ?? 2e3,
        temperature: options.temperature ?? 0.7,
        stream: true
      }),
      signal: options.signal
    });
    if (!response.ok) {
      const err = await response.text();
      throw new Error(`[Kairo/OpenAI] API error ${response.status}: ${err}`);
    }
    // NOTE(review): assumes response.body is non-null on a 2xx streaming
    // response — true for fetch against this endpoint, but not guarded.
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let buf = "";
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buf += decoder.decode(value, { stream: true });
        // SSE frames are newline-delimited; keep the trailing partial line
        // in `buf` until the next chunk completes it.
        const lines = buf.split("\n");
        buf = lines.pop() ?? "";
        for (const line of lines) {
          const t = line.trim();
          if (!t || t === "data: [DONE]") continue;
          if (!t.startsWith("data: ")) continue;
          try {
            const json = JSON.parse(t.slice(6));
            const token = json.choices?.[0]?.delta?.content;
            if (token) yield token;
          } catch {
            // Ignore malformed/partial JSON frames.
          }
        }
      }
    } finally {
      reader.releaseLock();
    }
  }
  /** One-shot completion: concatenates the streamed tokens. */
  async complete(prompt, options = {}) {
    let out = "";
    for await (const t of this.stream(prompt, options)) out += t;
    return out;
  }
  /**
   * Embed `text` with text-embedding-3-small; returns the embedding vector.
   * @throws on a non-2xx response.
   */
  async embed(text) {
    const r = await fetch(`${this.baseUrl}/embeddings`, {
      method: "POST",
      headers: { Authorization: `Bearer ${this.apiKey}`, "Content-Type": "application/json" },
      body: JSON.stringify({ model: "text-embedding-3-small", input: text })
    });
    if (!r.ok) throw new Error(`[Kairo/OpenAI] Embed error ${r.status}`);
    const json = await r.json();
    return json.data[0].embedding;
  }
};
|
|
772
|
+
|
|
773
|
+
// src/adapters/AnthropicAdapter.ts
|
|
774
|
+
/**
 * AnthropicAdapter — LLM adapter backed by the Anthropic Messages API.
 * Streams tokens via server-sent events ("content_block_delta" frames).
 */
var AnthropicAdapter = class {
  provider = "anthropic";
  model;
  // Read from options or ANTHROPIC_API_KEY; empty string when neither is
  // set (requests will then fail with an auth error from the API).
  apiKey;
  /**
   * @param options { apiKey?, model? (default "claude-3-5-sonnet-20241022") }.
   */
  constructor(options = {}) {
    this.apiKey = options.apiKey ?? process.env["ANTHROPIC_API_KEY"] ?? "";
    this.model = options.model ?? "claude-3-5-sonnet-20241022";
  }
  /**
   * Stream completion tokens for `prompt` as an async generator.
   * Accepts { systemPrompt?, maxTokens?, signal? }.
   * @throws on a non-2xx response (body text included in the message).
   */
  async *stream(prompt, options = {}) {
    const response = await fetch("https://api.anthropic.com/v1/messages", {
      method: "POST",
      headers: {
        "x-api-key": this.apiKey,
        "anthropic-version": "2023-06-01",
        "content-type": "application/json"
      },
      body: JSON.stringify({
        model: this.model,
        max_tokens: options.maxTokens ?? 2e3,
        system: options.systemPrompt ?? "You are Kairo, an AI document collaborator.",
        messages: [{ role: "user", content: prompt }],
        stream: true
      }),
      signal: options.signal
    });
    if (!response.ok) {
      const err = await response.text();
      throw new Error(`[Kairo/Anthropic] API error ${response.status}: ${err}`);
    }
    // NOTE(review): assumes response.body is non-null on a 2xx streaming
    // response — not explicitly guarded.
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let buf = "";
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buf += decoder.decode(value, { stream: true });
        // SSE frames are newline-delimited; keep the trailing partial line
        // in `buf` until the next chunk completes it.
        const lines = buf.split("\n");
        buf = lines.pop() ?? "";
        for (const line of lines) {
          const t = line.trim();
          if (!t.startsWith("data: ")) continue;
          try {
            const json = JSON.parse(t.slice(6));
            // Only text deltas carry tokens; other event types are skipped.
            if (json.type === "content_block_delta" && json.delta?.text) {
              yield json.delta.text;
            }
          } catch {
            // Ignore malformed/partial JSON frames.
          }
        }
      }
    } finally {
      reader.releaseLock();
    }
  }
  /** One-shot completion: concatenates the streamed tokens. */
  async complete(prompt, options = {}) {
    let out = "";
    for await (const t of this.stream(prompt, options)) out += t;
    return out;
  }
};
|
|
835
|
+
|
|
836
|
+
// src/adapters/OllamaAdapter.ts
|
|
837
|
+
// src/adapters/OllamaAdapter.ts
var OllamaAdapter = class {
  provider = "ollama";
  model;
  baseUrl;
  /**
   * @param {{model?: string, baseUrl?: string}} [options]
   * Defaults to the local Ollama daemon and the "llama3.2" model.
   */
  constructor(options = {}) {
    this.model = options.model ?? "llama3.2";
    this.baseUrl = options.baseUrl ?? "http://localhost:11434";
  }
  /**
   * Stream generated tokens from Ollama's /api/generate NDJSON endpoint.
   *
   * FIX: the response is newline-delimited JSON, but reader chunks can split a
   * JSON line across reads. The previous code decoded each chunk in isolation
   * (no `{ stream: true }`, no carry buffer), so any frame split across chunks
   * failed JSON.parse and its tokens were silently dropped. We now carry the
   * trailing partial line between reads — matching the other adapters.
   * @throws {Error} when the HTTP response is not ok.
   */
  async *stream(prompt, options = {}) {
    const fullPrompt = options.systemPrompt ? `${options.systemPrompt}\n\nUser: ${prompt}\nAssistant:` : prompt;
    const response = await fetch(`${this.baseUrl}/api/generate`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        model: this.model,
        prompt: fullPrompt,
        stream: true,
        options: {
          num_predict: options.maxTokens ?? 2e3,
          temperature: options.temperature ?? 0.7
        }
      }),
      signal: options.signal
    });
    if (!response.ok) {
      throw new Error(`[Kairo/Ollama] Error ${response.status}. Is Ollama running at ${this.baseUrl}?`);
    }
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let buf = "";
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buf += decoder.decode(value, { stream: true });
        const lines = buf.split("\n");
        buf = lines.pop() ?? ""; // keep the (possibly partial) last line for the next read
        for (const line of lines) {
          if (!line.trim()) continue;
          try {
            const json = JSON.parse(line);
            if (json.response) yield json.response;
            if (json.done) return;
          } catch {
            // Malformed frame: skip it rather than abort the whole stream.
          }
        }
      }
      // Flush a final complete line that arrived without a trailing newline.
      if (buf.trim()) {
        try {
          const json = JSON.parse(buf);
          if (json.response) yield json.response;
        } catch {
          // Incomplete trailing data; nothing more to yield.
        }
      }
    } finally {
      reader.releaseLock();
    }
  }
  /** Collect the full streamed response into one string. */
  async complete(prompt, options = {}) {
    let out = "";
    for await (const t of this.stream(prompt, options)) out += t;
    return out;
  }
  /** Check if Ollama is running and model is available */
  async isAvailable() {
    try {
      const r = await fetch(`${this.baseUrl}/api/tags`, { signal: AbortSignal.timeout(2e3) });
      if (!r.ok) return false;
      const json = await r.json();
      // Optional-chain so a malformed /api/tags payload counts as unavailable
      // instead of throwing into the catch below (same result, clearer intent).
      return json.models?.some((m) => m.name.startsWith(this.model)) ?? false;
    } catch {
      return false;
    }
  }
};
|
|
905
|
+
|
|
906
|
+
// src/adapters/GeminiAdapter.ts
|
|
907
|
+
// src/adapters/GeminiAdapter.ts
var GeminiAdapter = class {
  provider = "gemini";
  model;
  apiKey;
  /**
   * @param {{apiKey?: string, model?: string}} [options]
   * Falls back to the GOOGLE_AI_API_KEY env var; defaults to gemini-2.0-flash.
   */
  constructor(options = {}) {
    this.apiKey = options.apiKey ?? process.env["GOOGLE_AI_API_KEY"] ?? "";
    this.model = options.model ?? "gemini-2.0-flash";
  }
  /**
   * Stream text chunks from the streamGenerateContent SSE endpoint.
   * A system prompt, when given, is prepended to the single user turn.
   * @throws {Error} when the HTTP response is not ok.
   */
  async *stream(prompt, options = {}) {
    const url = `https://generativelanguage.googleapis.com/v1beta/models/${this.model}:streamGenerateContent?alt=sse&key=${this.apiKey}`;
    const userText = options.systemPrompt ? `${options.systemPrompt}\n\n${prompt}` : prompt;
    const contents = [{ role: "user", parts: [{ text: userText }] }];
    const response = await fetch(url, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        contents,
        generationConfig: {
          maxOutputTokens: options.maxTokens ?? 2e3,
          temperature: options.temperature ?? 0.7
        }
      }),
      signal: options.signal
    });
    if (!response.ok) {
      throw new Error(`[Kairo/Gemini] Error ${response.status}: ${await response.text()}`);
    }
    const bodyReader = response.body.getReader();
    const decoder = new TextDecoder();
    let carry = "";
    try {
      for (;;) {
        const { done, value } = await bodyReader.read();
        if (done) break;
        // Buffer a possibly-partial trailing line between reads.
        carry += decoder.decode(value, { stream: true });
        const rows = carry.split("\n");
        carry = rows.pop() ?? "";
        for (const row of rows) {
          if (!row.startsWith("data: ")) continue;
          try {
            const payload = JSON.parse(row.slice(6));
            const chunk = payload.candidates?.[0]?.content?.parts?.[0]?.text;
            if (chunk) yield chunk;
          } catch {
            // Skip non-JSON SSE frames.
          }
        }
      }
    } finally {
      bodyReader.releaseLock();
    }
  }
  /** Concatenate the streamed chunks into the full completion. */
  async complete(prompt, options = {}) {
    const pieces = [];
    for await (const piece of this.stream(prompt, options)) pieces.push(piece);
    return pieces.join("");
  }
};
|
|
965
|
+
|
|
966
|
+
// src/MockLLMAdapter.ts
|
|
967
|
+
// src/MockLLMAdapter.ts
var MockLLMAdapter = class {
  provider = "mock";
  model;
  delay;
  /**
   * Deterministic, offline stand-in for a real LLM adapter (tests/demos).
   * @param {{model?: string, delay?: number}} [options] - delay is the total
   *   simulated latency in ms (spread across words when streaming).
   */
  constructor(options = {}) {
    this.model = options.model ?? "mock-gpt";
    this.delay = options.delay ?? 100;
  }
  /** Stream the canned response word-by-word, spreading `delay` across words. */
  async *stream(prompt, options) {
    const response = this._respond(prompt);
    const words = response.split(" ");
    for (let i = 0; i < words.length; i++) {
      await new Promise((r) => setTimeout(r, this.delay / words.length));
      yield words[i] + (i < words.length - 1 ? " " : "");
    }
  }
  /** Resolve with the full canned response after `delay` ms. */
  async complete(prompt, options) {
    await new Promise((r) => setTimeout(r, this.delay));
    return this._respond(prompt);
  }
  /**
   * Deterministic 128-dim pseudo-embedding derived from char codes.
   * FIX: empty input previously produced an all-NaN vector ("".charCodeAt(...)
   * is NaN, so every Math.sin was NaN); it now returns a zero vector instead.
   * Non-empty inputs are unchanged.
   * @returns {Promise<number[]>}
   */
  async embed(text) {
    if (text.length === 0) return new Array(128).fill(0);
    return Array.from({ length: 128 }, (_, i) => Math.sin(text.charCodeAt(i % text.length) * (i + 1)));
  }
  /** Keyword-routed canned responses; the fallback echoes the prompt prefix. */
  _respond(prompt) {
    const p = prompt.toLowerCase();
    if (p.includes("summar")) return "This document discusses key concepts with clarity and precision.";
    if (p.includes("improve") || p.includes("rewrite")) return "The refined version presents ideas more effectively.";
    if (p.includes("continue")) return " Furthermore, the analysis reveals several important implications.";
    return `[Mock AI response to: "${prompt.slice(0, 60)}..."]`;
  }
};
|
|
998
|
+
|
|
999
|
+
// src/canonical-model.ts
|
|
1000
|
+
var Y5 = __toESM(require("yjs"), 1);
|
|
1001
|
+
var CanonicalDoc = class {
  yDoc;
  /**
   * Wrap a Y.Doc, eagerly materializing the shared "metadata" map and
   * "content" array so the getters below always return the same instances.
   */
  constructor(yDoc) {
    this.yDoc = yDoc;
    this.yDoc.getMap("metadata");
    this.yDoc.getArray("content");
  }
  /** Shared document-level metadata map. */
  get metadata() {
    return this.yDoc.getMap("metadata");
  }
  /** Shared ordered array of block maps. */
  get content() {
    return this.yDoc.getArray("content");
  }
  // ─── Block Factory ─────────────────────────────────────────────────
  /**
   * Add a block to the document.
   * Returns the Y.Text of the block so callers can further instrument it.
   */
  addBlock(type, text = "", meta) {
    const entry = new Y5.Map();
    const body = new Y5.Text(text);
    // Timestamp + random suffix keeps ids unique enough for block lookup.
    const blockId = `blk_${Date.now().toString(36)}_${Math.random().toString(36).slice(2, 7)}`;
    entry.set("type", type);
    entry.set("id", blockId);
    entry.set("text", body);
    if (meta) {
      const metaMap = new Y5.Map();
      for (const [key, val] of Object.entries(meta)) {
        metaMap.set(key, val);
      }
      entry.set("meta", metaMap);
    }
    this.content.push([entry]);
    return body;
  }
  /** Convenience: add a paragraph block */
  addParagraph(text = "") {
    return this.addBlock("p", text);
  }
  /** Convenience: add a heading block */
  addHeading(level, text) {
    return this.addBlock(`h${level}`, text);
  }
  /** Set or update metadata on an existing block's Y.Text */
  setBlockMeta(yText, meta) {
    const owner = this.content.toArray().find((b) => b.get("text") === yText);
    if (!owner) return; // no block holds this Y.Text; silently ignore, as before
    let metaMap = owner.get("meta");
    if (!metaMap) {
      metaMap = new Y5.Map();
      owner.set("meta", metaMap);
    }
    for (const [key, val] of Object.entries(meta)) {
      metaMap.set(key, val);
    }
  }
  /** Get block by index */
  getBlock(index) {
    return this.content.get(index);
  }
  /** Find block by id */
  findById(id) {
    return this.content.toArray().find((b) => b.get("id") === id);
  }
  /** Get all text content as plain string */
  toPlainText() {
    const parts = [];
    for (const block of this.content.toArray()) {
      const body = block.get("text");
      const str = body?.toString() ?? "";
      if (str) parts.push(str);
    }
    return parts.join("\n\n");
  }
  /** Get document stats */
  stats() {
    const text = this.toPlainText();
    return {
      blockCount: this.content.length,
      charCount: text.length,
      wordCount: text.split(/\s+/).filter(Boolean).length
    };
  }
  /** Get AI-contributed character count */
  aiContributions() {
    let charCount = 0;
    let blockCount = 0;
    for (const block of this.content.toArray()) {
      const body = block.get("text");
      if (!body) continue;
      let touchedByAI = false;
      // NOTE(review): assumes delta.insert is a string (plain text runs);
      // embedded-object inserts would make `.length` undefined — confirm.
      for (const run of body.toDelta()) {
        if (run.attributes?.["ai-generated"] && run.insert) {
          charCount += run.insert.length;
          touchedByAI = true;
        }
      }
      if (touchedByAI) blockCount++;
    }
    return { charCount, blockCount };
  }
};
|
|
1104
|
+
|
|
1105
|
+
// src/docling-client.ts
|
|
1106
|
+
var import_child_process = require("child_process");
|
|
1107
|
+
var import_path = __toESM(require("path"), 1);
|
|
1108
|
+
var import_url = require("url");
|
|
1109
|
+
// Resolve the directory of this module. In the CJS build __dirname exists, so
// the fallback branch is normally dead code.
// NOTE(review): `importMetaUrl` is a free variable here (presumably injected by
// the bundler for the ESM build) — if it is not defined earlier in this file,
// the fallback would throw a ReferenceError. Confirm against the full bundle.
var getDirname = () => {
  if (typeof __dirname === "undefined") {
    return import_path.default.dirname((0, import_url.fileURLToPath)(importMetaUrl));
  }
  return __dirname;
};
var _dirname = getDirname();
var BRIDGE_PATH = import_path.default.join(_dirname, "../scripts/docling_bridge.py");
|
|
1115
|
+
var DoclingClient = class {
  /**
   * Convert a document by shelling out to the Python docling bridge script.
   * Resolves with the parsed JSON the script prints to stdout; rejects on a
   * non-zero exit, a JSON `error` field, unparseable output, or spawn failure.
   *
   * NOTE(review): scripts/docling_bridge.py appears to have been removed from
   * the published package in this version — confirm BRIDGE_PATH still resolves
   * at runtime.
   * @param {string} filePathOrBase64 - path or base64 payload passed as argv[1].
   */
  async convert(filePathOrBase64) {
    return new Promise((resolve, reject) => {
      const pythonProcess = (0, import_child_process.spawn)("python", [BRIDGE_PATH, filePathOrBase64]);
      let output = "";
      let error = "";
      pythonProcess.stdout.on("data", (data) => {
        output += data.toString();
      });
      pythonProcess.stderr.on("data", (data) => {
        error += data.toString();
      });
      // FIX: without an "error" handler, a failed spawn (e.g. python not on
      // PATH) emitted an unhandled 'error' event and the promise never settled.
      pythonProcess.on("error", (err) => {
        reject(new Error(`Failed to spawn python: ${err.message}`, { cause: err }));
      });
      pythonProcess.on("close", (code) => {
        if (code !== 0) {
          reject(new Error(`Python process exited with code ${code}: ${error}`));
          return;
        }
        try {
          const result = JSON.parse(output);
          if (result.error) {
            reject(new Error(result.error));
          } else {
            resolve(result);
          }
        } catch (e) {
          // Preserve the parse failure as the cause instead of discarding it.
          reject(new Error(`Failed to parse Python output: ${output}`, { cause: e }));
        }
      });
    });
  }
};
|
|
1146
|
+
// Annotate the CommonJS export names for ESM import in node:
// The `0 && (module.exports = { ... })` expression below is dead code at
// runtime; Node pattern-matches this exact shape to discover named exports
// when the CJS bundle is imported from ESM. It is emitted by the bundler —
// do not hand-edit or restructure it.
0 && (module.exports = {
  AICollaborator,
  AnthropicAdapter,
  AutocompletePlugin,
  CanonicalDoc,
  ConflictResolver,
  DoclingClient,
  GeminiAdapter,
  Kairo,
  KairoSession,
  MergePolicy,
  MockLLMAdapter,
  OllamaAdapter,
  OpenAIAdapter,
  PromptCache,
  StreamBuffer,
  SuggestionManager,
  SummarizationPlugin,
  kairo
});
|