@docscode/cli 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +229 -0
- package/package.json +19 -0
- package/src/index.ts +268 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,229 @@
|
|
|
1
|
+
#!/usr/bin/env node

// src/index.ts (compiled)
// Kairo CLI entry point: `kairo init | create-adapter | connect | mcp`.
// NOTE(review): this build previously referenced the `@kairo/*` scope while
// src/index.ts and the tsup externals use `@docscode/*`; aligned to `@docscode/*`.
import { Command } from "commander";
import chalk from "chalk";
import fs from "fs";
import path from "path";
var program = new Command();
program.name("kairo").description("Kairo \u2014 Universal Document AI. Add an AI peer to any document in one line.").version("1.0.0");

// Format an unknown caught value for display without assuming it is an Error.
function describeError(e) {
  return e instanceof Error ? e.stack ?? e.message : String(e);
}

// kairo init [name] \u2014 scaffold a new project wired to @docscode packages.
program.command("init [name]").description("Initialize a new Kairo project").option("--llm <provider>", "Default LLM provider (openai|anthropic|ollama|gemini)", "openai").action((name = "my-kairo-project", opts) => {
  const dir = name === "." ? process.cwd() : path.join(process.cwd(), name);
  if (name !== "." && !fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
  console.log(chalk.cyan("\n\u{1F300} Initializing Kairo project...\n"));
  const pkg = {
    // "." is not a valid npm package name; use the directory name instead.
    name: name === "." ? path.basename(process.cwd()) : name,
    version: "1.0.0",
    type: "module",
    scripts: {
      start: "node src/index.js",
      dev: "tsx src/index.ts"
    },
    dependencies: {
      "@docscode/core": "latest",
      "@docscode/adapter-markdown": "latest",
      "@docscode/adapter-docx": "latest",
      "yjs": "^13.6.30"
    },
    devDependencies: { "typescript": "^5.0.0", "tsx": "^4.0.0" }
  };
  const llmImport = {
    openai: "import { OpenAIAdapter } from '@docscode/core';",
    anthropic: "import { AnthropicAdapter } from '@docscode/core';",
    ollama: "import { OllamaAdapter } from '@docscode/core';",
    gemini: "import { GeminiAdapter } from '@docscode/core';"
  };
  const llmInit = {
    openai: "new OpenAIAdapter() // Reads OPENAI_API_KEY from env",
    anthropic: "new AnthropicAdapter() // Reads ANTHROPIC_API_KEY from env",
    ollama: "new OllamaAdapter() // Local model, no API key needed",
    gemini: "new GeminiAdapter() // Reads GOOGLE_AI_API_KEY from env"
  };
  // Unknown providers silently fall back to openai (matches the option help text).
  const provider = opts.llm in llmImport ? opts.llm : "openai";
  const entrypoint = `${llmImport[provider]}
import { kairo } from '@docscode/core';
import { MarkdownAdapter } from '@docscode/adapter-markdown';
import fs from 'fs';

// Register format adapters
kairo.registerFormatAdapter(new MarkdownAdapter());

// Connect to your document
const fileContent = fs.readFileSync('document.md', 'utf-8');
const session = await kairo.connect({
  content: fileContent,
  fileName: 'document.md',
  llm: ${llmInit[provider]},
});

// Get a reference to the document text
const yText = session.doc.getText('content');

// Ask the AI to collaborate
await session.ai.streamToDoc(yText, 'Improve the clarity of this document');

// Export the result
const output = await session.export();
fs.writeFileSync('document-revised.md', output);

console.log('Done! AI has collaborated on your document.');
session.destroy();
`;
  if (!fs.existsSync(path.join(dir, "src"))) fs.mkdirSync(path.join(dir, "src"));
  fs.writeFileSync(path.join(dir, "package.json"), JSON.stringify(pkg, null, 2));
  fs.writeFileSync(path.join(dir, "src", "index.ts"), entrypoint);
  fs.writeFileSync(path.join(dir, "document.md"), "# My Document\n\nStart writing here...\n");
  fs.writeFileSync(path.join(dir, ".env.example"), `# Add your API key here
OPENAI_API_KEY=sk-...
ANTHROPIC_API_KEY=sk-ant-...
GOOGLE_AI_API_KEY=...
`);
  console.log(chalk.green("\u2705 Project initialized!\n"));
  console.log(chalk.white(`Next steps:
`));
  if (name !== ".") console.log(chalk.white(`  cd ${name}`));
  console.log(chalk.white(`  npm install`));
  console.log(chalk.white(`  # Add your API key to .env`));
  console.log(chalk.white(`  npx tsx src/index.ts
`));
});

// kairo create-adapter <name> \u2014 scaffold a format-adapter package skeleton.
program.command("create-adapter <name>").description("Scaffold a new Kairo format adapter").option("--dir <path>", "Output directory", "packages/adapters").action((name, opts) => {
  const adapterName = name.toLowerCase();
  const className = adapterName.charAt(0).toUpperCase() + adapterName.slice(1) + "Adapter";
  const dir = path.join(process.cwd(), opts.dir, `adapter-${adapterName}`);
  const srcDir = path.join(dir, "src");
  if (!fs.existsSync(srcDir)) fs.mkdirSync(srcDir, { recursive: true });
  const pkg = {
    name: `@docscode/adapter-${adapterName}`,
    version: "1.0.0",
    type: "module",
    main: "dist/index.cjs",
    module: "dist/index.js",
    types: "dist/index.d.ts",
    exports: {
      ".": { import: "./dist/index.js", require: "./dist/index.cjs" }
    },
    peerDependencies: { "@docscode/core": "*", "yjs": "*" }
  };
  const tsconfig = {
    extends: "../../../../tsconfig.json",
    compilerOptions: { outDir: "./dist", rootDir: "./src" },
    include: ["src"]
  };
  const source = `import * as Y from 'yjs';
import type { FormatAdapter } from '@docscode/core';
import { CanonicalDoc } from '@docscode/core';

/**
 * ${className} \u2014 Kairo format adapter for .${adapterName} files.
 *
 * Implement read() to parse ${adapterName} \u2192 Y.Doc.
 * Implement write() to serialize Y.Doc \u2192 ${adapterName} Buffer.
 */
export class ${className} implements FormatAdapter {
  readonly format = '${adapterName}';

  async read(source: Buffer | string): Promise<Y.Doc> {
    const yDoc = new Y.Doc();
    const canonical = new CanonicalDoc(yDoc);

    // TODO: Parse ${adapterName} format and call canonical.addBlock(...)
    // Example:
    // canonical.addParagraph('Hello from ${adapterName}!');

    const content = Buffer.isBuffer(source) ? source.toString('utf-8') : source;
    canonical.addParagraph(content);

    return yDoc;
  }

  async write(doc: Y.Doc): Promise<Buffer> {
    const canonical = new CanonicalDoc(doc);

    // TODO: Serialize blocks back to ${adapterName} format
    const text = canonical.toPlainText();
    return Buffer.from(text, 'utf-8');
  }
}
`;
  const readme = `# @docscode/adapter-${adapterName}

Kairo format adapter for \`.${adapterName}\` files.

## Usage

\`\`\`typescript
import { kairo } from '@docscode/core';
import { ${className} } from '@docscode/adapter-${adapterName}';

kairo.registerFormatAdapter(new ${className}());

const session = await kairo.connect({ file: 'document.${adapterName}', llm });
\`\`\`

## Development

Implement \`read()\` and \`write()\` in \`src/index.ts\`.
`;
  fs.writeFileSync(path.join(dir, "package.json"), JSON.stringify(pkg, null, 2));
  fs.writeFileSync(path.join(dir, "tsconfig.json"), JSON.stringify(tsconfig, null, 2));
  fs.writeFileSync(path.join(srcDir, "index.ts"), source);
  fs.writeFileSync(path.join(dir, "README.md"), readme);
  console.log(chalk.green(`
\u2705 Adapter scaffolded: @docscode/adapter-${adapterName}
`));
  console.log(chalk.white(`  ${path.join(dir, "src", "index.ts")}`));
  console.log(chalk.white(`
Implement read() and write() then publish to npm.
`));
});

// kairo connect <file> \u2014 quick smoke test: open a document and report stats.
program.command("connect <file>").description("Quick-connect to a document and show AI analysis").option("--model <model>", "Ollama model to use (local)", "llama3.2").option("--cloud", "Use OpenAI instead of Ollama").action(async (file, opts) => {
  if (!fs.existsSync(file)) {
    console.error(chalk.red(`File not found: ${file}`));
    process.exit(1);
  }
  console.log(chalk.cyan(`
\u{1F300} Kairo connecting to ${path.basename(file)}...
`));
  try {
    const { kairo } = await import("@docscode/core");
    const { MarkdownAdapter } = await import("@docscode/adapter-markdown");
    let llm;
    if (opts.cloud) {
      const { OpenAIAdapter } = await import("@docscode/core");
      llm = new OpenAIAdapter();
      console.log(chalk.yellow("  Using OpenAI GPT-4o-mini"));
    } else {
      const { OllamaAdapter } = await import("@docscode/core");
      llm = new OllamaAdapter({ model: opts.model });
      console.log(chalk.yellow(`  Using Ollama (${opts.model})`));
    }
    kairo.registerFormatAdapter(new MarkdownAdapter());
    const rawContent = fs.readFileSync(file, "utf-8");
    const session = await kairo.connect({ content: rawContent, fileName: file, llm });
    const { CanonicalDoc } = await import("@docscode/core");
    const canonical = new CanonicalDoc(session.doc);
    const stats = canonical.stats();
    const wordCount = Math.ceil(stats.charCount / 5); // approximate words
    console.log(chalk.green(`
\u2705 Connected!`));
    console.log(chalk.white(`  Words: ~${wordCount}`));
    console.log(chalk.white(`  Format: ${path.extname(file).slice(1)}`));
    // Use the public accessor when available instead of poking at a private field.
    const aiStatus = typeof session.ai.getStatus === "function" ? session.ai.getStatus() ?? "idle" : "idle";
    console.log(chalk.white(`
  AI peer status: ${aiStatus}`));
    console.log(chalk.cyan(`
  Use kairo mcp to expose this as an MCP server for Claude/Cursor.
`));
    session.destroy();
  } catch (e) {
    console.error(chalk.red(`Error: ${describeError(e)}`));
  }
});

// kairo mcp \u2014 hand off to the MCP server package. Logs go to stderr because
// stdout is reserved for the MCP stdio transport.
program.command("mcp").description("Start the Kairo MCP Server for Claude Desktop / Cursor / Windsurf").action(async () => {
  console.error(chalk.magenta("\u{1F680} Starting Kairo MCP Server..."));
  const { default: run } = await import("@docscode/mcp-server");
  if (run) await run();
});
program.parse();
|
package/package.json
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@docscode/cli",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "Kairo CLI Tool",
|
|
5
|
+
"main": "./dist/index.cjs",
|
|
6
|
+
"module": "./dist/index.js",
|
|
7
|
+
"type": "module",
|
|
8
|
+
"bin": {
|
|
9
|
+
"kairo": "./dist/index.js"
|
|
10
|
+
},
|
|
11
|
+
"scripts": {
|
|
12
|
+
"build": "tsup src/index.ts --format esm --clean --external @docscode/core --external @docscode/adapter-markdown --external @docscode/adapter-docx --external @docscode/mcp-server"
|
|
13
|
+
},
|
|
14
|
+
"dependencies": {
|
|
15
|
+
"commander": "^14.0.3",
|
|
16
|
+
"chalk": "^5.6.2",
|
|
17
|
+
"fs-extra": "^11.2.0"
|
|
18
|
+
}
|
|
19
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { Command } from 'commander';
|
|
3
|
+
import chalk from 'chalk';
|
|
4
|
+
import fs from 'fs';
|
|
5
|
+
import path from 'path';
|
|
6
|
+
import { fileURLToPath } from 'url';
|
|
7
|
+
|
|
8
|
+
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
9
|
+
const program = new Command();
|
|
10
|
+
|
|
11
|
+
// ─── Kairo CLI ────────────────────────────────────────────────────────────────
|
|
12
|
+
// The fastest way to go from document to AI-collaborative session.
|
|
13
|
+
// ─────────────────────────────────────────────────────────────────────────────
|
|
14
|
+
|
|
15
|
+
program
|
|
16
|
+
.name('kairo')
|
|
17
|
+
.description('Kairo — Universal Document AI. Add an AI peer to any document in one line.')
|
|
18
|
+
.version('1.0.0');
|
|
19
|
+
|
|
20
|
+
// ── kairo init ────────────────────────────────────────────────────────────────
|
|
21
|
+
program
|
|
22
|
+
.command('init [name]')
|
|
23
|
+
.description('Initialize a new Kairo project')
|
|
24
|
+
.option('--llm <provider>', 'Default LLM provider (openai|anthropic|ollama|gemini)', 'openai')
|
|
25
|
+
.action((name: string = 'my-kairo-project', opts: any) => {
|
|
26
|
+
const dir = name === '.' ? process.cwd() : path.join(process.cwd(), name);
|
|
27
|
+
if (name !== '.' && !fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
|
|
28
|
+
|
|
29
|
+
console.log(chalk.cyan('\n🌀 Initializing Kairo project...\n'));
|
|
30
|
+
|
|
31
|
+
const pkg = {
|
|
32
|
+
name,
|
|
33
|
+
version: '1.0.0',
|
|
34
|
+
type: 'module',
|
|
35
|
+
scripts: {
|
|
36
|
+
start: 'node src/index.js',
|
|
37
|
+
dev: 'tsx src/index.ts',
|
|
38
|
+
},
|
|
39
|
+
dependencies: {
|
|
40
|
+
'@docscode/core': 'latest',
|
|
41
|
+
'@docscode/adapter-markdown': 'latest',
|
|
42
|
+
'@docscode/adapter-docx': 'latest',
|
|
43
|
+
'yjs': '^13.6.30',
|
|
44
|
+
},
|
|
45
|
+
devDependencies: { 'typescript': '^5.0.0', 'tsx': '^4.0.0' },
|
|
46
|
+
};
|
|
47
|
+
|
|
48
|
+
const llmImport: Record<string, string> = {
|
|
49
|
+
openai: "import { OpenAIAdapter } from '@docscode/core';",
|
|
50
|
+
anthropic: "import { AnthropicAdapter } from '@docscode/core';",
|
|
51
|
+
ollama: "import { OllamaAdapter } from '@docscode/core';",
|
|
52
|
+
gemini: "import { GeminiAdapter } from '@docscode/core';",
|
|
53
|
+
};
|
|
54
|
+
|
|
55
|
+
const llmInit: Record<string, string> = {
|
|
56
|
+
openai: "new OpenAIAdapter() // Reads OPENAI_API_KEY from env",
|
|
57
|
+
anthropic: "new AnthropicAdapter() // Reads ANTHROPIC_API_KEY from env",
|
|
58
|
+
ollama: "new OllamaAdapter() // Local model, no API key needed",
|
|
59
|
+
gemini: "new GeminiAdapter() // Reads GOOGLE_AI_API_KEY from env",
|
|
60
|
+
};
|
|
61
|
+
|
|
62
|
+
const provider = opts.llm in llmImport ? opts.llm : 'openai';
|
|
63
|
+
|
|
64
|
+
const entrypoint = `${llmImport[provider]}
|
|
65
|
+
import { kairo } from '@docscode/core';
|
|
66
|
+
import { MarkdownAdapter } from '@docscode/adapter-markdown';
|
|
67
|
+
import fs from 'fs';
|
|
68
|
+
|
|
69
|
+
// Register format adapters
|
|
70
|
+
kairo.registerFormatAdapter(new MarkdownAdapter());
|
|
71
|
+
|
|
72
|
+
// Connect to your document
|
|
73
|
+
const fileContent = fs.readFileSync('document.md', 'utf-8');
|
|
74
|
+
const session = await kairo.connect({
|
|
75
|
+
content: fileContent,
|
|
76
|
+
fileName: 'document.md',
|
|
77
|
+
llm: ${llmInit[provider]},
|
|
78
|
+
});
|
|
79
|
+
|
|
80
|
+
// Get a reference to the document text
|
|
81
|
+
const yText = session.doc.getText('content');
|
|
82
|
+
|
|
83
|
+
// Ask the AI to collaborate
|
|
84
|
+
await session.ai.streamToDoc(yText, 'Improve the clarity of this document');
|
|
85
|
+
|
|
86
|
+
// Export the result
|
|
87
|
+
const output = await session.export();
|
|
88
|
+
fs.writeFileSync('document-revised.md', output);
|
|
89
|
+
|
|
90
|
+
console.log('Done! AI has collaborated on your document.');
|
|
91
|
+
session.destroy();
|
|
92
|
+
`;
|
|
93
|
+
|
|
94
|
+
if (!fs.existsSync(path.join(dir, 'src'))) fs.mkdirSync(path.join(dir, 'src'));
|
|
95
|
+
fs.writeFileSync(path.join(dir, 'package.json'), JSON.stringify(pkg, null, 2));
|
|
96
|
+
fs.writeFileSync(path.join(dir, 'src', 'index.ts'), entrypoint);
|
|
97
|
+
fs.writeFileSync(path.join(dir, 'document.md'), '# My Document\n\nStart writing here...\n');
|
|
98
|
+
fs.writeFileSync(path.join(dir, '.env.example'), `# Add your API key here\nOPENAI_API_KEY=sk-...\nANTHROPIC_API_KEY=sk-ant-...\nGOOGLE_AI_API_KEY=...\n`);
|
|
99
|
+
|
|
100
|
+
console.log(chalk.green('✅ Project initialized!\n'));
|
|
101
|
+
console.log(chalk.white(`Next steps:\n`));
|
|
102
|
+
if (name !== '.') console.log(chalk.white(` cd ${name}`));
|
|
103
|
+
console.log(chalk.white(` npm install`));
|
|
104
|
+
console.log(chalk.white(` # Add your API key to .env`));
|
|
105
|
+
console.log(chalk.white(` npx tsx src/index.ts\n`));
|
|
106
|
+
});
|
|
107
|
+
|
|
108
|
+
// ── kairo create-adapter ──────────────────────────────────────────────────────
|
|
109
|
+
program
|
|
110
|
+
.command('create-adapter <name>')
|
|
111
|
+
.description('Scaffold a new Kairo format adapter')
|
|
112
|
+
.option('--dir <path>', 'Output directory', 'packages/adapters')
|
|
113
|
+
.action((name: string, opts: any) => {
|
|
114
|
+
const adapterName = name.toLowerCase();
|
|
115
|
+
const className = adapterName.charAt(0).toUpperCase() + adapterName.slice(1) + 'Adapter';
|
|
116
|
+
const dir = path.join(process.cwd(), opts.dir, `adapter-${adapterName}`);
|
|
117
|
+
const srcDir = path.join(dir, 'src');
|
|
118
|
+
|
|
119
|
+
if (!fs.existsSync(srcDir)) fs.mkdirSync(srcDir, { recursive: true });
|
|
120
|
+
|
|
121
|
+
const pkg = {
|
|
122
|
+
name: `@docscode/adapter-${adapterName}`,
|
|
123
|
+
version: '1.0.0',
|
|
124
|
+
type: 'module',
|
|
125
|
+
main: 'dist/index.cjs',
|
|
126
|
+
module: 'dist/index.js',
|
|
127
|
+
types: 'dist/index.d.ts',
|
|
128
|
+
exports: {
|
|
129
|
+
'.': { import: './dist/index.js', require: './dist/index.cjs' },
|
|
130
|
+
},
|
|
131
|
+
peerDependencies: { '@docscode/core': '*', 'yjs': '*' },
|
|
132
|
+
};
|
|
133
|
+
|
|
134
|
+
const tsconfig = {
|
|
135
|
+
extends: '../../../../tsconfig.json',
|
|
136
|
+
compilerOptions: { outDir: './dist', rootDir: './src' },
|
|
137
|
+
include: ['src'],
|
|
138
|
+
};
|
|
139
|
+
|
|
140
|
+
const source = `import * as Y from 'yjs';
|
|
141
|
+
import type { FormatAdapter } from '@docscode/core';
|
|
142
|
+
import { CanonicalDoc } from '@docscode/core';
|
|
143
|
+
|
|
144
|
+
/**
|
|
145
|
+
* ${className} — Kairo format adapter for .${adapterName} files.
|
|
146
|
+
*
|
|
147
|
+
* Implement read() to parse ${adapterName} → Y.Doc.
|
|
148
|
+
* Implement write() to serialize Y.Doc → ${adapterName} Buffer.
|
|
149
|
+
*/
|
|
150
|
+
export class ${className} implements FormatAdapter {
|
|
151
|
+
readonly format = '${adapterName}';
|
|
152
|
+
|
|
153
|
+
async read(source: Buffer | string): Promise<Y.Doc> {
|
|
154
|
+
const yDoc = new Y.Doc();
|
|
155
|
+
const canonical = new CanonicalDoc(yDoc);
|
|
156
|
+
|
|
157
|
+
// TODO: Parse ${adapterName} format and call canonical.addBlock(...)
|
|
158
|
+
// Example:
|
|
159
|
+
// canonical.addParagraph('Hello from ${adapterName}!');
|
|
160
|
+
|
|
161
|
+
const content = Buffer.isBuffer(source) ? source.toString('utf-8') : source;
|
|
162
|
+
canonical.addParagraph(content);
|
|
163
|
+
|
|
164
|
+
return yDoc;
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
async write(doc: Y.Doc): Promise<Buffer> {
|
|
168
|
+
const canonical = new CanonicalDoc(doc);
|
|
169
|
+
|
|
170
|
+
// TODO: Serialize blocks back to ${adapterName} format
|
|
171
|
+
const text = canonical.toPlainText();
|
|
172
|
+
return Buffer.from(text, 'utf-8');
|
|
173
|
+
}
|
|
174
|
+
}
|
|
175
|
+
`;
|
|
176
|
+
|
|
177
|
+
const readme = `# @docscode/adapter-${adapterName}
|
|
178
|
+
|
|
179
|
+
Kairo format adapter for \`.${adapterName}\` files.
|
|
180
|
+
|
|
181
|
+
## Usage
|
|
182
|
+
|
|
183
|
+
\`\`\`typescript
|
|
184
|
+
import { kairo } from '@docscode/core';
|
|
185
|
+
import { ${className} } from '@docscode/adapter-${adapterName}';
|
|
186
|
+
|
|
187
|
+
kairo.registerFormatAdapter(new ${className}());
|
|
188
|
+
|
|
189
|
+
const session = await kairo.connect({ file: 'document.${adapterName}', llm });
|
|
190
|
+
\`\`\`
|
|
191
|
+
|
|
192
|
+
## Development
|
|
193
|
+
|
|
194
|
+
Implement \`read()\` and \`write()\` in \`src/index.ts\`.
|
|
195
|
+
`;
|
|
196
|
+
|
|
197
|
+
fs.writeFileSync(path.join(dir, 'package.json'), JSON.stringify(pkg, null, 2));
|
|
198
|
+
fs.writeFileSync(path.join(dir, 'tsconfig.json'), JSON.stringify(tsconfig, null, 2));
|
|
199
|
+
fs.writeFileSync(path.join(srcDir, 'index.ts'), source);
|
|
200
|
+
fs.writeFileSync(path.join(dir, 'README.md'), readme);
|
|
201
|
+
|
|
202
|
+
console.log(chalk.green(`\n✅ Adapter scaffolded: @docscode/adapter-${adapterName}\n`));
|
|
203
|
+
console.log(chalk.white(` ${path.join(dir, 'src', 'index.ts')}`));
|
|
204
|
+
console.log(chalk.white(`\nImplement read() and write() then publish to npm.\n`));
|
|
205
|
+
});
|
|
206
|
+
|
|
207
|
+
// ── kairo connect (quick CLI test) ────────────────────────────────────────────
|
|
208
|
+
program
|
|
209
|
+
.command('connect <file>')
|
|
210
|
+
.description('Quick-connect to a document and show AI analysis')
|
|
211
|
+
.option('--model <model>', 'Ollama model to use (local)', 'llama3.2')
|
|
212
|
+
.option('--cloud', 'Use OpenAI instead of Ollama')
|
|
213
|
+
.action(async (file: string, opts: any) => {
|
|
214
|
+
if (!fs.existsSync(file)) {
|
|
215
|
+
console.error(chalk.red(`File not found: ${file}`));
|
|
216
|
+
process.exit(1);
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
console.log(chalk.cyan(`\n🌀 Kairo connecting to ${path.basename(file)}...\n`));
|
|
220
|
+
|
|
221
|
+
try {
|
|
222
|
+
const { kairo } = await import('@docscode/core');
|
|
223
|
+
const { MarkdownAdapter } = await import('@docscode/adapter-markdown');
|
|
224
|
+
|
|
225
|
+
let llm;
|
|
226
|
+
if (opts.cloud) {
|
|
227
|
+
const { OpenAIAdapter } = await import('@docscode/core');
|
|
228
|
+
llm = new OpenAIAdapter();
|
|
229
|
+
console.log(chalk.yellow(' Using OpenAI GPT-4o-mini'));
|
|
230
|
+
} else {
|
|
231
|
+
const { OllamaAdapter } = await import('@docscode/core');
|
|
232
|
+
llm = new OllamaAdapter({ model: opts.model });
|
|
233
|
+
console.log(chalk.yellow(` Using Ollama (${opts.model})`));
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
kairo.registerFormatAdapter(new MarkdownAdapter());
|
|
237
|
+
|
|
238
|
+
const rawContent = fs.readFileSync(file, 'utf-8');
|
|
239
|
+
const session = await kairo.connect({ content: rawContent, fileName: file, llm });
|
|
240
|
+
|
|
241
|
+
const { CanonicalDoc } = await import('@docscode/core');
|
|
242
|
+
const canonical = new CanonicalDoc(session.doc);
|
|
243
|
+
const stats = canonical.stats();
|
|
244
|
+
const wordCount = Math.ceil(stats.charCount / 5); // approximate words
|
|
245
|
+
|
|
246
|
+
console.log(chalk.green(`\n✅ Connected!`));
|
|
247
|
+
console.log(chalk.white(` Words: ~${wordCount}`));
|
|
248
|
+
console.log(chalk.white(` Format: ${path.extname(file).slice(1)}`));
|
|
249
|
+
console.log(chalk.white(`\n AI peer status: ${session.ai.getStatus ? session.ai['_status'] ?? 'idle' : 'idle'}`));
|
|
250
|
+
console.log(chalk.cyan(`\n Use kairo mcp to expose this as an MCP server for Claude/Cursor.\n`));
|
|
251
|
+
|
|
252
|
+
session.destroy();
|
|
253
|
+
} catch (e: any) {
|
|
254
|
+
console.error(chalk.red(`Error: ${e.stack}`));
|
|
255
|
+
}
|
|
256
|
+
});
|
|
257
|
+
|
|
258
|
+
// ── kairo mcp ─────────────────────────────────────────────────────────────────
|
|
259
|
+
program
|
|
260
|
+
.command('mcp')
|
|
261
|
+
.description('Start the Kairo MCP Server for Claude Desktop / Cursor / Windsurf')
|
|
262
|
+
.action(async () => {
|
|
263
|
+
console.error(chalk.magenta('🚀 Starting Kairo MCP Server...'));
|
|
264
|
+
const { default: run } = await import('@docscode/mcp-server');
|
|
265
|
+
if (run) await run();
|
|
266
|
+
});
|
|
267
|
+
|
|
268
|
+
program.parse();
|