ctxpkg 0.0.1 → 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cli.js +1 -1
- package/bin/daemon.js +1 -1
- package/dist/agent/agent.d.ts +65 -0
- package/dist/agent/agent.d.ts.map +1 -0
- package/dist/agent/agent.js +291 -0
- package/dist/agent/agent.js.map +1 -0
- package/dist/agent/agent.prompts.d.ts +13 -0
- package/dist/agent/agent.prompts.d.ts.map +1 -0
- package/{src/agent/agent.prompts.ts → dist/agent/agent.prompts.js} +11 -12
- package/dist/agent/agent.prompts.js.map +1 -0
- package/dist/agent/agent.test-runner.d.ts +73 -0
- package/dist/agent/agent.test-runner.d.ts.map +1 -0
- package/dist/agent/agent.test-runner.js +316 -0
- package/dist/agent/agent.test-runner.js.map +1 -0
- package/dist/agent/agent.test-runner.schemas.d.ts +382 -0
- package/dist/agent/agent.test-runner.schemas.d.ts.map +1 -0
- package/dist/agent/agent.test-runner.schemas.js +110 -0
- package/dist/agent/agent.test-runner.schemas.js.map +1 -0
- package/dist/agent/agent.types.d.ts +122 -0
- package/dist/agent/agent.types.d.ts.map +1 -0
- package/dist/agent/agent.types.js +19 -0
- package/dist/agent/agent.types.js.map +1 -0
- package/dist/backend/backend.d.ts +16 -0
- package/dist/backend/backend.d.ts.map +1 -0
- package/dist/backend/backend.js +79 -0
- package/dist/backend/backend.js.map +1 -0
- package/dist/backend/backend.protocol.d.ts +74 -0
- package/dist/backend/backend.protocol.d.ts.map +1 -0
- package/dist/backend/backend.protocol.js +46 -0
- package/dist/backend/backend.protocol.js.map +1 -0
- package/dist/backend/backend.schemas.d.ts +141 -0
- package/dist/backend/backend.schemas.d.ts.map +1 -0
- package/dist/backend/backend.schemas.js +59 -0
- package/dist/backend/backend.schemas.js.map +1 -0
- package/dist/backend/backend.services.d.ts +290 -0
- package/dist/backend/backend.services.d.ts.map +1 -0
- package/dist/backend/backend.services.js +103 -0
- package/dist/backend/backend.services.js.map +1 -0
- package/dist/backend/backend.types.d.ts +25 -0
- package/dist/backend/backend.types.d.ts.map +1 -0
- package/dist/backend/backend.types.js +6 -0
- package/dist/backend/backend.types.js.map +1 -0
- package/dist/cli/cli.agent.d.ts +4 -0
- package/dist/cli/cli.agent.d.ts.map +1 -0
- package/dist/cli/cli.agent.js +158 -0
- package/dist/cli/cli.agent.js.map +1 -0
- package/dist/cli/cli.chat.d.ts +4 -0
- package/dist/cli/cli.chat.d.ts.map +1 -0
- package/dist/cli/cli.chat.js +311 -0
- package/dist/cli/cli.chat.js.map +1 -0
- package/dist/cli/cli.client.d.ts +11 -0
- package/dist/cli/cli.client.d.ts.map +1 -0
- package/dist/cli/cli.client.js +40 -0
- package/dist/cli/cli.client.js.map +1 -0
- package/dist/cli/cli.collections.d.ts +4 -0
- package/dist/cli/cli.collections.d.ts.map +1 -0
- package/dist/cli/cli.collections.js +411 -0
- package/dist/cli/cli.collections.js.map +1 -0
- package/dist/cli/cli.config.d.ts +4 -0
- package/dist/cli/cli.config.d.ts.map +1 -0
- package/dist/cli/cli.config.js +192 -0
- package/dist/cli/cli.config.js.map +1 -0
- package/dist/cli/cli.d.ts +4 -0
- package/dist/cli/cli.d.ts.map +1 -0
- package/dist/cli/cli.daemon.d.ts +4 -0
- package/dist/cli/cli.daemon.d.ts.map +1 -0
- package/dist/cli/cli.daemon.js +116 -0
- package/dist/cli/cli.daemon.js.map +1 -0
- package/dist/cli/cli.documents.d.ts +4 -0
- package/dist/cli/cli.documents.d.ts.map +1 -0
- package/dist/cli/cli.documents.js +332 -0
- package/dist/cli/cli.documents.js.map +1 -0
- package/dist/cli/cli.js +23 -0
- package/dist/cli/cli.js.map +1 -0
- package/dist/cli/cli.mcp.d.ts +4 -0
- package/dist/cli/cli.mcp.d.ts.map +1 -0
- package/dist/cli/cli.mcp.js +146 -0
- package/dist/cli/cli.mcp.js.map +1 -0
- package/dist/cli/cli.utils.d.ts +51 -0
- package/dist/cli/cli.utils.d.ts.map +1 -0
- package/dist/cli/cli.utils.js +95 -0
- package/dist/cli/cli.utils.js.map +1 -0
- package/dist/client/client.adapters.d.ts +38 -0
- package/dist/client/client.adapters.d.ts.map +1 -0
- package/dist/client/client.adapters.js +233 -0
- package/dist/client/client.adapters.js.map +1 -0
- package/dist/client/client.d.ts +16 -0
- package/dist/client/client.d.ts.map +1 -0
- package/dist/client/client.js +74 -0
- package/dist/client/client.js.map +1 -0
- package/dist/client/client.types.d.ts +10 -0
- package/dist/client/client.types.d.ts.map +1 -0
- package/dist/client/client.types.js +2 -0
- package/dist/client/client.types.js.map +1 -0
- package/dist/collections/collections.d.ts +219 -0
- package/dist/collections/collections.d.ts.map +1 -0
- package/dist/collections/collections.js +933 -0
- package/dist/collections/collections.js.map +1 -0
- package/dist/collections/collections.schemas.d.ts +298 -0
- package/dist/collections/collections.schemas.d.ts.map +1 -0
- package/dist/collections/collections.schemas.js +117 -0
- package/dist/collections/collections.schemas.js.map +1 -0
- package/dist/config/config.d.ts +29 -0
- package/dist/config/config.d.ts.map +1 -0
- package/dist/config/config.js +112 -0
- package/dist/config/config.js.map +1 -0
- package/dist/daemon/daemon.config.d.ts +6 -0
- package/dist/daemon/daemon.config.d.ts.map +1 -0
- package/dist/daemon/daemon.config.js +19 -0
- package/dist/daemon/daemon.config.js.map +1 -0
- package/dist/daemon/daemon.d.ts +10 -0
- package/dist/daemon/daemon.d.ts.map +1 -0
- package/dist/daemon/daemon.js +173 -0
- package/dist/daemon/daemon.js.map +1 -0
- package/dist/daemon/daemon.manager.d.ts +20 -0
- package/dist/daemon/daemon.manager.d.ts.map +1 -0
- package/dist/daemon/daemon.manager.js +176 -0
- package/dist/daemon/daemon.manager.js.map +1 -0
- package/dist/daemon/daemon.schemas.d.ts +38 -0
- package/dist/daemon/daemon.schemas.d.ts.map +1 -0
- package/dist/daemon/daemon.schemas.js +15 -0
- package/dist/daemon/daemon.schemas.js.map +1 -0
- package/dist/database/database.d.ts +10 -0
- package/dist/database/database.d.ts.map +1 -0
- package/dist/database/database.js +52 -0
- package/dist/database/database.js.map +1 -0
- package/dist/database/migrations/migrations.001-init.d.ts +9 -0
- package/dist/database/migrations/migrations.001-init.d.ts.map +1 -0
- package/dist/database/migrations/migrations.001-init.js +46 -0
- package/dist/database/migrations/migrations.001-init.js.map +1 -0
- package/dist/database/migrations/migrations.002-fts5.d.ts +11 -0
- package/dist/database/migrations/migrations.002-fts5.d.ts.map +1 -0
- package/dist/database/migrations/migrations.002-fts5.js +29 -0
- package/dist/database/migrations/migrations.002-fts5.js.map +1 -0
- package/dist/database/migrations/migrations.d.ts +11 -0
- package/dist/database/migrations/migrations.d.ts.map +1 -0
- package/dist/database/migrations/migrations.js +14 -0
- package/dist/database/migrations/migrations.js.map +1 -0
- package/dist/database/migrations/migrations.types.d.ts +8 -0
- package/dist/database/migrations/migrations.types.d.ts.map +1 -0
- package/dist/database/migrations/migrations.types.js +2 -0
- package/dist/database/migrations/migrations.types.js.map +1 -0
- package/dist/documents/documents.d.ts +58 -0
- package/dist/documents/documents.d.ts.map +1 -0
- package/dist/documents/documents.js +597 -0
- package/dist/documents/documents.js.map +1 -0
- package/dist/documents/documents.schemas.d.ts +418 -0
- package/dist/documents/documents.schemas.d.ts.map +1 -0
- package/dist/documents/documents.schemas.js +111 -0
- package/dist/documents/documents.schemas.js.map +1 -0
- package/dist/embedder/embedder.d.ts +22 -0
- package/dist/embedder/embedder.d.ts.map +1 -0
- package/dist/embedder/embedder.js +46 -0
- package/dist/embedder/embedder.js.map +1 -0
- package/dist/exports.d.ts +2 -0
- package/dist/exports.d.ts.map +1 -0
- package/dist/exports.js +2 -0
- package/dist/exports.js.map +1 -0
- package/dist/mcp/mcp.d.ts +44 -0
- package/dist/mcp/mcp.d.ts.map +1 -0
- package/dist/mcp/mcp.js +62 -0
- package/dist/mcp/mcp.js.map +1 -0
- package/dist/tools/agent/agent.d.ts +14 -0
- package/dist/tools/agent/agent.d.ts.map +1 -0
- package/dist/tools/agent/agent.js +31 -0
- package/dist/tools/agent/agent.js.map +1 -0
- package/dist/tools/documents/documents.d.ts +28 -0
- package/dist/tools/documents/documents.d.ts.map +1 -0
- package/dist/tools/documents/documents.js +336 -0
- package/dist/tools/documents/documents.js.map +1 -0
- package/dist/tools/tools.langchain.d.ts +11 -0
- package/dist/tools/tools.langchain.d.ts.map +1 -0
- package/dist/tools/tools.langchain.js +30 -0
- package/dist/tools/tools.langchain.js.map +1 -0
- package/dist/tools/tools.mcp.d.ts +12 -0
- package/dist/tools/tools.mcp.d.ts.map +1 -0
- package/dist/tools/tools.mcp.js +38 -0
- package/dist/tools/tools.mcp.js.map +1 -0
- package/{src/tools/tools.types.ts → dist/tools/tools.types.d.ts} +10 -16
- package/dist/tools/tools.types.d.ts.map +1 -0
- package/dist/tools/tools.types.js +7 -0
- package/dist/tools/tools.types.js.map +1 -0
- package/dist/tsconfig.tsbuildinfo +1 -0
- package/dist/utils/utils.services.d.ts +14 -0
- package/dist/utils/utils.services.d.ts.map +1 -0
- package/dist/utils/utils.services.js +33 -0
- package/dist/utils/utils.services.js.map +1 -0
- package/package.json +5 -2
- package/src/agent/AGENTS.md +0 -249
- package/src/agent/agent.test-runner.schemas.ts +0 -158
- package/src/agent/agent.test-runner.ts +0 -436
- package/src/agent/agent.ts +0 -371
- package/src/agent/agent.types.ts +0 -94
- package/src/backend/AGENTS.md +0 -112
- package/src/backend/backend.protocol.ts +0 -95
- package/src/backend/backend.schemas.ts +0 -123
- package/src/backend/backend.services.ts +0 -151
- package/src/backend/backend.ts +0 -111
- package/src/backend/backend.types.ts +0 -34
- package/src/cli/AGENTS.md +0 -213
- package/src/cli/cli.agent.ts +0 -197
- package/src/cli/cli.chat.ts +0 -369
- package/src/cli/cli.client.ts +0 -55
- package/src/cli/cli.collections.ts +0 -491
- package/src/cli/cli.config.ts +0 -252
- package/src/cli/cli.daemon.ts +0 -160
- package/src/cli/cli.documents.ts +0 -413
- package/src/cli/cli.mcp.ts +0 -177
- package/src/cli/cli.ts +0 -28
- package/src/cli/cli.utils.ts +0 -122
- package/src/client/AGENTS.md +0 -135
- package/src/client/client.adapters.ts +0 -279
- package/src/client/client.ts +0 -86
- package/src/client/client.types.ts +0 -17
- package/src/collections/AGENTS.md +0 -185
- package/src/collections/collections.schemas.ts +0 -195
- package/src/collections/collections.ts +0 -1160
- package/src/config/config.ts +0 -118
- package/src/daemon/AGENTS.md +0 -168
- package/src/daemon/daemon.config.ts +0 -23
- package/src/daemon/daemon.manager.ts +0 -215
- package/src/daemon/daemon.schemas.ts +0 -22
- package/src/daemon/daemon.ts +0 -205
- package/src/database/AGENTS.md +0 -211
- package/src/database/database.ts +0 -64
- package/src/database/migrations/migrations.001-init.ts +0 -56
- package/src/database/migrations/migrations.002-fts5.ts +0 -32
- package/src/database/migrations/migrations.ts +0 -20
- package/src/database/migrations/migrations.types.ts +0 -9
- package/src/documents/AGENTS.md +0 -301
- package/src/documents/documents.schemas.ts +0 -190
- package/src/documents/documents.ts +0 -734
- package/src/embedder/embedder.ts +0 -53
- package/src/exports.ts +0 -0
- package/src/mcp/AGENTS.md +0 -264
- package/src/mcp/mcp.ts +0 -105
- package/src/tools/AGENTS.md +0 -228
- package/src/tools/agent/agent.ts +0 -45
- package/src/tools/documents/documents.ts +0 -401
- package/src/tools/tools.langchain.ts +0 -37
- package/src/tools/tools.mcp.ts +0 -46
- package/src/utils/utils.services.ts +0 -46
|
@@ -0,0 +1,933 @@
|
|
|
1
|
+
import { createHash } from 'node:crypto';
|
|
2
|
+
import { existsSync, readFileSync, writeFileSync, realpathSync, createWriteStream, mkdirSync } from 'node:fs';
|
|
3
|
+
import { readFile, glob, mkdtemp, rm } from 'node:fs/promises';
|
|
4
|
+
import { tmpdir } from 'node:os';
|
|
5
|
+
import { resolve, join, dirname } from 'node:path';
|
|
6
|
+
import { Readable } from 'node:stream';
|
|
7
|
+
import { pipeline } from 'node:stream/promises';
|
|
8
|
+
import { simpleGit } from 'simple-git';
|
|
9
|
+
import * as tar from 'tar';
|
|
10
|
+
import { projectConfigSchema, collectionRecordSchema, manifestSchema, isGlobSources, isFileSources, isGitUrl, parseGitUrl, } from './collections.schemas.js';
|
|
11
|
+
import { DatabaseService, tableNames } from '#root/database/database.js';
|
|
12
|
+
import { DocumentsService } from '#root/documents/documents.js';
|
|
13
|
+
import { config } from '#root/config/config.js';
|
|
14
|
+
class CollectionsService {
|
|
15
|
+
// Service container used to resolve shared services lazily; the class
// calls services.get(DatabaseService) etc. at method-call time.
#services;
/**
 * @param services - Service locator exposing `get(ServiceClass)`.
 */
constructor(services) {
    this.#services = services;
}
|
|
19
|
+
// === Project Config ===
|
|
20
|
+
/**
|
|
21
|
+
* Get the project config file path for a given directory.
|
|
22
|
+
*/
|
|
23
|
+
/**
 * Resolve the absolute path of the project config file for `cwd`.
 * The file name comes from the `project.configFile` config key.
 */
getProjectConfigPath = (cwd = process.cwd()) => {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const fileName = config.get('project.configFile');
    return resolve(cwd, fileName);
};
|
|
28
|
+
/**
|
|
29
|
+
* Check if a project config file exists.
|
|
30
|
+
*/
|
|
31
|
+
/**
 * Whether a project config file is present in `cwd`.
 */
projectConfigExists = (cwd = process.cwd()) => existsSync(this.getProjectConfigPath(cwd));
|
|
34
|
+
/**
|
|
35
|
+
* Read and parse the project config file.
|
|
36
|
+
*/
|
|
37
|
+
/**
 * Read, parse and validate the project config file.
 * A missing file yields the empty config `{ collections: {} }`.
 */
readProjectConfig = (cwd = process.cwd()) => {
    const configPath = this.getProjectConfigPath(cwd);
    if (!existsSync(configPath)) {
        return { collections: {} };
    }
    // Validate through the schema so malformed files fail loudly.
    return projectConfigSchema.parse(JSON.parse(readFileSync(configPath, 'utf-8')));
};
|
|
46
|
+
/**
|
|
47
|
+
* Write the project config file.
|
|
48
|
+
*/
|
|
49
|
+
/**
 * Serialize `projectConfig` as pretty-printed JSON and write it to the
 * project config path for `cwd`.
 */
writeProjectConfig = (projectConfig, cwd = process.cwd()) => {
    const target = this.getProjectConfigPath(cwd);
    writeFileSync(target, JSON.stringify(projectConfig, null, 2), 'utf-8');
};
|
|
54
|
+
/**
|
|
55
|
+
* Initialize a new project config file.
|
|
56
|
+
*/
|
|
57
|
+
/**
 * Create a fresh (empty) project config file.
 *
 * @throws when a config already exists and `force` is not set.
 */
initProjectConfig = (cwd = process.cwd(), force = false) => {
    const configPath = this.getProjectConfigPath(cwd);
    if (!force && existsSync(configPath)) {
        throw new Error(`Project config already exists at ${configPath}`);
    }
    this.writeProjectConfig({ collections: {} }, cwd);
};
|
|
65
|
+
// === Global Config ===
|
|
66
|
+
/**
|
|
67
|
+
* Get the global config file path.
|
|
68
|
+
*/
|
|
69
|
+
/**
 * Get the global config file path.
 * Read straight from the `global.configFile` config key; no path
 * resolution is applied here (presumably the key already holds an
 * absolute path — confirm against config defaults).
 */
getGlobalConfigPath = () => {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    return config.get('global.configFile');
};
|
|
73
|
+
/**
|
|
74
|
+
* Check if the global config file exists.
|
|
75
|
+
*/
|
|
76
|
+
/**
 * Whether the global config file is present on disk.
 */
globalConfigExists = () => existsSync(this.getGlobalConfigPath());
|
|
79
|
+
/**
|
|
80
|
+
* Read and parse the global config file.
|
|
81
|
+
*/
|
|
82
|
+
/**
 * Read, parse and validate the global config file.
 * A missing file yields the empty config `{ collections: {} }`.
 */
readGlobalConfig = () => {
    const configPath = this.getGlobalConfigPath();
    if (!existsSync(configPath)) {
        return { collections: {} };
    }
    // Same schema as the project config: both are collection maps.
    return projectConfigSchema.parse(JSON.parse(readFileSync(configPath, 'utf-8')));
};
|
|
91
|
+
/**
|
|
92
|
+
* Write the global config file. Auto-creates directory if needed.
|
|
93
|
+
*/
|
|
94
|
+
/**
 * Write the global config file, creating its directory when needed.
 *
 * `mkdirSync` with `recursive: true` is a no-op when the directory
 * already exists, so the previous `existsSync` pre-check was redundant
 * and opened a needless check-then-act (TOCTOU) window; the directory
 * is now created unconditionally.
 *
 * @param globalConfig - Config object to serialize as pretty JSON.
 */
writeGlobalConfig = (globalConfig) => {
    const configPath = this.getGlobalConfigPath();
    mkdirSync(dirname(configPath), { recursive: true });
    writeFileSync(configPath, JSON.stringify(globalConfig, null, 2), 'utf-8');
};
|
|
104
|
+
// === Unified Config Operations ===
|
|
105
|
+
/**
|
|
106
|
+
* Add a collection to project or global config.
|
|
107
|
+
*/
|
|
108
|
+
/**
 * Add a collection spec under `name`, to the global config when
 * `options.global` is set, otherwise to the project config in `cwd`.
 *
 * @throws when the name is already taken in the chosen config.
 */
addToConfig = (name, spec, options = {}) => {
    const { global: isGlobal = false, cwd = process.cwd() } = options;
    if (!isGlobal) {
        this.addToProjectConfig(name, spec, cwd);
        return;
    }
    const globalConfig = this.readGlobalConfig();
    if (name in globalConfig.collections) {
        throw new Error(`Collection "${name}" already exists in global config`);
    }
    globalConfig.collections[name] = spec;
    this.writeGlobalConfig(globalConfig);
};
|
|
122
|
+
/**
|
|
123
|
+
* Remove a collection from project or global config.
|
|
124
|
+
*/
|
|
125
|
+
/**
 * Remove the collection `name` from the global config when
 * `options.global` is set, otherwise from the project config in `cwd`.
 *
 * @throws when the name is absent from the chosen config.
 */
removeFromConfig = (name, options = {}) => {
    const { global: isGlobal = false, cwd = process.cwd() } = options;
    if (!isGlobal) {
        this.removeFromProjectConfig(name, cwd);
        return;
    }
    const globalConfig = this.readGlobalConfig();
    if (!(name in globalConfig.collections)) {
        throw new Error(`Collection "${name}" not found in global config`);
    }
    // Rebuild the map without the removed key (no in-place delete).
    globalConfig.collections = Object.fromEntries(
        Object.entries(globalConfig.collections).filter(([key]) => key !== name)
    );
    this.writeGlobalConfig(globalConfig);
};
|
|
141
|
+
/**
|
|
142
|
+
* Get a collection spec by name from project or global config.
|
|
143
|
+
* If global is not specified, searches local first then global.
|
|
144
|
+
*/
|
|
145
|
+
/**
 * Look up a collection spec by name.
 *
 * Scope selection is strictly tri-state on `options.global`:
 *  - `true`      → global config only
 *  - `false`     → project config only
 *  - unspecified → project config first, then global as fallback
 *
 * @returns the spec, or null when not found.
 */
getFromConfig = (name, options = {}) => {
    const { global: isGlobal, cwd = process.cwd() } = options;
    if (isGlobal === true) {
        return this.readGlobalConfig().collections[name] || null;
    }
    if (isGlobal === false) {
        return this.getFromProjectConfig(name, cwd);
    }
    // Unspecified scope: local wins, global is the fallback.
    const localSpec = this.getFromProjectConfig(name, cwd);
    return localSpec || this.readGlobalConfig().collections[name] || null;
};
|
|
162
|
+
/**
|
|
163
|
+
* Get all collections from both local and global configs.
|
|
164
|
+
* Returns a map with collection name as key and spec + source info as value.
|
|
165
|
+
* Local collections take precedence over global ones with the same name.
|
|
166
|
+
*/
|
|
167
|
+
/**
 * Merge global and local collections into one Map keyed by name, each
 * value being `{ spec, source }` with source 'global' or 'local'.
 * Local entries override global ones with the same name.
 */
getAllCollections = (cwd = process.cwd()) => {
    const merged = new Map();
    const register = (collections, source) => {
        for (const [name, spec] of Object.entries(collections)) {
            merged.set(name, { spec, source });
        }
    };
    // Insert global entries first so local ones win on name clashes.
    register(this.readGlobalConfig().collections, 'global');
    if (this.projectConfigExists(cwd)) {
        register(this.readProjectConfig(cwd).collections, 'local');
    }
    return merged;
};
|
|
183
|
+
// === Collection ID Computation ===
|
|
184
|
+
/**
|
|
185
|
+
* Normalize a path to its canonical absolute form.
|
|
186
|
+
*/
|
|
187
|
+
/**
 * Canonicalize `path` against `basePath`: resolve to an absolute path
 * and follow symlinks where possible.
 *
 * @returns the realpath, or the resolved (but unverified) absolute
 *          path when the target does not exist yet.
 */
normalizePath = (path, basePath = process.cwd()) => {
    const absolute = resolve(basePath, path);
    try {
        // Collapse symlinks to the canonical on-disk location.
        return realpathSync(absolute);
    }
    catch {
        // Nonexistent paths cannot be realpath'd; hand back the
        // resolved form unchanged.
        return absolute;
    }
};
|
|
198
|
+
/**
|
|
199
|
+
* Compute the collection ID for a given spec.
|
|
200
|
+
* Format: pkg:{normalized_url}
|
|
201
|
+
*/
|
|
202
|
+
/**
 * Derive the stable collection ID for a spec.
 * Format: `pkg:{url}` with any trailing slashes stripped from the URL.
 */
computeCollectionId = (spec) => `pkg:${spec.url.replace(/\/+$/, '')}`;
|
|
207
|
+
// === Database Operations ===
|
|
208
|
+
/**
|
|
209
|
+
* Get a collection record by ID.
|
|
210
|
+
*/
|
|
211
|
+
getCollection = async (id) => {
|
|
212
|
+
const databaseService = this.#services.get(DatabaseService);
|
|
213
|
+
const database = await databaseService.getInstance();
|
|
214
|
+
const [record] = await database(tableNames.collections).where({ id }).limit(1);
|
|
215
|
+
if (!record) {
|
|
216
|
+
return null;
|
|
217
|
+
}
|
|
218
|
+
return collectionRecordSchema.parse(record);
|
|
219
|
+
};
|
|
220
|
+
/**
|
|
221
|
+
* List all collection records.
|
|
222
|
+
*/
|
|
223
|
+
listCollections = async () => {
|
|
224
|
+
const databaseService = this.#services.get(DatabaseService);
|
|
225
|
+
const database = await databaseService.getInstance();
|
|
226
|
+
const records = await database(tableNames.collections).orderBy('created_at', 'asc');
|
|
227
|
+
return records.map((record) => collectionRecordSchema.parse(record));
|
|
228
|
+
};
|
|
229
|
+
/**
|
|
230
|
+
* Create or update a collection record.
|
|
231
|
+
*/
|
|
232
|
+
/**
 * Create or update a collection record keyed by `id`.
 *
 * Existing rows keep their original created_at and get a fresh
 * updated_at; new rows get both timestamps set to now.
 *
 * NOTE(review): this is a read-then-write upsert, not atomic — two
 * concurrent calls for the same new id could both reach the insert
 * branch. Confirm whether a DB-level upsert (e.g. ON CONFLICT) is
 * required for this workload.
 */
upsertCollection = async (id, data) => {
    const databaseService = this.#services.get(DatabaseService);
    const database = await databaseService.getInstance();
    const now = new Date().toISOString();
    // Existence check drives update-vs-insert below.
    const existing = await this.getCollection(id);
    if (existing) {
        await database(tableNames.collections)
            .where({ id })
            .update({
                ...data,
                updated_at: now,
            });
    }
    else {
        await database(tableNames.collections).insert({
            id,
            ...data,
            created_at: now,
            updated_at: now,
        });
    }
};
|
|
254
|
+
/**
|
|
255
|
+
* Delete a collection record.
|
|
256
|
+
*/
|
|
257
|
+
deleteCollection = async (id) => {
|
|
258
|
+
const databaseService = this.#services.get(DatabaseService);
|
|
259
|
+
const database = await databaseService.getInstance();
|
|
260
|
+
await database(tableNames.collections).where({ id }).delete();
|
|
261
|
+
};
|
|
262
|
+
/**
|
|
263
|
+
* Update the last sync timestamp for a collection.
|
|
264
|
+
*/
|
|
265
|
+
updateLastSync = async (id) => {
|
|
266
|
+
const databaseService = this.#services.get(DatabaseService);
|
|
267
|
+
const database = await databaseService.getInstance();
|
|
268
|
+
const now = new Date().toISOString();
|
|
269
|
+
await database(tableNames.collections).where({ id }).update({
|
|
270
|
+
last_sync_at: now,
|
|
271
|
+
updated_at: now,
|
|
272
|
+
});
|
|
273
|
+
};
|
|
274
|
+
/**
|
|
275
|
+
* Update the manifest hash for a collection.
|
|
276
|
+
*/
|
|
277
|
+
updateManifestHash = async (id, hash) => {
|
|
278
|
+
const databaseService = this.#services.get(DatabaseService);
|
|
279
|
+
const database = await databaseService.getInstance();
|
|
280
|
+
const now = new Date().toISOString();
|
|
281
|
+
await database(tableNames.collections).where({ id }).update({
|
|
282
|
+
manifest_hash: hash,
|
|
283
|
+
updated_at: now,
|
|
284
|
+
});
|
|
285
|
+
};
|
|
286
|
+
// === Project Config Helpers ===
|
|
287
|
+
/**
|
|
288
|
+
* Add a collection to the project config.
|
|
289
|
+
*/
|
|
290
|
+
/**
 * Add a collection spec to the project config in `cwd`.
 *
 * @throws when the name is already present.
 */
addToProjectConfig = (name, spec, cwd = process.cwd()) => {
    const projectConfig = this.readProjectConfig(cwd);
    if (name in projectConfig.collections) {
        throw new Error(`Collection "${name}" already exists in project config`);
    }
    projectConfig.collections[name] = spec;
    this.writeProjectConfig(projectConfig, cwd);
};
|
|
298
|
+
/**
|
|
299
|
+
* Remove a collection from the project config.
|
|
300
|
+
*/
|
|
301
|
+
/**
 * Remove a collection from the project config in `cwd`.
 *
 * @throws when the name is absent.
 */
removeFromProjectConfig = (name, cwd = process.cwd()) => {
    const projectConfig = this.readProjectConfig(cwd);
    if (!(name in projectConfig.collections)) {
        throw new Error(`Collection "${name}" not found in project config`);
    }
    // Rebuild the map without the removed key (no in-place delete).
    projectConfig.collections = Object.fromEntries(
        Object.entries(projectConfig.collections).filter(([key]) => key !== name)
    );
    this.writeProjectConfig(projectConfig, cwd);
};
|
|
311
|
+
/**
|
|
312
|
+
* Get a collection spec by name from the project config.
|
|
313
|
+
*/
|
|
314
|
+
/**
 * Look up a collection spec in the project config for `cwd`.
 *
 * @returns the spec, or null when not present.
 */
getFromProjectConfig = (name, cwd = process.cwd()) => {
    const { collections } = this.readProjectConfig(cwd);
    return collections[name] || null;
};
|
|
318
|
+
// === Sync Status ===
|
|
319
|
+
/**
|
|
320
|
+
* Get sync status for a collection by computing its ID and checking the database.
|
|
321
|
+
*/
|
|
322
|
+
/**
 * Report whether a spec's collection has ever been synced.
 *
 * Staleness detection via manifest-hash comparison is a future
 * refinement; today the status is binary.
 *
 * @returns 'synced' | 'not_synced'
 */
getSyncStatus = async (spec) => {
    const record = await this.getCollection(this.computeCollectionId(spec));
    return record?.last_sync_at ? 'synced' : 'not_synced';
};
|
|
332
|
+
// === Sync Operations ===
|
|
333
|
+
/**
|
|
334
|
+
* Sync a collection based on its spec.
|
|
335
|
+
*/
|
|
336
|
+
/**
 * Sync a collection, dispatching on the URL kind: git URLs go to the
 * git sync path, everything else to the package sync path.
 */
syncCollection = async (name, spec, cwd = process.cwd(), options = {}) => {
    return isGitUrl(spec.url)
        ? this.syncGitCollection(name, spec, cwd, options)
        : this.syncPkgCollection(name, spec, cwd, options);
};
|
|
343
|
+
// === Manifest Handling ===
|
|
344
|
+
/**
|
|
345
|
+
* Parse a manifest URL and determine its protocol.
|
|
346
|
+
*/
|
|
347
|
+
/**
 * Parse a manifest URL and determine its protocol.
 *
 * Result shapes:
 *  - git:   { protocol: 'git', cloneUrl, ref, manifestPath, isBundle: false }
 *  - file:  { protocol: 'file', path, isBundle }  (absolute, symlink-resolved)
 *  - https: { protocol: 'https', path, isBundle } (the URL itself)
 * Bare strings with no recognized scheme are treated as file paths
 * relative to `cwd`.
 */
parseManifestUrl = (url, cwd = process.cwd()) => {
    // Git URLs take precedence over the generic scheme sniffing below.
    if (isGitUrl(url)) {
        const parsed = parseGitUrl(url);
        return {
            protocol: 'git',
            cloneUrl: parsed.cloneUrl,
            ref: parsed.ref,
            manifestPath: parsed.manifestPath,
            isBundle: false,
        };
    }
    // A .tar.gz / .tgz suffix marks the target as a bundle archive.
    const isBundle = url.endsWith('.tar.gz') || url.endsWith('.tgz');
    if (url.startsWith('file://')) {
        const filePath = url.slice(7); // Remove 'file://'
        // NOTE(review): slicing 7 chars assumes a host-less file URL
        // (file:///abs/path); a file://host/path form would be mangled —
        // confirm callers never pass one.
        const resolvedPath = this.normalizePath(filePath, cwd);
        return { protocol: 'file', path: resolvedPath, isBundle };
    }
    if (url.startsWith('https://') || url.startsWith('http://')) {
        return { protocol: 'https', path: url, isBundle };
    }
    // Assume it's a relative file path
    const resolvedPath = this.normalizePath(url, cwd);
    return { protocol: 'file', path: resolvedPath, isBundle };
};
|
|
372
|
+
/**
|
|
373
|
+
* Load a manifest from a file:// URL.
|
|
374
|
+
*/
|
|
375
|
+
/**
 * Read and schema-validate a manifest from a local file path.
 */
loadLocalManifest = async (manifestPath) => {
    const raw = await readFile(manifestPath, 'utf8');
    return manifestSchema.parse(JSON.parse(raw));
};
|
|
380
|
+
/**
|
|
381
|
+
* Resolve manifest sources to a list of file entries.
|
|
382
|
+
* For glob sources: expand globs relative to manifest directory.
|
|
383
|
+
* For files sources: resolve paths relative to manifest or baseUrl.
|
|
384
|
+
*/
|
|
385
|
+
/**
 * Resolve manifest sources to a flat list of { id, url, hash? } entries.
 *
 * Glob sources are expanded relative to the manifest directory and are
 * only valid for local (file://) manifests; file sources are delegated
 * to resolveFileEntry, which honours baseUrl when present.
 *
 * @throws when glob sources are used with a non-file protocol, or the
 *         sources shape matches neither known variant.
 */
resolveManifestSources = async (manifest, manifestDir, protocol) => {
    const sources = manifest.sources;
    const baseUrl = manifest.baseUrl;
    if (isGlobSources(sources)) {
        if (protocol !== 'file') {
            throw new Error('Glob sources are only supported for file:// manifests');
        }
        const entries = [];
        for (const pattern of sources.glob) {
            // glob() yields matches relative to cwd (the manifest dir);
            // the entry id keeps that relative form, the url the absolute.
            for await (const file of glob(pattern, { cwd: manifestDir })) {
                const fullPath = resolve(manifestDir, file);
                entries.push({
                    id: file,
                    url: `file://${fullPath}`,
                });
            }
        }
        return entries;
    }
    if (isFileSources(sources)) {
        return sources.files.map((entry) => this.resolveFileEntry(entry, manifestDir, baseUrl, protocol));
    }
    throw new Error('Unknown sources type in manifest');
};
|
|
409
|
+
/**
|
|
410
|
+
* Resolve a single file entry to its final URL.
|
|
411
|
+
*/
|
|
412
|
+
/**
 * Resolve one manifest file entry to `{ id, url, hash? }`.
 *
 * Accepted entry forms:
 *  - string            → shorthand for { path: string }
 *  - { url, hash? }    → used verbatim (id is the URL)
 *  - { path, hash? }   → resolved against baseUrl, the manifest
 *                        directory (file protocol), or the manifest
 *                        URL directory (https), in that order
 *
 * @throws when the entry carries neither `path` nor `url`.
 */
resolveFileEntry = (entry, manifestDir, baseUrl, protocol) => {
    // Normalize the string shorthand into object form up front.
    const normalized = typeof entry === 'string' ? { path: entry } : entry;
    // Fully qualified URL: nothing to resolve.
    if (normalized.url) {
        return { id: normalized.url, url: normalized.url, hash: normalized.hash };
    }
    if (!normalized.path) {
        throw new Error('File entry must have either path or url');
    }
    const withSlash = (base) => (base.endsWith('/') ? base : `${base}/`);
    let resolvedUrl;
    if (baseUrl) {
        // baseUrl takes precedence over any protocol-specific fallback.
        resolvedUrl = `${withSlash(baseUrl)}${normalized.path}`;
    }
    else if (protocol === 'file') {
        resolvedUrl = `file://${resolve(manifestDir, normalized.path)}`;
    }
    else {
        // https: resolve relative to the manifest URL's directory.
        resolvedUrl = `${withSlash(manifestDir)}${normalized.path}`;
    }
    return { id: normalized.path, url: resolvedUrl, hash: normalized.hash };
};
|
|
451
|
+
/**
|
|
452
|
+
* Fetch content from a URL (file:// or https://).
|
|
453
|
+
*/
|
|
454
|
+
/**
 * Fetch text content from a file:// or http(s):// URL.
 *
 * @throws on non-OK HTTP responses or unrecognized protocols.
 */
fetchContent = async (url) => {
    if (url.startsWith('file://')) {
        // Strip the scheme prefix and read straight from disk.
        return readFile(url.slice(7), 'utf8');
    }
    if (url.startsWith('https://') || url.startsWith('http://')) {
        const response = await fetch(url);
        if (!response.ok) {
            throw new Error(`Failed to fetch ${url}: ${response.status} ${response.statusText}`);
        }
        return response.text();
    }
    throw new Error(`Unsupported URL protocol: ${url}`);
};
|
|
468
|
+
/**
|
|
469
|
+
* Load a manifest from a remote URL.
|
|
470
|
+
*/
|
|
471
|
+
/**
 * Load and schema-validate a manifest from any supported URL.
 *
 * @returns { manifest, content } — the parsed manifest plus the raw
 *          text (callers can hash the raw content).
 */
loadRemoteManifest = async (manifestUrl) => {
    const content = await this.fetchContent(manifestUrl);
    const manifest = manifestSchema.parse(JSON.parse(content));
    return { manifest, content };
};
|
|
477
|
+
/**
|
|
478
|
+
* Get the directory part of a URL.
|
|
479
|
+
*/
|
|
480
|
+
/**
 * Return everything before the last '/' of a URL (its "directory"),
 * or the URL unchanged when it contains no slash.
 */
getUrlDirectory = (url) => {
    const lastSlash = url.lastIndexOf('/');
    return lastSlash === -1 ? url : url.substring(0, lastSlash);
};
|
|
484
|
+
// === Bundle Handling ===
/**
 * Download a bundle to a temporary file and extract it.
 * Returns the path to the extracted directory — either the extraction
 * root or the single wrapping subdirectory that contains manifest.json.
 * The temp directory is removed on failure; on success the caller owns it.
 */
downloadAndExtractBundle = async (url, onProgress) => {
    const tempDir = await mkdtemp(join(tmpdir(), 'ai-assist-bundle-'));
    try {
        let archivePath;
        if (url.startsWith('file://')) {
            // Local bundle: extract straight from the source path.
            archivePath = url.slice(7);
            onProgress?.('Extracting local bundle...');
        }
        else {
            // Remote bundle: stream the download to disk first.
            onProgress?.('Downloading bundle...');
            const response = await fetch(url);
            if (!response.ok) {
                throw new Error(`Failed to download bundle: ${response.status} ${response.statusText}`);
            }
            if (!response.body) {
                throw new Error('Response body is empty');
            }
            archivePath = join(tempDir, 'bundle.tar.gz');
            await pipeline(Readable.fromWeb(response.body), createWriteStream(archivePath));
            onProgress?.('Extracting bundle...');
        }
        await tar.extract({
            file: archivePath,
            cwd: tempDir,
        });
        // Locate the manifest: at the extraction root, or one level down
        // (tarballs commonly wrap their content in a single top directory).
        if (existsSync(join(tempDir, 'manifest.json'))) {
            return tempDir;
        }
        const { readdir } = await import('node:fs/promises');
        for (const dirent of await readdir(tempDir, { withFileTypes: true })) {
            if (!dirent.isDirectory()) {
                continue;
            }
            const candidate = join(tempDir, dirent.name);
            if (existsSync(join(candidate, 'manifest.json'))) {
                return candidate;
            }
        }
        throw new Error('Could not find manifest.json in bundle');
    }
    catch (error) {
        // Clean up on error; cleanup failures are deliberately ignored.
        await rm(tempDir, { recursive: true, force: true }).catch(() => undefined);
        throw error;
    }
};
|
|
546
|
+
/**
|
|
547
|
+
* Sync a bundle collection.
|
|
548
|
+
*/
|
|
549
|
+
syncBundleCollection = async (name, spec, cwd = process.cwd(), options = {}) => {
|
|
550
|
+
const { force = false, onProgress } = options;
|
|
551
|
+
const collectionId = this.computeCollectionId(spec);
|
|
552
|
+
const parsed = this.parseManifestUrl(spec.url, cwd);
|
|
553
|
+
// This method only handles file/https bundles, not git URLs
|
|
554
|
+
if (parsed.protocol === 'git') {
|
|
555
|
+
throw new Error('syncBundleCollection does not support git URLs');
|
|
556
|
+
}
|
|
557
|
+
const { protocol, path: bundlePath } = parsed;
|
|
558
|
+
// Reconstruct URL with protocol for downloadAndExtractBundle
|
|
559
|
+
const bundleUrl = protocol === 'file' ? `file://${bundlePath}` : bundlePath;
|
|
560
|
+
let tempDir = null;
|
|
561
|
+
try {
|
|
562
|
+
// Download and extract bundle
|
|
563
|
+
tempDir = await this.downloadAndExtractBundle(bundleUrl, onProgress);
|
|
564
|
+
const manifestPath = join(tempDir, 'manifest.json');
|
|
565
|
+
onProgress?.('Reading manifest...');
|
|
566
|
+
// Load manifest
|
|
567
|
+
const manifest = await this.loadLocalManifest(manifestPath);
|
|
568
|
+
const manifestContent = await readFile(manifestPath, 'utf8');
|
|
569
|
+
const manifestHash = createHash('sha256').update(manifestContent).digest('hex');
|
|
570
|
+
// Check if we can skip sync
|
|
571
|
+
const existingCollection = await this.getCollection(collectionId);
|
|
572
|
+
if (!force && existingCollection?.manifest_hash === manifestHash) {
|
|
573
|
+
onProgress?.('Bundle unchanged, skipping sync');
|
|
574
|
+
return { added: 0, updated: 0, removed: 0, total: 0 };
|
|
575
|
+
}
|
|
576
|
+
onProgress?.('Resolving sources...');
|
|
577
|
+
// Resolve sources (always use 'file' protocol for extracted bundle)
|
|
578
|
+
const entries = await this.resolveManifestSources(manifest, tempDir, 'file');
|
|
579
|
+
// Get existing documents from database
|
|
580
|
+
const documentsService = this.#services.get(DocumentsService);
|
|
581
|
+
const existingDocs = await documentsService.getDocumentIds(collectionId);
|
|
582
|
+
const existingMap = new Map(existingDocs.map((doc) => [doc.id, doc.hash]));
|
|
583
|
+
// Compute changes
|
|
584
|
+
const toAdd = [];
|
|
585
|
+
const toUpdate = [];
|
|
586
|
+
const toRemove = [];
|
|
587
|
+
for (const entry of entries) {
|
|
588
|
+
const existingHash = existingMap.get(entry.id);
|
|
589
|
+
if (!existingHash) {
|
|
590
|
+
toAdd.push(entry);
|
|
591
|
+
}
|
|
592
|
+
else if (force) {
|
|
593
|
+
toUpdate.push(entry);
|
|
594
|
+
}
|
|
595
|
+
else if (entry.hash) {
|
|
596
|
+
if (existingHash !== entry.hash) {
|
|
597
|
+
toUpdate.push(entry);
|
|
598
|
+
}
|
|
599
|
+
}
|
|
600
|
+
else {
|
|
601
|
+
toUpdate.push(entry);
|
|
602
|
+
}
|
|
603
|
+
}
|
|
604
|
+
const currentIds = new Set(entries.map((e) => e.id));
|
|
605
|
+
for (const [id] of existingMap) {
|
|
606
|
+
if (!currentIds.has(id)) {
|
|
607
|
+
toRemove.push(id);
|
|
608
|
+
}
|
|
609
|
+
}
|
|
610
|
+
// Apply changes
|
|
611
|
+
if (toRemove.length > 0) {
|
|
612
|
+
onProgress?.(`Removing ${toRemove.length} deleted documents...`);
|
|
613
|
+
await documentsService.deleteDocuments(collectionId, toRemove);
|
|
614
|
+
}
|
|
615
|
+
const toProcess = [...toAdd, ...toUpdate];
|
|
616
|
+
let actualUpdated = 0;
|
|
617
|
+
for (let i = 0; i < toProcess.length; i++) {
|
|
618
|
+
const entry = toProcess[i];
|
|
619
|
+
const isNew = toAdd.includes(entry);
|
|
620
|
+
onProgress?.(`${isNew ? 'Adding' : 'Checking'} ${entry.id} (${i + 1}/${toProcess.length})...`);
|
|
621
|
+
const content = await this.fetchContent(entry.url);
|
|
622
|
+
const contentHash = createHash('sha256').update(content).digest('hex');
|
|
623
|
+
if (!isNew && !force && existingMap.get(entry.id) === contentHash) {
|
|
624
|
+
continue;
|
|
625
|
+
}
|
|
626
|
+
if (!isNew)
|
|
627
|
+
actualUpdated++;
|
|
628
|
+
await documentsService.updateDocument({
|
|
629
|
+
collection: collectionId,
|
|
630
|
+
id: entry.id,
|
|
631
|
+
content,
|
|
632
|
+
});
|
|
633
|
+
}
|
|
634
|
+
// Update collection record
|
|
635
|
+
await this.upsertCollection(collectionId, {
|
|
636
|
+
url: spec.url,
|
|
637
|
+
name: manifest.name,
|
|
638
|
+
version: manifest.version,
|
|
639
|
+
description: manifest.description ?? null,
|
|
640
|
+
manifest_hash: manifestHash,
|
|
641
|
+
last_sync_at: new Date().toISOString(),
|
|
642
|
+
});
|
|
643
|
+
return {
|
|
644
|
+
added: toAdd.length,
|
|
645
|
+
updated: actualUpdated,
|
|
646
|
+
removed: toRemove.length,
|
|
647
|
+
total: entries.length,
|
|
648
|
+
};
|
|
649
|
+
}
|
|
650
|
+
finally {
|
|
651
|
+
// Clean up temp directory
|
|
652
|
+
if (tempDir) {
|
|
653
|
+
await rm(tempDir, { recursive: true, force: true }).catch(() => undefined);
|
|
654
|
+
}
|
|
655
|
+
}
|
|
656
|
+
};
|
|
657
|
+
/**
|
|
658
|
+
* Sync a git collection.
|
|
659
|
+
* Clones the repository to a temp directory relative to cwd (to preserve includeIf git config),
|
|
660
|
+
* reads the manifest from the specified path, and syncs documents.
|
|
661
|
+
*/
|
|
662
|
+
syncGitCollection = async (name, spec, cwd = process.cwd(), options = {}) => {
|
|
663
|
+
const { force = false, onProgress } = options;
|
|
664
|
+
const collectionId = this.computeCollectionId(spec);
|
|
665
|
+
const parsed = parseGitUrl(spec.url);
|
|
666
|
+
let tempDir = null;
|
|
667
|
+
try {
|
|
668
|
+
// Create temp directory relative to cwd (preserves includeIf git config)
|
|
669
|
+
const tmpBase = join(cwd, '.ctxpkg', 'tmp');
|
|
670
|
+
mkdirSync(tmpBase, { recursive: true });
|
|
671
|
+
// Create unique temp dir
|
|
672
|
+
const uniqueId = Math.random().toString(36).substring(2, 10);
|
|
673
|
+
tempDir = join(tmpBase, `git-${uniqueId}`);
|
|
674
|
+
mkdirSync(tempDir, { recursive: true });
|
|
675
|
+
// Clone the repository
|
|
676
|
+
const refDisplay = parsed.ref ? ` @ ${parsed.ref}` : '';
|
|
677
|
+
onProgress?.(`Cloning ${parsed.cloneUrl}${refDisplay}...`);
|
|
678
|
+
const git = simpleGit();
|
|
679
|
+
// Build clone options - disable hooks for security
|
|
680
|
+
const cloneOptions = ['--config', 'core.hooksPath=/dev/null'];
|
|
681
|
+
// Use shallow clone when possible (not for commit SHAs)
|
|
682
|
+
const isCommitSha = parsed.ref && /^[a-f0-9]{7,40}$/i.test(parsed.ref);
|
|
683
|
+
if (!isCommitSha) {
|
|
684
|
+
cloneOptions.push('--depth', '1');
|
|
685
|
+
if (parsed.ref) {
|
|
686
|
+
cloneOptions.push('--branch', parsed.ref);
|
|
687
|
+
}
|
|
688
|
+
}
|
|
689
|
+
await git.clone(parsed.cloneUrl, tempDir, cloneOptions);
|
|
690
|
+
// For commit SHAs, checkout the specific commit
|
|
691
|
+
if (isCommitSha && parsed.ref) {
|
|
692
|
+
onProgress?.(`Checking out ${parsed.ref}...`);
|
|
693
|
+
await simpleGit(tempDir).checkout(parsed.ref);
|
|
694
|
+
}
|
|
695
|
+
// Locate manifest
|
|
696
|
+
const manifestPath = join(tempDir, parsed.manifestPath);
|
|
697
|
+
if (!existsSync(manifestPath)) {
|
|
698
|
+
throw new Error(`Manifest not found at ${parsed.manifestPath} in repository`);
|
|
699
|
+
}
|
|
700
|
+
onProgress?.('Reading manifest...');
|
|
701
|
+
// Load manifest
|
|
702
|
+
const manifest = await this.loadLocalManifest(manifestPath);
|
|
703
|
+
const manifestContent = await readFile(manifestPath, 'utf8');
|
|
704
|
+
const manifestHash = createHash('sha256').update(manifestContent).digest('hex');
|
|
705
|
+
// Check if we can skip sync
|
|
706
|
+
const existingCollection = await this.getCollection(collectionId);
|
|
707
|
+
if (!force && existingCollection?.manifest_hash === manifestHash) {
|
|
708
|
+
onProgress?.('Repository unchanged, skipping sync');
|
|
709
|
+
return { added: 0, updated: 0, removed: 0, total: 0 };
|
|
710
|
+
}
|
|
711
|
+
onProgress?.('Resolving sources...');
|
|
712
|
+
// Get manifest directory for resolving relative paths
|
|
713
|
+
const manifestDir = dirname(manifestPath);
|
|
714
|
+
// Resolve sources (always use 'file' protocol for cloned repo)
|
|
715
|
+
const entries = await this.resolveManifestSources(manifest, manifestDir, 'file');
|
|
716
|
+
// Get existing documents from database
|
|
717
|
+
const documentsService = this.#services.get(DocumentsService);
|
|
718
|
+
const existingDocs = await documentsService.getDocumentIds(collectionId);
|
|
719
|
+
const existingMap = new Map(existingDocs.map((doc) => [doc.id, doc.hash]));
|
|
720
|
+
// Compute changes
|
|
721
|
+
const toAdd = [];
|
|
722
|
+
const toUpdate = [];
|
|
723
|
+
const toRemove = [];
|
|
724
|
+
for (const entry of entries) {
|
|
725
|
+
const existingHash = existingMap.get(entry.id);
|
|
726
|
+
if (!existingHash) {
|
|
727
|
+
toAdd.push(entry);
|
|
728
|
+
}
|
|
729
|
+
else if (force) {
|
|
730
|
+
toUpdate.push(entry);
|
|
731
|
+
}
|
|
732
|
+
else if (entry.hash) {
|
|
733
|
+
if (existingHash !== entry.hash) {
|
|
734
|
+
toUpdate.push(entry);
|
|
735
|
+
}
|
|
736
|
+
}
|
|
737
|
+
else {
|
|
738
|
+
toUpdate.push(entry);
|
|
739
|
+
}
|
|
740
|
+
}
|
|
741
|
+
const currentIds = new Set(entries.map((e) => e.id));
|
|
742
|
+
for (const [id] of existingMap) {
|
|
743
|
+
if (!currentIds.has(id)) {
|
|
744
|
+
toRemove.push(id);
|
|
745
|
+
}
|
|
746
|
+
}
|
|
747
|
+
// Apply changes
|
|
748
|
+
if (toRemove.length > 0) {
|
|
749
|
+
onProgress?.(`Removing ${toRemove.length} deleted documents...`);
|
|
750
|
+
await documentsService.deleteDocuments(collectionId, toRemove);
|
|
751
|
+
}
|
|
752
|
+
const toProcess = [...toAdd, ...toUpdate];
|
|
753
|
+
let actualUpdated = 0;
|
|
754
|
+
for (let i = 0; i < toProcess.length; i++) {
|
|
755
|
+
const entry = toProcess[i];
|
|
756
|
+
const isNew = toAdd.includes(entry);
|
|
757
|
+
onProgress?.(`${isNew ? 'Adding' : 'Checking'} ${entry.id} (${i + 1}/${toProcess.length})...`);
|
|
758
|
+
const content = await this.fetchContent(entry.url);
|
|
759
|
+
const contentHash = createHash('sha256').update(content).digest('hex');
|
|
760
|
+
if (!isNew && !force && existingMap.get(entry.id) === contentHash) {
|
|
761
|
+
continue;
|
|
762
|
+
}
|
|
763
|
+
if (!isNew)
|
|
764
|
+
actualUpdated++;
|
|
765
|
+
await documentsService.updateDocument({
|
|
766
|
+
collection: collectionId,
|
|
767
|
+
id: entry.id,
|
|
768
|
+
content,
|
|
769
|
+
});
|
|
770
|
+
}
|
|
771
|
+
// Update collection record
|
|
772
|
+
await this.upsertCollection(collectionId, {
|
|
773
|
+
url: spec.url,
|
|
774
|
+
name: manifest.name,
|
|
775
|
+
version: manifest.version,
|
|
776
|
+
description: manifest.description ?? null,
|
|
777
|
+
manifest_hash: manifestHash,
|
|
778
|
+
last_sync_at: new Date().toISOString(),
|
|
779
|
+
});
|
|
780
|
+
return {
|
|
781
|
+
added: toAdd.length,
|
|
782
|
+
updated: actualUpdated,
|
|
783
|
+
removed: toRemove.length,
|
|
784
|
+
total: entries.length,
|
|
785
|
+
};
|
|
786
|
+
}
|
|
787
|
+
finally {
|
|
788
|
+
// Clean up temp directory
|
|
789
|
+
if (tempDir) {
|
|
790
|
+
await rm(tempDir, { recursive: true, force: true }).catch(() => undefined);
|
|
791
|
+
}
|
|
792
|
+
}
|
|
793
|
+
};
|
|
794
|
+
/**
|
|
795
|
+
* Sync a pkg collection.
|
|
796
|
+
*/
|
|
797
|
+
syncPkgCollection = async (name, spec, cwd = process.cwd(), options = {}) => {
|
|
798
|
+
const { force = false, onProgress } = options;
|
|
799
|
+
const collectionId = this.computeCollectionId(spec);
|
|
800
|
+
const parsed = this.parseManifestUrl(spec.url, cwd);
|
|
801
|
+
// This method only handles file/https, not git URLs
|
|
802
|
+
if (parsed.protocol === 'git') {
|
|
803
|
+
throw new Error('syncPkgCollection does not support git URLs');
|
|
804
|
+
}
|
|
805
|
+
const { protocol, path: manifestPath, isBundle } = parsed;
|
|
806
|
+
if (isBundle) {
|
|
807
|
+
return this.syncBundleCollection(name, spec, cwd, options);
|
|
808
|
+
}
|
|
809
|
+
onProgress?.(`Loading manifest from ${manifestPath}...`);
|
|
810
|
+
// Load and parse manifest based on protocol
|
|
811
|
+
let manifest;
|
|
812
|
+
let manifestContent;
|
|
813
|
+
let manifestDir;
|
|
814
|
+
if (protocol === 'file') {
|
|
815
|
+
manifest = await this.loadLocalManifest(manifestPath);
|
|
816
|
+
manifestContent = await readFile(manifestPath, 'utf8');
|
|
817
|
+
manifestDir = manifestPath.substring(0, manifestPath.lastIndexOf('/'));
|
|
818
|
+
}
|
|
819
|
+
else {
|
|
820
|
+
const result = await this.loadRemoteManifest(manifestPath);
|
|
821
|
+
manifest = result.manifest;
|
|
822
|
+
manifestContent = result.content;
|
|
823
|
+
manifestDir = this.getUrlDirectory(manifestPath);
|
|
824
|
+
}
|
|
825
|
+
// Check manifest hash to skip if unchanged
|
|
826
|
+
const manifestHash = createHash('sha256').update(manifestContent).digest('hex');
|
|
827
|
+
const existingCollection = await this.getCollection(collectionId);
|
|
828
|
+
if (!force && existingCollection?.manifest_hash === manifestHash) {
|
|
829
|
+
onProgress?.('Manifest unchanged, skipping sync');
|
|
830
|
+
return { added: 0, updated: 0, removed: 0, total: 0 };
|
|
831
|
+
}
|
|
832
|
+
onProgress?.('Resolving sources...');
|
|
833
|
+
// Resolve sources to file entries
|
|
834
|
+
const entries = await this.resolveManifestSources(manifest, manifestDir, protocol);
|
|
835
|
+
// Get existing documents from database
|
|
836
|
+
const documentsService = this.#services.get(DocumentsService);
|
|
837
|
+
const existingDocs = await documentsService.getDocumentIds(collectionId);
|
|
838
|
+
const existingMap = new Map(existingDocs.map((doc) => [doc.id, doc.hash]));
|
|
839
|
+
// Compute changes
|
|
840
|
+
const toAdd = [];
|
|
841
|
+
const toUpdate = [];
|
|
842
|
+
const toRemove = [];
|
|
843
|
+
for (const entry of entries) {
|
|
844
|
+
const existingHash = existingMap.get(entry.id);
|
|
845
|
+
if (!existingHash) {
|
|
846
|
+
toAdd.push(entry);
|
|
847
|
+
}
|
|
848
|
+
else if (force) {
|
|
849
|
+
toUpdate.push(entry);
|
|
850
|
+
}
|
|
851
|
+
else if (entry.hash) {
|
|
852
|
+
// Manifest provides hash, compare with stored hash
|
|
853
|
+
if (existingHash !== entry.hash) {
|
|
854
|
+
toUpdate.push(entry);
|
|
855
|
+
}
|
|
856
|
+
}
|
|
857
|
+
else {
|
|
858
|
+
// No manifest hash, need to fetch and compare
|
|
859
|
+
toUpdate.push(entry);
|
|
860
|
+
}
|
|
861
|
+
}
|
|
862
|
+
const currentIds = new Set(entries.map((e) => e.id));
|
|
863
|
+
for (const [id] of existingMap) {
|
|
864
|
+
if (!currentIds.has(id)) {
|
|
865
|
+
toRemove.push(id);
|
|
866
|
+
}
|
|
867
|
+
}
|
|
868
|
+
// Apply changes
|
|
869
|
+
if (toRemove.length > 0) {
|
|
870
|
+
onProgress?.(`Removing ${toRemove.length} deleted documents...`);
|
|
871
|
+
await documentsService.deleteDocuments(collectionId, toRemove);
|
|
872
|
+
}
|
|
873
|
+
const toProcess = [...toAdd, ...toUpdate];
|
|
874
|
+
let actualUpdated = 0;
|
|
875
|
+
for (let i = 0; i < toProcess.length; i++) {
|
|
876
|
+
const entry = toProcess[i];
|
|
877
|
+
const isNew = toAdd.includes(entry);
|
|
878
|
+
onProgress?.(`${isNew ? 'Adding' : 'Checking'} ${entry.id} (${i + 1}/${toProcess.length})...`);
|
|
879
|
+
try {
|
|
880
|
+
const content = await this.fetchContent(entry.url);
|
|
881
|
+
const contentHash = createHash('sha256').update(content).digest('hex');
|
|
882
|
+
// Skip if content hash matches (for entries without manifest hash)
|
|
883
|
+
if (!isNew && !force && existingMap.get(entry.id) === contentHash) {
|
|
884
|
+
continue;
|
|
885
|
+
}
|
|
886
|
+
if (!isNew)
|
|
887
|
+
actualUpdated++;
|
|
888
|
+
await documentsService.updateDocument({
|
|
889
|
+
collection: collectionId,
|
|
890
|
+
id: entry.id,
|
|
891
|
+
content,
|
|
892
|
+
});
|
|
893
|
+
}
|
|
894
|
+
catch (error) {
|
|
895
|
+
// Log warning but don't fail the entire sync for individual file failures
|
|
896
|
+
const errorMsg = error instanceof Error ? error.message : 'Unknown error';
|
|
897
|
+
onProgress?.(`Warning: Failed to fetch ${entry.id}: ${errorMsg}`);
|
|
898
|
+
}
|
|
899
|
+
}
|
|
900
|
+
// Update collection record
|
|
901
|
+
await this.upsertCollection(collectionId, {
|
|
902
|
+
url: spec.url,
|
|
903
|
+
name: manifest.name,
|
|
904
|
+
version: manifest.version,
|
|
905
|
+
description: manifest.description ?? null,
|
|
906
|
+
manifest_hash: manifestHash,
|
|
907
|
+
last_sync_at: new Date().toISOString(),
|
|
908
|
+
});
|
|
909
|
+
return {
|
|
910
|
+
added: toAdd.length,
|
|
911
|
+
updated: actualUpdated,
|
|
912
|
+
removed: toRemove.length,
|
|
913
|
+
total: entries.length,
|
|
914
|
+
};
|
|
915
|
+
};
|
|
916
|
+
/**
 * Sync all collections from project config.
 * Collections are synced sequentially so each one's progress messages,
 * forwarded with the collection name, arrive in order.
 * Returns a Map from collection name to its sync result.
 */
syncAllCollections = async (cwd = process.cwd(), options = {}) => {
    const { collections } = this.readProjectConfig(cwd);
    const results = new Map();
    for (const [name, spec] of Object.entries(collections)) {
        const syncOptions = {
            force: options.force,
            onProgress: (message) => options.onProgress?.(name, message),
        };
        results.set(name, await this.syncCollection(name, spec, cwd, syncOptions));
    }
    return results;
};
|
|
931
|
+
}
|
|
932
|
+
export { CollectionsService };
|
|
933
|
+
//# sourceMappingURL=collections.js.map
|