opentology 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +609 -0
- package/dist/commands/context.d.ts +29 -0
- package/dist/commands/context.js +369 -0
- package/dist/commands/delete.d.ts +2 -0
- package/dist/commands/delete.js +46 -0
- package/dist/commands/diff.d.ts +2 -0
- package/dist/commands/diff.js +43 -0
- package/dist/commands/drop.d.ts +2 -0
- package/dist/commands/drop.js +41 -0
- package/dist/commands/graph.d.ts +2 -0
- package/dist/commands/graph.js +130 -0
- package/dist/commands/infer.d.ts +2 -0
- package/dist/commands/infer.js +47 -0
- package/dist/commands/init.d.ts +2 -0
- package/dist/commands/init.js +53 -0
- package/dist/commands/mcp.d.ts +2 -0
- package/dist/commands/mcp.js +9 -0
- package/dist/commands/prefix.d.ts +2 -0
- package/dist/commands/prefix.js +73 -0
- package/dist/commands/pull.d.ts +2 -0
- package/dist/commands/pull.js +43 -0
- package/dist/commands/push.d.ts +2 -0
- package/dist/commands/push.js +79 -0
- package/dist/commands/query.d.ts +2 -0
- package/dist/commands/query.js +119 -0
- package/dist/commands/shapes.d.ts +2 -0
- package/dist/commands/shapes.js +67 -0
- package/dist/commands/status.d.ts +2 -0
- package/dist/commands/status.js +47 -0
- package/dist/commands/validate.d.ts +2 -0
- package/dist/commands/validate.js +46 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +38 -0
- package/dist/lib/codebase-scanner.d.ts +41 -0
- package/dist/lib/codebase-scanner.js +360 -0
- package/dist/lib/config.d.ts +16 -0
- package/dist/lib/config.js +70 -0
- package/dist/lib/embedded-adapter.d.ts +45 -0
- package/dist/lib/embedded-adapter.js +202 -0
- package/dist/lib/http-adapter.d.ts +41 -0
- package/dist/lib/http-adapter.js +169 -0
- package/dist/lib/oxigraph.d.ts +62 -0
- package/dist/lib/oxigraph.js +323 -0
- package/dist/lib/reasoner.d.ts +19 -0
- package/dist/lib/reasoner.js +310 -0
- package/dist/lib/shacl.d.ts +22 -0
- package/dist/lib/shacl.js +105 -0
- package/dist/lib/sparql-utils.d.ts +28 -0
- package/dist/lib/sparql-utils.js +217 -0
- package/dist/lib/store-adapter.d.ts +50 -0
- package/dist/lib/store-adapter.js +1 -0
- package/dist/lib/store-factory.d.ts +9 -0
- package/dist/lib/store-factory.js +71 -0
- package/dist/lib/validator.d.ts +10 -0
- package/dist/lib/validator.js +40 -0
- package/dist/mcp/server.d.ts +3 -0
- package/dist/mcp/server.js +1020 -0
- package/dist/templates/claude-md-context.d.ts +4 -0
- package/dist/templates/claude-md-context.js +104 -0
- package/dist/templates/otx-ontology.d.ts +2 -0
- package/dist/templates/otx-ontology.js +31 -0
- package/dist/templates/session-start-hook.d.ts +1 -0
- package/dist/templates/session-start-hook.js +94 -0
- package/dist/templates/slash-commands.d.ts +5 -0
- package/dist/templates/slash-commands.js +108 -0
- package/package.json +58 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
#!/usr/bin/env node
// opentology CLI entry point: builds a single commander `program`, attaches
// every subcommand via its register* function, then parses process.argv.
import { Command } from 'commander';
import { registerInit } from './commands/init.js';
import { registerValidate } from './commands/validate.js';
import { registerPush } from './commands/push.js';
import { registerQuery } from './commands/query.js';
import { registerStatus } from './commands/status.js';
import { registerPull } from './commands/pull.js';
import { registerDrop } from './commands/drop.js';
import { registerDelete } from './commands/delete.js';
import { registerMcp } from './commands/mcp.js';
import { registerShapes } from './commands/shapes.js';
import { registerDiff } from './commands/diff.js';
import { registerGraph } from './commands/graph.js';
import { registerInfer } from './commands/infer.js';
import { registerPrefix } from './commands/prefix.js';
import { registerContext } from './commands/context.js';
const program = new Command();
program
    .name('opentology')
    .version('0.1.0')
    .description('CLI-managed RDF/SPARQL infrastructure — Supabase for RDF');
// Each register* call adds one subcommand (and its options) onto `program`.
registerInit(program);
registerValidate(program);
registerPush(program);
registerQuery(program);
registerStatus(program);
registerPull(program);
registerDrop(program);
registerDelete(program);
registerMcp(program);
registerShapes(program);
registerDiff(program);
registerGraph(program);
registerInfer(program);
registerPrefix(program);
registerContext(program);
// Dispatch: parse argv and run the matched subcommand's action.
program.parse(process.argv);
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
/** One entry in the snapshot's directory listing. */
export interface DirectoryNode {
    name: string;
    type: 'file' | 'directory';
    /** Present only for directories; may be omitted when depth-pruned. */
    children?: DirectoryNode[];
}
/** A local import relationship: module `from` imports module `to`. */
export interface DependencyEdge {
    from: string;
    to: string;
}
/** Import graph over the project's own source modules (extension-free paths). */
export interface DependencyGraph {
    modules: string[];
    edges: DependencyEdge[];
}
/**
 * Size-capped structural snapshot of a codebase, as produced by scanCodebase.
 * Null fields mean the corresponding input file was absent or unreadable.
 */
export interface CodebaseSnapshot {
    /** Selected fields of package.json, or null when missing/invalid. */
    packageJson: {
        name?: string;
        version?: string;
        description?: string;
        scripts?: Record<string, string>;
        dependencies?: Record<string, string>;
        devDependencies?: Record<string, string>;
        engines?: Record<string, string>;
    } | null;
    /** Selected compilerOptions from tsconfig.json (falls back to jsconfig.json). */
    tsconfig: {
        target?: string;
        module?: string;
        strict?: boolean;
        paths?: Record<string, string[]>;
    } | null;
    directoryTree: DirectoryNode[];
    /** Entry-point files (from main/bin/exports) with possibly truncated contents. */
    entryPoints: Array<{
        path: string;
        content: string;
    }>;
    /** External (bare-specifier) package names imported by scanned files. */
    detectedImports: string[];
    dependencyGraph: DependencyGraph | null;
    readme: string | null;
    /** Set when the snapshot was reduced to fit the byte budget. */
    truncated?: boolean;
}
/** Build the local import graph; `gitFiles` (when non-null) filters to git-visible paths. */
export declare function extractDependencyGraph(rootDir: string, gitFiles: Set<string> | null): Promise<DependencyGraph>;
/** Scan `rootDir` into a snapshot no larger than `maxBytes` when serialized. */
export declare function scanCodebase(rootDir: string, maxBytes?: number): Promise<CodebaseSnapshot>;
|
|
@@ -0,0 +1,360 @@
|
|
|
1
|
+
import { readFile, readdir } from 'node:fs/promises';
|
|
2
|
+
import { join, relative } from 'node:path';
|
|
3
|
+
import { execFile } from 'node:child_process';
|
|
4
|
+
import { promisify } from 'node:util';
|
|
5
|
+
// Promise-returning wrapper around child_process.execFile (used for git).
const execFileAsync = promisify(execFile);
// Byte budget (15 KiB) for the serialized snapshot before truncation kicks in.
const DEFAULT_MAX_BYTES = 15360;
// Directory names that are always skipped, even when git would include them.
const HARDCODED_EXCLUDES = new Set(['.git', 'node_modules', 'dist', 'build', '.opentology', '.claude', '.omc']);
// Line cap applied when reading an entry-point file into the snapshot.
const MAX_ENTRY_POINT_LINES = 150;
// Line cap applied when reading README.md into the snapshot.
const MAX_README_LINES = 100;
|
|
10
|
+
// Best-effort JSON read: any failure (missing file, bad JSON, bad encoding)
// yields null instead of throwing.
async function readJsonFile(filePath) {
    let raw;
    try {
        raw = await readFile(filePath, 'utf-8');
    }
    catch {
        return null;
    }
    try {
        return JSON.parse(raw);
    }
    catch {
        return null;
    }
}
|
|
19
|
+
// Ask git for tracked plus untracked-but-not-ignored files under rootDir.
// Returns null when git is unavailable or rootDir is not a repository, which
// callers treat as "no git filter available".
async function getGitTrackedFiles(rootDir) {
    try {
        const { stdout } = await execFileAsync('git', ['ls-files', '--cached', '--others', '--exclude-standard'], { cwd: rootDir });
        const paths = stdout
            .split('\n')
            .map(line => line.trim())
            .filter(Boolean);
        return new Set(paths);
    }
    catch {
        return null;
    }
}
|
|
34
|
+
// Decide whether a directory entry should be skipped during scanning.
// Hard excludes always win; otherwise, with git metadata available, keep only
// paths that are (or contain) at least one git-visible file.
function isExcluded(name, gitFiles, relativePath) {
    if (HARDCODED_EXCLUDES.has(name))
        return true;
    if (gitFiles === null)
        return false;
    const dirPrefix = relativePath + '/';
    for (const tracked of gitFiles) {
        if (tracked === relativePath || tracked.startsWith(dirPrefix))
            return false;
    }
    return true;
}
|
|
47
|
+
// Produce a depth-limited, alphabetically sorted directory tree rooted at
// currentDir. Unreadable directories yield an empty list; entries that are
// neither plain files nor directories (e.g. symlinks) are dropped.
async function buildDirectoryTree(rootDir, currentDir, gitFiles, maxDepth, currentDepth = 0) {
    if (currentDepth >= maxDepth)
        return [];
    let entries;
    try {
        entries = await readdir(currentDir, { withFileTypes: true });
    }
    catch {
        return [];
    }
    entries.sort((a, b) => a.name.localeCompare(b.name));
    const nodes = [];
    for (const entry of entries) {
        const childPath = join(currentDir, entry.name);
        const relPath = relative(rootDir, childPath);
        if (isExcluded(entry.name, gitFiles, relPath))
            continue;
        if (entry.isDirectory()) {
            nodes.push({
                name: entry.name,
                type: 'directory',
                children: await buildDirectoryTree(rootDir, childPath, gitFiles, maxDepth, currentDepth + 1),
            });
        }
        else if (entry.isFile()) {
            nodes.push({ name: entry.name, type: 'file' });
        }
    }
    return nodes;
}
|
|
73
|
+
// Read a text file, capping it at maxLines lines (with a trailing marker when
// cut). Returns null on any read error.
async function readTruncatedFile(filePath, maxLines) {
    let content;
    try {
        content = await readFile(filePath, 'utf-8');
    }
    catch {
        return null;
    }
    const lines = content.split('\n');
    if (lines.length <= maxLines)
        return content;
    return lines.slice(0, maxLines).join('\n') + '\n... (truncated)';
}
|
|
86
|
+
// Collect candidate entry-point paths from a package.json object: `main`,
// `bin` (string or name→path map) and every string leaf of the `exports`
// map. Each candidate is emitted twice, deduplicated: first mapped back to
// its likely TypeScript source (dist/x.js → src/x.ts), then as written.
function detectEntryPoints(pkg) {
    const candidates = [];
    if (typeof pkg.main === 'string')
        candidates.push(pkg.main);
    if (typeof pkg.bin === 'string') {
        candidates.push(pkg.bin);
    }
    else if (pkg.bin && typeof pkg.bin === 'object') {
        candidates.push(...Object.values(pkg.bin));
    }
    if (pkg.exports && typeof pkg.exports === 'object') {
        // Depth-first walk of the (possibly nested) exports map.
        const collect = (node) => {
            if (typeof node === 'string') {
                candidates.push(node);
            }
            else if (node && typeof node === 'object') {
                Object.values(node).forEach(collect);
            }
        };
        collect(pkg.exports);
    }
    const seen = new Set();
    const result = [];
    const pushUnique = (p) => {
        if (!seen.has(p)) {
            seen.add(p);
            result.push(p);
        }
    };
    for (const candidate of candidates) {
        const normalized = candidate.replace(/^\.\//, '');
        // Source version first (dist/index.js → src/index.ts).
        pushUnique(normalized.replace(/^dist\//, 'src/').replace(/\.js$/, '.ts'));
        pushUnique(normalized);
    }
    return result;
}
|
|
127
|
+
// Scan the given files (plus up to 5 extra top-level src/*.ts files) for bare
// (non-relative) module specifiers and return the sorted set of package
// names, with scoped packages kept to their first two segments.
async function detectImports(rootDir, filePaths) {
    const imports = new Set();
    const scanPaths = [...filePaths];
    try {
        const srcEntries = await readdir(join(rootDir, 'src'), { withFileTypes: true });
        let added = 0;
        for (const entry of srcEntries) {
            if (added >= 5)
                break;
            if (!entry.isFile() || !entry.name.endsWith('.ts'))
                continue;
            const candidate = `src/${entry.name}`;
            if (!scanPaths.includes(candidate)) {
                scanPaths.push(candidate);
                added++;
            }
        }
    }
    catch { /* no src dir */ }
    // Matches bare specifiers in import/from clauses (hoisted out of the loop;
    // lastIndex is reset per file).
    const importRegex = /(?:import|from)\s+['"]([^./'][^'"]*)['"]/g;
    for (const filePath of scanPaths) {
        let content;
        try {
            content = await readFile(join(rootDir, filePath), 'utf-8');
        }
        catch {
            continue; // skip unreadable files
        }
        importRegex.lastIndex = 0;
        let match;
        while ((match = importRegex.exec(content)) !== null) {
            const specifier = match[1];
            if (specifier.startsWith('@')) {
                // Scoped package: keep "@scope/name".
                const segments = specifier.split('/');
                if (segments.length >= 2)
                    imports.add(`${segments[0]}/${segments[1]}`);
            }
            else {
                // Plain package: drop any deep-import path.
                imports.add(specifier.split('/')[0]);
            }
        }
    }
    return [...imports].sort();
}
|
|
169
|
+
// Recursively gather every .js/.jsx/.ts/.tsx file under `dir`, returning
// paths relative to rootDir. Excluded directories and unreadable paths are
// silently skipped.
async function collectSourceFiles(rootDir, dir, gitFiles) {
    let entries;
    try {
        entries = await readdir(dir, { withFileTypes: true });
    }
    catch {
        return [];
    }
    const found = [];
    for (const entry of entries) {
        const absPath = join(dir, entry.name);
        const relPath = relative(rootDir, absPath);
        if (isExcluded(entry.name, gitFiles, relPath))
            continue;
        if (entry.isDirectory()) {
            const nested = await collectSourceFiles(rootDir, absPath, gitFiles);
            found.push(...nested);
        }
        else if (entry.isFile() && /\.[tj]sx?$/.test(entry.name)) {
            found.push(relPath);
        }
    }
    return found;
}
|
|
191
|
+
// Matches relative (./ or ../) specifiers in import/export-from clauses.
const LOCAL_IMPORT_REGEX = /(?:import|export)\s+(?:(?:type\s+)?(?:\{[^}]*\}|[^'";\n]+)\s+from\s+)?['"](\.\.?\/[^'"]+)['"]/g;
/**
 * Resolve a relative import specifier against the importing file's path and
 * return a normalized, extension-free module name, or null when it resolves
 * to nothing (e.g. '.' from a root-level file).
 *
 * Fix: the original chained a second `.replace(/\.js$/, '')` after
 * `.replace(/\.[tj]sx?$/, '')` — dead code, since the first pattern already
 * strips a trailing `.js`. Removed.
 */
function normalizeImportPath(fromFile, importPath) {
    const fromDir = fromFile.includes('/') ? fromFile.substring(0, fromFile.lastIndexOf('/')) : '.';
    const joined = fromDir === '.' ? importPath : `${fromDir}/${importPath}`;
    const resolved = [];
    for (const part of joined.split('/')) {
        if (part === '.')
            continue;
        if (part === '..') {
            // Popping an empty stack is a no-op, which silently clamps paths
            // that escape the scan root.
            resolved.pop();
            continue;
        }
        resolved.push(part);
    }
    // Strip the source-file extension to get a stable module name.
    const result = resolved.join('/').replace(/\.[tj]sx?$/, '');
    return result || null;
}
|
|
210
|
+
/**
 * Build a module-level import graph for all source files under rootDir.
 * Modules are extension-free relative paths; an edge (from → to) is recorded
 * for each distinct local import that resolves to another source file,
 * either directly or via a `<dir>/index` file.
 *
 * Improvement: import targets are resolved with O(1) Set lookups against the
 * precomputed module set instead of the original per-import linear scan over
 * all source files (accidental O(files² · imports)), and edges are
 * deduplicated inline instead of via a second filtering pass. An exact
 * module match is preferred over a `<dir>/index` match.
 */
export async function extractDependencyGraph(rootDir, gitFiles) {
    const sourceFiles = await collectSourceFiles(rootDir, rootDir, gitFiles);
    const moduleSet = new Set();
    for (const filePath of sourceFiles) {
        moduleSet.add(filePath.replace(/\.[tj]sx?$/, ''));
    }
    const edgeKeys = new Set();
    const edges = [];
    for (const filePath of sourceFiles) {
        const moduleName = filePath.replace(/\.[tj]sx?$/, '');
        let content;
        try {
            content = await readFile(join(rootDir, filePath), 'utf-8');
        }
        catch {
            continue;
        }
        LOCAL_IMPORT_REGEX.lastIndex = 0;
        let match;
        while ((match = LOCAL_IMPORT_REGEX.exec(content)) !== null) {
            const resolved = normalizeImportPath(filePath, match[1]);
            if (!resolved)
                continue;
            // Exact module match wins; fall back to a directory index import.
            const targetModule = moduleSet.has(resolved)
                ? resolved
                : (moduleSet.has(`${resolved}/index`) ? `${resolved}/index` : null);
            if (targetModule === null || targetModule === moduleName)
                continue;
            const key = `${moduleName}->${targetModule}`;
            if (!edgeKeys.has(key)) {
                edgeKeys.add(key);
                edges.push({ from: moduleName, to: targetModule });
            }
        }
    }
    return {
        modules: [...moduleSet].sort(),
        edges,
    };
}
|
|
258
|
+
// Size of a value as it would be serialized for transport.
function byteLength(obj) {
    return Buffer.byteLength(JSON.stringify(obj), 'utf-8');
}
/**
 * Shrink `snapshot` (in place) toward `maxBytes` by progressively dropping
 * detail. Each stage only runs while the snapshot is still over budget, and
 * `truncated` is set whenever a stage triggers.
 *
 * Fix: the original appended a '... (truncated)' marker to entry-point and
 * README content even when the content was already within the stage's line
 * cap — corrupting (and slightly growing) short content. Content is now only
 * cut when it actually exceeds the cap.
 */
function applyTruncation(snapshot, maxBytes) {
    const overBudget = () => byteLength(snapshot) > maxBytes;
    // Cap `text` at `maxLines`; returns null when it is already short enough.
    const capLines = (text, maxLines) => {
        const lines = text.split('\n');
        if (lines.length <= maxLines)
            return null;
        return lines.slice(0, maxLines).join('\n') + '\n... (truncated)';
    };
    // Stage 1: shorten entry-point contents to 50 lines each.
    if (overBudget()) {
        for (const ep of snapshot.entryPoints) {
            const capped = capLines(ep.content, 50);
            if (capped !== null)
                ep.content = capped;
        }
        snapshot.truncated = true;
    }
    // Stage 2: trim README to 30 lines.
    if (overBudget() && snapshot.readme) {
        const capped = capLines(snapshot.readme, 30);
        if (capped !== null)
            snapshot.readme = capped;
        snapshot.truncated = true;
    }
    // Stage 3: prune the directory tree to depth 2.
    if (overBudget()) {
        const pruneDepth = (nodes, depth) => nodes.map(n => {
            if (n.type === 'directory' && n.children) {
                if (depth >= 1)
                    return { name: n.name, type: n.type };
                return { ...n, children: pruneDepth(n.children, depth + 1) };
            }
            return n;
        });
        snapshot.directoryTree = pruneDepth(snapshot.directoryTree, 0);
        snapshot.truncated = true;
    }
    // Stage 4: drop the detected external imports.
    if (overBudget()) {
        snapshot.detectedImports = [];
        snapshot.truncated = true;
    }
    // Stage 5: drop the dependency graph entirely.
    if (overBudget()) {
        snapshot.dependencyGraph = null;
        snapshot.truncated = true;
    }
    return snapshot;
}
|
|
303
|
+
/**
 * Produce a size-capped structural snapshot of the project at rootDir:
 * package metadata, compiler settings, directory layout (depth 3),
 * entry-point sources, external imports, the local dependency graph and the
 * README. If the serialized snapshot exceeds maxBytes it is reduced via
 * progressive truncation.
 */
export async function scanCodebase(rootDir, maxBytes = DEFAULT_MAX_BYTES) {
    const rawPkg = await readJsonFile(join(rootDir, 'package.json'));
    const packageJson = rawPkg ? {
        name: rawPkg.name,
        version: rawPkg.version,
        description: rawPkg.description,
        scripts: rawPkg.scripts,
        dependencies: rawPkg.dependencies,
        devDependencies: rawPkg.devDependencies,
        engines: rawPkg.engines,
    } : null;
    // tsconfig.json wins; jsconfig.json is the JavaScript-project fallback.
    const rawTsConfig = await readJsonFile(join(rootDir, 'tsconfig.json'))
        || await readJsonFile(join(rootDir, 'jsconfig.json'));
    const co = rawTsConfig?.compilerOptions;
    const tsconfig = rawTsConfig ? {
        target: co?.target,
        module: co?.module,
        strict: co?.strict,
        paths: co?.paths,
    } : null;
    // Git-visible files drive gitignore-aware exclusion (null → no filter).
    const gitFiles = await getGitTrackedFiles(rootDir);
    const directoryTree = await buildDirectoryTree(rootDir, rootDir, gitFiles, 3);
    const entryPointPaths = rawPkg ? detectEntryPoints(rawPkg) : [];
    const entryPoints = [];
    for (const entryPath of entryPointPaths) {
        const content = await readTruncatedFile(join(rootDir, entryPath), MAX_ENTRY_POINT_LINES);
        if (content !== null) {
            entryPoints.push({ path: entryPath, content });
        }
    }
    const snapshot = {
        packageJson,
        tsconfig,
        directoryTree,
        entryPoints,
        detectedImports: await detectImports(rootDir, entryPointPaths),
        dependencyGraph: await extractDependencyGraph(rootDir, gitFiles),
        readme: await readTruncatedFile(join(rootDir, 'README.md'), MAX_README_LINES),
    };
    // Enforce the byte cap with progressive truncation (mutates in place).
    return byteLength(snapshot) > maxBytes ? applyTruncation(snapshot, maxBytes) : snapshot;
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/** Shape of the .opentology.json project configuration file. */
export interface OpenTologyConfig {
    projectId: string;
    /** 'http' talks to a SPARQL endpoint; 'embedded' uses an in-process store. */
    mode: 'http' | 'embedded';
    /** Endpoint base URL; defaults to http://localhost:7878 in http mode. */
    endpoint?: string;
    /** Default named-graph URI for this project. */
    graphUri: string;
    /** Additional named graphs, keyed by short name. */
    graphs?: Record<string, string>;
    /** Files tracked per graph URI (paths relative to the working directory). */
    files?: Record<string, string[]>;
    /** SPARQL prefix declarations, keyed by prefix. */
    prefixes?: Record<string, string>;
}
/** Map an optional short graph name to its URI; default graph when omitted. Throws on unknown names. */
export declare function resolveGraphUri(config: OpenTologyConfig, graphName?: string): string;
/** Load .opentology.json from the cwd; throws when missing or unreadable. */
export declare function loadConfig(): OpenTologyConfig;
/** Write the config back to .opentology.json in the cwd; throws on failure. */
export declare function saveConfig(config: OpenTologyConfig): void;
/** Whether a .opentology.json exists in the cwd. */
export declare function configExists(): boolean;
/** Endpoint URL to use; throws in embedded mode when no endpoint is set. */
export declare function resolveEndpoint(config: OpenTologyConfig): string;
/** Files recorded as pushed to graphUri (empty array when none). */
export declare function getTrackedFiles(config: OpenTologyConfig, graphUri: string): string[];
/** Record filePath under graphUri (stored cwd-relative, deduplicated). */
export declare function addTrackedFile(config: OpenTologyConfig, graphUri: string, filePath: string): void;
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
import { readFileSync, writeFileSync, existsSync } from 'node:fs';
|
|
2
|
+
import path, { join } from 'node:path';
|
|
3
|
+
// Resolve an optional short graph name to a graph URI. No name means the
// project's default graph; unknown names are an error.
export function resolveGraphUri(config, graphName) {
    if (!graphName) {
        return config.graphUri;
    }
    const uri = config.graphs?.[graphName];
    if (uri) {
        return uri;
    }
    throw new Error(`Unknown graph: ${graphName}. Use 'opentology graph create ${graphName}' first.`);
}
|
|
13
|
+
// Per-project config file name, resolved against the working directory.
const CONFIG_FILENAME = '.opentology.json';
// Absolute path of the config file for the current working directory.
function configPath() {
    return join(process.cwd(), CONFIG_FILENAME);
}
|
|
17
|
+
// Read and parse .opentology.json from the working directory, filling in
// defaults: mode falls back to 'http', and http mode without an endpoint
// gets the local default. Throws when the file is missing or unreadable.
export function loadConfig() {
    const configFilePath = configPath();
    if (!existsSync(configFilePath)) {
        throw new Error(`Config file not found at ${configFilePath}. Run 'opentology init' first.`);
    }
    let config;
    try {
        config = JSON.parse(readFileSync(configFilePath, 'utf-8'));
    }
    catch (err) {
        throw new Error(`Failed to read config file at ${configFilePath}: ${err.message}`);
    }
    if (!config.mode) {
        config.mode = 'http';
    }
    if (config.mode === 'http' && !config.endpoint) {
        config.endpoint = 'http://localhost:7878';
    }
    return config;
}
|
|
37
|
+
// Persist the config to .opentology.json, pretty-printed with a trailing
// newline. Throws with context on any write failure.
export function saveConfig(config) {
    const configFilePath = configPath();
    const serialized = JSON.stringify(config, null, 2) + '\n';
    try {
        writeFileSync(configFilePath, serialized, 'utf-8');
    }
    catch (err) {
        throw new Error(`Failed to write config file at ${configFilePath}: ${err.message}`);
    }
}
|
|
46
|
+
// True when a .opentology.json is present in the working directory.
export function configExists() {
    const configFilePath = configPath();
    return existsSync(configFilePath);
}
|
|
49
|
+
// Determine the SPARQL endpoint URL: an explicitly configured endpoint
// always wins; otherwise http mode falls back to the local default, while
// embedded mode (which has no HTTP endpoint) is an error.
export function resolveEndpoint(config) {
    if (config.endpoint) {
        return config.endpoint;
    }
    if (config.mode !== 'embedded') {
        return 'http://localhost:7878';
    }
    throw new Error("No endpoint configured: project is in 'embedded' mode.");
}
|
|
57
|
+
// Files previously recorded for this graph; empty array when none.
export function getTrackedFiles(config, graphUri) {
    const tracked = config.files?.[graphUri];
    return tracked ?? [];
}
|
|
60
|
+
// Record filePath (stored relative to the cwd) under graphUri, creating the
// bookkeeping structures on first use. Duplicates are ignored so repeated
// pushes stay idempotent.
export function addTrackedFile(config, graphUri, filePath) {
    if (!config.files) {
        config.files = {};
    }
    if (!config.files[graphUri]) {
        config.files[graphUri] = [];
    }
    const relativePath = path.relative(process.cwd(), path.resolve(filePath));
    const tracked = config.files[graphUri];
    if (!tracked.includes(relativePath)) {
        tracked.push(relativePath);
    }
}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import type { StoreAdapter, SparqlResults } from './store-adapter.js';
|
|
2
|
+
/**
 * StoreAdapter implementation that operates on an in-process RDF store
 * (no HTTP endpoint involved), so 'embedded' projects get the same call
 * surface as the HTTP-backed adapter.
 */
export declare class EmbeddedAdapter implements StoreAdapter {
    private store;
    constructor();
    /**
     * Load Turtle data directly into a named graph.
     */
    loadTurtleIntoGraph(turtle: string, graphUri: string): void;
    /** Run a SPARQL ASK query and return its boolean result. */
    askQuery(query: string): Promise<boolean>;
    /** Run a SPARQL query and return its result set. */
    sparqlQuery(query: string): Promise<SparqlResults>;
    /** Execute a SPARQL UPDATE against the store. */
    sparqlUpdate(update: string): Promise<void>;
    /** Run a CONSTRUCT query; the result is returned as serialized RDF text. */
    constructQuery(query: string): Promise<string>;
    /** Insert Turtle triples into the given named graph. */
    insertTurtle(graphUri: string, turtle: string): Promise<void>;
    /** Number of triples currently in the named graph. */
    getGraphTripleCount(graphUri: string): Promise<number>;
    /** Serialize an entire named graph to text (Turtle). */
    exportGraph(graphUri: string): Promise<string>;
    /** Remove the named graph and all of its triples. */
    dropGraph(graphUri: string): Promise<void>;
    /** Delete triples, given either literally (turtle) or by a WHERE pattern. */
    deleteTriples(graphUri: string, options: {
        turtle?: string;
        where?: string;
    }): Promise<void>;
    /** Compare local Turtle against the stored graph, reporting per-triple differences. */
    diffGraph(graphUri: string, localTurtle: string): Promise<{
        added: string[];
        removed: string[];
        unchanged: number;
    }>;
    /** Summarize a graph: declared prefixes, classes, properties and size. */
    getSchemaOverview(graphUri: string): Promise<{
        prefixes: Record<string, string>;
        classes: string[];
        properties: string[];
        tripleCount: number;
    }>;
    /** Per-class drill-down: instance count, property usage and sample triples. */
    getClassDetails(graphUri: string, classUri: string): Promise<{
        classUri: string;
        instanceCount: number;
        properties: Array<{
            property: string;
            count: number;
        }>;
        sampleTriples: Array<{
            s: string;
            p: string;
            o: string;
        }>;
    }>;
}
|