@unrdf/kgc-substrate 26.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +288 -0
- package/package.json +54 -0
- package/src/Allocator.mjs +321 -0
- package/src/KnowledgeStore.mjs +325 -0
- package/src/ReceiptChain.mjs +292 -0
- package/src/Router.mjs +382 -0
- package/src/TamperDetector.mjs +299 -0
- package/src/Workspace.mjs +556 -0
- package/src/index.mjs +23 -0
- package/src/types.mjs +136 -0
|
@@ -0,0 +1,556 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Workspace Isolation & IO Contracts
|
|
3
|
+
*
|
|
4
|
+
* Provides secure, isolated workspaces for multi-agent execution with:
|
|
5
|
+
* - Per-agent work directories under /tmp/kgc-workspace/{agentId}/{namespace}/
|
|
6
|
+
* - Declared input/output file sets with enforcement
|
|
7
|
+
* - Security guards preventing unauthorized access
|
|
8
|
+
* - Symlink escape protection
|
|
9
|
+
* - Cross-agent interference prevention
|
|
10
|
+
*
|
|
11
|
+
* @module @unrdf/kgc-substrate/workspace
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
import { z } from 'zod';
|
|
15
|
+
import { blake3 } from 'hash-wasm';
|
|
16
|
+
import * as fs from 'fs/promises';
|
|
17
|
+
import * as path from 'path';
|
|
18
|
+
import { existsSync } from 'fs';
|
|
19
|
+
|
|
20
|
+
/**
 * IO operation type — the four file operations a workspace can authorize.
 */
export const IOOperationType = z.enum(['read', 'write', 'delete', 'list']);

/**
 * IO operation schema: an operation kind plus the target path.
 * The path may be relative to the workspace root or absolute; absolute
 * paths are still subject to boundary and system-path checks.
 */
export const IOOperationSchema = z.object({
  type: IOOperationType,
  path: z.string(),
});

/**
 * @typedef {z.infer<typeof IOOperationSchema>} IOOperation
 */
|
|
36
|
+
|
|
37
|
+
/**
 * Workspace constraints schema.
 *
 * Declares the IO contract for a workspace. Path entries are matched
 * relative to the workspace root and may be exact files, directories
 * (admitting any file beneath them), or glob-style patterns using
 * '*' and '**'.
 */
export const WorkspaceConstraintsSchema = z.object({
  /** Set of allowed input file paths (relative to workspace root) */
  inputs: z.set(z.string()).default(() => new Set()),
  /** Set of allowed output file paths (relative to workspace root) */
  outputs: z.set(z.string()).default(() => new Set()),
  /** Set of paths accessible for both read and write */
  readWrite: z.set(z.string()).default(() => new Set()),
  /** Optional namespace for workspace organization */
  namespace: z.string().default('default'),
  /** Prevent access to sensitive system paths (default: blocked) */
  allowSystemPaths: z.boolean().default(false),
});

/**
 * @typedef {z.infer<typeof WorkspaceConstraintsSchema>} WorkspaceConstraints
 */
|
|
56
|
+
|
|
57
|
+
/**
 * Workspace base path — every agent workspace is created beneath this
 * directory, isolated under /tmp.
 */
const WORKSPACE_BASE = '/tmp/kgc-workspace';

/**
 * Blocked system paths that should never be accessible unless a workspace
 * was explicitly created with `allowSystemPaths: true`.
 */
const BLOCKED_PATHS = [
  '/etc',
  '/home',
  '/root',
  '/sys',
  '/proc',
  '/dev',
  '/boot',
  '/var/lib',
  '/usr/bin',
  '/usr/sbin',
  '/bin',
  '/sbin',
];
|
|
79
|
+
|
|
80
|
+
/**
 * Error raised when a workspace IO contract or isolation boundary is
 * violated. Carries a structured `details` object for audit logging.
 */
export class WorkspaceViolationError extends Error {
  /**
   * @param {string} message - Human-readable violation description
   * @param {Object} [details={}] - Structured violation context
   */
  constructor(message, details = {}) {
    super(message);
    this.details = details;
    this.name = 'WorkspaceViolationError';
  }
}
|
|
94
|
+
|
|
95
|
+
/**
 * Resolve a path and verify it doesn't escape the workspace via symlinks.
 *
 * Both the target path and the workspace root are canonicalized: if the
 * root itself sits behind a symlink (e.g. /tmp -> /private/tmp on macOS),
 * the realpath of a file inside it would never match the raw root prefix,
 * falsely rejecting every legitimate path.
 *
 * @param {string} targetPath - Path to resolve (relative or absolute)
 * @param {string} workspaceRoot - Workspace root directory
 * @returns {Promise<string>} Resolved absolute path
 * @throws {WorkspaceViolationError} If path escapes workspace
 */
async function resolveAndValidatePath(targetPath, workspaceRoot) {
  try {
    // Convert to absolute path relative to workspace
    const absolutePath = path.isAbsolute(targetPath)
      ? targetPath
      : path.join(workspaceRoot, targetPath);

    // Canonicalize the workspace root; fall back to the literal path when
    // the root has not been created yet.
    let canonicalRoot = workspaceRoot;
    try {
      canonicalRoot = await fs.realpath(workspaceRoot);
    } catch {
      // Root missing — literal comparison below still applies.
    }

    // Resolve symlinks on the target.
    let resolvedPath;
    try {
      resolvedPath = await fs.realpath(absolutePath);
    } catch {
      // File doesn't exist yet: resolve the parent directory instead so a
      // symlinked parent cannot smuggle the new file outside the workspace.
      const parentDir = path.dirname(absolutePath);
      if (existsSync(parentDir)) {
        const resolvedParent = await fs.realpath(parentDir);
        resolvedPath = path.join(resolvedParent, path.basename(absolutePath));
      } else {
        // Parent doesn't exist either; normalize '..' segments lexically.
        resolvedPath = path.resolve(absolutePath);
      }
    }

    // Accept the path if it lies under either form of the root: realpath
    // output matches canonicalRoot, lexical resolution matches workspaceRoot.
    const within = (p, root) => p === root || p.startsWith(root + path.sep);
    if (!within(resolvedPath, canonicalRoot) && !within(resolvedPath, workspaceRoot)) {
      throw new WorkspaceViolationError('Path escapes workspace boundary', {
        requestedPath: targetPath,
        resolvedPath,
        workspaceRoot,
      });
    }

    return resolvedPath;
  } catch (error) {
    if (error instanceof WorkspaceViolationError) {
      throw error;
    }
    // Wrap unexpected fs/path failures so callers see one error type.
    throw new WorkspaceViolationError('Path validation failed', {
      requestedPath: targetPath,
      error: error.message,
    });
  }
}
|
|
146
|
+
|
|
147
|
+
/**
 * Check if a path is, or lies inside, any blocked system path.
 *
 * Matches only the blocked directory itself or entries beneath it; a bare
 * prefix test would also reject unrelated siblings such as '/etcetera'
 * (prefix '/etc') or '/binaries' (prefix '/bin').
 *
 * @param {string} targetPath - Path to check
 * @returns {boolean} True if path is blocked
 */
function isBlockedPath(targetPath) {
  const normalized = path.resolve(targetPath);
  return BLOCKED_PATHS.some(
    (blocked) => normalized === blocked || normalized.startsWith(blocked + path.sep)
  );
}
|
|
157
|
+
|
|
158
|
+
/**
 * Canonicalize a path for comparison: collapse '.' / '..' / duplicate
 * separators via path.normalize, then strip a single trailing slash.
 *
 * @param {string} filePath - Path to normalize
 * @returns {string} Normalized path
 */
function normalizePath(filePath) {
  const collapsed = path.normalize(filePath);
  return collapsed.endsWith('/') ? collapsed.slice(0, -1) : collapsed;
}
|
|
167
|
+
|
|
168
|
+
/**
 * Check if a file path matches any pattern in the allowed set.
 *
 * Supports three match modes per pattern:
 *  - exact path match,
 *  - directory prefix (any file beneath the declared directory),
 *  - glob-style wildcards: '*' matches within one segment, '**' spans
 *    segments.
 *
 * @param {string} filePath - File path to check
 * @param {Set<string>} allowedSet - Set of allowed patterns
 * @returns {boolean} True if path is allowed
 */
function isPathAllowed(filePath, allowedSet) {
  // Local canonicalization: collapse '..'/'.' and strip a trailing slash.
  const canon = (p) => path.normalize(p).replace(/\/$/, '');
  const normalized = canon(filePath);

  for (const pattern of allowedSet) {
    const normalizedPattern = canon(pattern);

    // Exact match
    if (normalized === normalizedPattern) {
      return true;
    }

    // Directory prefix match (allows files within the directory)
    if (normalized.startsWith(normalizedPattern + path.sep)) {
      return true;
    }

    // Wildcard pattern support (simple * and ** patterns).
    // Escape regex metacharacters FIRST so a '.' in patterns like '*.json'
    // matches a literal dot rather than any character, and characters such
    // as '+' or '(' cannot break or widen the generated regex.
    if (pattern.includes('*')) {
      const regexPattern = pattern
        .replace(/[.+?^${}()|[\]\\]/g, '\\$&')
        .replace(/\*\*/g, '§DOUBLESTAR§')
        .replace(/\*/g, '[^/]*')
        .replace(/§DOUBLESTAR§/g, '.*');
      const regex = new RegExp(`^${regexPattern}$`);
      if (regex.test(normalized)) {
        return true;
      }
    }
  }

  return false;
}
|
|
206
|
+
|
|
207
|
+
/**
 * Create an isolated workspace for an agent.
 *
 * The workspace lives under /tmp/kgc-workspace/{agentId}/{namespace}/ and
 * every IO operation is validated against the declared input/output/
 * readWrite constraint sets before it is performed.
 *
 * @param {string} agentId - Unique agent identifier
 * @param {Partial<WorkspaceConstraints>} [constraints={}] - IO constraints
 * @returns {Promise<Workspace>} Workspace instance
 * @throws {WorkspaceViolationError} If the directory cannot be created, or
 *   if agentId/namespace would place the root outside the workspace base
 *
 * @example
 * const workspace = await createWorkspace('agent-1', {
 *   inputs: new Set(['data/input.json']),
 *   outputs: new Set(['results/output.json']),
 *   namespace: 'experiment-1',
 * });
 *
 * // Check if operation is allowed
 * await workspace.enforceIOContract({ type: 'write', path: 'results/output.json' });
 *
 * // Get workspace paths
 * const inputPath = workspace.resolvePath('data/input.json');
 */
export async function createWorkspace(agentId, constraints = {}) {
  const config = WorkspaceConstraintsSchema.parse(constraints);

  // Create workspace directory
  const workspaceRoot = path.join(WORKSPACE_BASE, agentId, config.namespace);

  // Guard against traversal via agentId/namespace (e.g. '../../etc'): the
  // joined root must remain strictly inside WORKSPACE_BASE, otherwise an
  // attacker-controlled id would relocate the workspace anywhere on disk.
  if (!path.resolve(workspaceRoot).startsWith(WORKSPACE_BASE + path.sep)) {
    throw new WorkspaceViolationError('Workspace root escapes base directory', {
      agentId,
      namespace: config.namespace,
      workspaceRoot,
    });
  }

  try {
    await fs.mkdir(workspaceRoot, { recursive: true });
  } catch (error) {
    throw new WorkspaceViolationError('Failed to create workspace directory', {
      workspaceRoot,
      error: error.message,
    });
  }

  /**
   * Enforcement log for audit trail
   * @type {Array<{timestamp: string, operation: IOOperation, allowed: boolean, reason?: string}>}
   */
  const enforcementLog = [];

  /**
   * Track files accessed for violation detection
   * @type {Set<string>}
   */
  const accessedFiles = new Set();

  return {
    /**
     * Get agent ID
     * @returns {string}
     */
    getAgentId() {
      return agentId;
    },

    /**
     * Get workspace root path
     * @returns {string}
     */
    getRoot() {
      return workspaceRoot;
    },

    /**
     * Get workspace configuration.
     *
     * Returns defensive copies of the constraint Sets so callers cannot
     * widen the IO contract by mutating the returned object.
     *
     * @returns {WorkspaceConstraints}
     */
    getConstraints() {
      return {
        ...config,
        inputs: new Set(config.inputs),
        outputs: new Set(config.outputs),
        readWrite: new Set(config.readWrite),
      };
    },

    /**
     * Resolve a relative path to absolute workspace path.
     * NOTE: performs no validation itself — callers should run
     * enforceIOContract first (the read/write/delete/list helpers do).
     *
     * @param {string} relativePath - Relative path within workspace
     * @returns {string} Absolute path
     */
    resolvePath(relativePath) {
      return path.join(workspaceRoot, relativePath);
    },

    /**
     * Enforce IO contract for an operation.
     *
     * Validates the operation shape, blocks system paths, resolves
     * symlinks, and checks the target against the declared constraint
     * sets. Every decision (allow or deny) is appended to the audit log.
     *
     * @param {IOOperation} operation - IO operation to validate
     * @returns {Promise<void>}
     * @throws {WorkspaceViolationError} If operation violates contract
     */
    async enforceIOContract(operation) {
      const validated = IOOperationSchema.parse(operation);
      const { type, path: targetPath } = validated;

      // Block absolute system paths early (before resolution)
      if (!config.allowSystemPaths && path.isAbsolute(targetPath) && isBlockedPath(targetPath)) {
        enforcementLog.push({
          timestamp: new Date().toISOString(),
          operation: validated,
          allowed: false,
          reason: 'Blocked system path',
        });
        throw new WorkspaceViolationError('Access to system path denied', {
          path: targetPath,
          operation: type,
        });
      }

      // Resolve path and check for symlink escapes
      const resolvedPath = await resolveAndValidatePath(targetPath, workspaceRoot);

      // Block system paths after resolution (catches symlinks and relative paths)
      if (!config.allowSystemPaths && isBlockedPath(resolvedPath)) {
        enforcementLog.push({
          timestamp: new Date().toISOString(),
          operation: validated,
          allowed: false,
          reason: 'Blocked system path (via symlink or resolution)',
        });
        throw new WorkspaceViolationError('Access to system path denied', {
          path: targetPath,
          resolvedPath,
          operation: type,
        });
      }

      // Get relative path for contract checking
      const relativePath = path.relative(workspaceRoot, resolvedPath);

      // Track file access
      accessedFiles.add(relativePath);

      // Check operation against declared constraints
      let allowed = false;
      let reason = '';

      switch (type) {
        case 'read':
        case 'list':
          // Allow if in inputs or readWrite sets
          allowed =
            isPathAllowed(relativePath, config.inputs) ||
            isPathAllowed(relativePath, config.readWrite);
          reason = allowed ? '' : 'Path not in declared inputs';
          break;

        case 'write':
        case 'delete':
          // Allow if in outputs or readWrite sets
          allowed =
            isPathAllowed(relativePath, config.outputs) ||
            isPathAllowed(relativePath, config.readWrite);
          reason = allowed ? '' : 'Path not in declared outputs';
          break;

        default:
          reason = 'Unknown operation type';
      }

      enforcementLog.push({
        timestamp: new Date().toISOString(),
        operation: validated,
        allowed,
        reason,
      });

      if (!allowed) {
        throw new WorkspaceViolationError(`IO contract violation: ${reason}`, {
          operation: type,
          path: relativePath,
          resolvedPath,
          constraints: {
            inputs: Array.from(config.inputs),
            outputs: Array.from(config.outputs),
            readWrite: Array.from(config.readWrite),
          },
        });
      }
    },

    /**
     * Read a file within workspace (contract-enforced).
     *
     * @param {string} filePath - File path relative to workspace
     * @returns {Promise<string>} File contents
     */
    async readFile(filePath) {
      await this.enforceIOContract({ type: 'read', path: filePath });
      const absolutePath = this.resolvePath(filePath);
      return await fs.readFile(absolutePath, 'utf-8');
    },

    /**
     * Write a file within workspace (contract-enforced).
     *
     * @param {string} filePath - File path relative to workspace
     * @param {string} content - File content
     * @returns {Promise<void>}
     */
    async writeFile(filePath, content) {
      await this.enforceIOContract({ type: 'write', path: filePath });
      const absolutePath = this.resolvePath(filePath);

      // Ensure parent directory exists
      const parentDir = path.dirname(absolutePath);
      await fs.mkdir(parentDir, { recursive: true });

      await fs.writeFile(absolutePath, content, 'utf-8');
    },

    /**
     * Delete a file within workspace (contract-enforced).
     *
     * @param {string} filePath - File path relative to workspace
     * @returns {Promise<void>}
     */
    async deleteFile(filePath) {
      await this.enforceIOContract({ type: 'delete', path: filePath });
      const absolutePath = this.resolvePath(filePath);
      await fs.unlink(absolutePath);
    },

    /**
     * List files in a directory within workspace (contract-enforced).
     *
     * @param {string} dirPath - Directory path relative to workspace
     * @returns {Promise<string[]>} List of file names
     */
    async listFiles(dirPath) {
      await this.enforceIOContract({ type: 'list', path: dirPath });
      const absolutePath = this.resolvePath(dirPath);
      return await fs.readdir(absolutePath);
    },

    /**
     * Get enforcement log for audit (returns a copy).
     *
     * @returns {Array} Enforcement log entries
     */
    getEnforcementLog() {
      return [...enforcementLog];
    },

    /**
     * Get set of accessed files (returns a copy).
     *
     * @returns {Set<string>} Accessed file paths
     */
    getAccessedFiles() {
      return new Set(accessedFiles);
    },

    /**
     * Clean up workspace (delete all files).
     *
     * @returns {Promise<void>}
     */
    async cleanup() {
      try {
        await fs.rm(workspaceRoot, { recursive: true, force: true });
      } catch (error) {
        throw new WorkspaceViolationError('Failed to cleanup workspace', {
          workspaceRoot,
          error: error.message,
        });
      }
    },

    /**
     * Generate workspace manifest with a BLAKE3 hash over its JSON form.
     *
     * @returns {Promise<Object>} Workspace manifest
     */
    async getManifest() {
      const manifest = {
        agentId,
        namespace: config.namespace,
        workspaceRoot,
        constraints: {
          inputs: Array.from(config.inputs),
          outputs: Array.from(config.outputs),
          readWrite: Array.from(config.readWrite),
        },
        accessedFiles: Array.from(accessedFiles),
        enforcementLog,
        timestamp: new Date().toISOString(),
      };

      const manifestHash = await blake3(JSON.stringify(manifest));

      return {
        ...manifest,
        manifestHash,
      };
    },
  };
}
|
|
506
|
+
|
|
507
|
+
/**
|
|
508
|
+
* @typedef {Awaited<ReturnType<typeof createWorkspace>>} Workspace
|
|
509
|
+
*/
|
|
510
|
+
|
|
511
|
+
/**
 * Verify two workspaces don't interfere with each other.
 *
 * Roots overlap only when they are equal or one is nested inside the other.
 * A bare startsWith check would falsely flag sibling directories sharing a
 * name prefix (e.g. '.../agent-1' vs '.../agent-10') as overlapping.
 *
 * @param {Workspace} workspace1 - First workspace
 * @param {Workspace} workspace2 - Second workspace
 * @returns {boolean} True if workspaces are isolated
 */
export function verifyWorkspaceIsolation(workspace1, workspace2) {
  const root1 = workspace1.getRoot();
  const root2 = workspace2.getRoot();

  // outer contains inner iff equal or inner sits below outer's separator.
  const contains = (outer, inner) => inner === outer || inner.startsWith(outer + path.sep);

  return !contains(root1, root2) && !contains(root2, root1);
}
|
|
525
|
+
|
|
526
|
+
/**
 * Convenience factory: a workspace whose contract permits only reads of the
 * given paths (empty output set — all writes and deletes are denied).
 *
 * @param {string} agentId - Unique agent identifier
 * @param {string[]} readOnlyPaths - Paths accessible for reading
 * @param {string} [namespace='default'] - Workspace namespace
 * @returns {Promise<Workspace>}
 */
export async function createReadOnlyWorkspace(agentId, readOnlyPaths, namespace = 'default') {
  const constraints = {
    inputs: new Set(readOnlyPaths),
    outputs: new Set(),
    namespace,
  };
  return createWorkspace(agentId, constraints);
}
|
|
541
|
+
|
|
542
|
+
/**
 * Convenience factory: a workspace whose contract permits only writes to the
 * given paths (empty input set — all reads and listings are denied).
 *
 * @param {string} agentId - Unique agent identifier
 * @param {string[]} writeOnlyPaths - Paths accessible for writing
 * @param {string} [namespace='default'] - Workspace namespace
 * @returns {Promise<Workspace>}
 */
export async function createWriteOnlyWorkspace(agentId, writeOnlyPaths, namespace = 'default') {
  const constraints = {
    inputs: new Set(),
    outputs: new Set(writeOnlyPaths),
    namespace,
  };
  return createWorkspace(agentId, constraints);
}
|
package/src/index.mjs
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
/**
 * KGC Substrate - Core KnowledgeStore, Receipt Chain, and Tamper Detection
 *
 * Public entry point for the package. Exports:
 * - KnowledgeStore: Deterministic, hash-stable, immutable append-only log
 * - ReceiptChain: Cryptographic receipt chain with merkle tree chaining
 * - TamperDetector: Tamper detection and verification for receipt chains
 * - Type validators and Zod schemas from types.mjs
 */

export { KnowledgeStore } from './KnowledgeStore.mjs';
export { ReceiptChain } from './ReceiptChain.mjs';
export { TamperDetector } from './TamperDetector.mjs';
export {
  validateStorageSnapshot,
  validateQueryPattern,
  validateTripleEntry,
  validateStateCommitment,
  StorageSnapshotSchema,
  QueryPatternSchema,
  TripleEntrySchema,
  StateCommitmentSchema,
} from './types.mjs';
|
package/src/types.mjs
ADDED
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
/**
 * KGC Substrate Type Definitions
 *
 * Defines types for StorageSnapshot, QueryPattern, and related substrate primitives.
 * Uses Zod for runtime validation with JSDoc for static type hints.
 */

import { z } from 'zod';

/**
 * Storage Snapshot Schema
 *
 * Represents an immutable snapshot of the KnowledgeStore state at a specific epoch.
 * All hashes are BLAKE3 for deterministic verification.
 *
 * @typedef {Object} StorageSnapshot
 * @property {number} epoch - Sequential epoch number (starts at 0)
 * @property {bigint} timestamp_ns - Nanosecond-precision timestamp
 * @property {string} quads_hash - BLAKE3 hash of canonically sorted quads
 * @property {string} commit_hash - Git blob hash for snapshot storage
 * @property {string} snapshot_id - UUID for snapshot identification
 * @property {number} [quad_count] - Optional quad count for metadata
 */
export const StorageSnapshotSchema = z.object({
  epoch: z.number().int().nonnegative(),
  timestamp_ns: z.bigint().nonnegative(),
  quads_hash: z.string().min(1),
  commit_hash: z.string().min(1),
  snapshot_id: z.string().uuid(),
  quad_count: z.number().int().nonnegative().optional(),
});
|
|
32
|
+
|
|
33
|
+
/**
 * Query Pattern Schema
 *
 * Triple pattern for querying the KnowledgeStore.
 * Null values act as wildcards (match any term in that position).
 *
 * @typedef {Object} QueryPattern
 * @property {Object|null} subject - RDF subject term or null for wildcard
 * @property {Object|null} predicate - RDF predicate term or null for wildcard
 * @property {Object|null} object - RDF object term or null for wildcard
 * @property {Object|null} [graph] - RDF graph term or null for wildcard
 */
export const QueryPatternSchema = z.object({
  subject: z.any().nullable(),
  predicate: z.any().nullable(),
  object: z.any().nullable(),
  graph: z.any().nullable().optional(),
});
|
|
51
|
+
|
|
52
|
+
/**
 * Triple Entry Schema
 *
 * Represents a single triple entry (an 'add' or 'delete' mutation) in the
 * append-only log.
 *
 * @typedef {Object} TripleEntry
 * @property {bigint} index - Sequential index in append-only log
 * @property {bigint} timestamp_ns - Nanosecond-precision timestamp
 * @property {'add'|'delete'} operation - Operation type
 * @property {Object} subject - RDF subject term
 * @property {Object} predicate - RDF predicate term
 * @property {Object} object - RDF object term
 * @property {Object} [graph] - RDF graph term (optional)
 */
export const TripleEntrySchema = z.object({
  index: z.bigint().nonnegative(),
  timestamp_ns: z.bigint().nonnegative(),
  operation: z.enum(['add', 'delete']),
  subject: z.any(),
  predicate: z.any(),
  object: z.any(),
  graph: z.any().optional(),
});
|
|
75
|
+
|
|
76
|
+
/**
 * State Commitment Schema
 *
 * Represents a cryptographic commitment to the current store state at a
 * given log index.
 *
 * @typedef {Object} StateCommitment
 * @property {string} state_hash - BLAKE3 hash of canonical store state
 * @property {bigint} log_index - Current append-only log index
 * @property {bigint} timestamp_ns - Nanosecond-precision timestamp
 * @property {number} quad_count - Total number of quads in store
 */
export const StateCommitmentSchema = z.object({
  state_hash: z.string().min(1),
  log_index: z.bigint().nonnegative(),
  timestamp_ns: z.bigint().nonnegative(),
  quad_count: z.number().int().nonnegative(),
});
|
|
93
|
+
|
|
94
|
+
/**
 * Parse and validate a candidate value as a StorageSnapshot.
 *
 * @param {any} data - Data to validate
 * @returns {StorageSnapshot} Validated snapshot
 * @throws {Error} Zod validation error when the shape is invalid
 */
export function validateStorageSnapshot(data) {
  const snapshot = StorageSnapshotSchema.parse(data);
  return snapshot;
}
|
|
104
|
+
|
|
105
|
+
/**
 * Parse and validate a candidate value as a QueryPattern.
 *
 * @param {any} data - Data to validate
 * @returns {QueryPattern} Validated query pattern
 * @throws {Error} Zod validation error when the shape is invalid
 */
export function validateQueryPattern(data) {
  const pattern = QueryPatternSchema.parse(data);
  return pattern;
}
|
|
115
|
+
|
|
116
|
+
/**
 * Parse and validate a candidate value as a TripleEntry.
 *
 * @param {any} data - Data to validate
 * @returns {TripleEntry} Validated triple entry
 * @throws {Error} Zod validation error when the shape is invalid
 */
export function validateTripleEntry(data) {
  const entry = TripleEntrySchema.parse(data);
  return entry;
}
|
|
126
|
+
|
|
127
|
+
/**
 * Parse and validate a candidate value as a StateCommitment.
 *
 * @param {any} data - Data to validate
 * @returns {StateCommitment} Validated state commitment
 * @throws {Error} Zod validation error when the shape is invalid
 */
export function validateStateCommitment(data) {
  const commitment = StateCommitmentSchema.parse(data);
  return commitment;
}
|