clearctx 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +71 -0
- package/LICENSE +21 -0
- package/README.md +1006 -0
- package/STRATEGY.md +485 -0
- package/bin/cli.js +1756 -0
- package/bin/continuity-hook.js +118 -0
- package/bin/mcp.js +27 -0
- package/bin/setup.js +929 -0
- package/package.json +56 -0
- package/src/artifact-store.js +710 -0
- package/src/atomic-io.js +99 -0
- package/src/briefing-generator.js +451 -0
- package/src/continuity-hooks.js +253 -0
- package/src/contract-store.js +525 -0
- package/src/decision-journal.js +229 -0
- package/src/delegate.js +348 -0
- package/src/dependency-resolver.js +453 -0
- package/src/diff-engine.js +473 -0
- package/src/file-lock.js +161 -0
- package/src/index.js +61 -0
- package/src/lineage-graph.js +402 -0
- package/src/manager.js +510 -0
- package/src/mcp-server.js +3501 -0
- package/src/pattern-registry.js +221 -0
- package/src/pipeline-engine.js +618 -0
- package/src/prompts.js +1217 -0
- package/src/safety-net.js +170 -0
- package/src/session-snapshot.js +508 -0
- package/src/snapshot-engine.js +490 -0
- package/src/stale-detector.js +169 -0
- package/src/store.js +131 -0
- package/src/stream-session.js +463 -0
- package/src/team-hub.js +615 -0
|
@@ -0,0 +1,710 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* artifact-store.js
|
|
3
|
+
* Layer 2: Artifact Store with Versioning, Immutability, and Schema Validation
|
|
4
|
+
*
|
|
5
|
+
* This module provides a versioned artifact storage system that ensures:
|
|
6
|
+
* - Immutability: Once published, artifact versions cannot be changed
|
|
7
|
+
* - Schema validation: Data is validated against JSON schemas before storage
|
|
8
|
+
* - Version tracking: Each artifact can have multiple versions
|
|
9
|
+
* - Metadata: Rich metadata including tags, lineage, checksums
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
const fs = require('fs');
|
|
13
|
+
const path = require('path');
|
|
14
|
+
const os = require('os');
|
|
15
|
+
const crypto = require('crypto');
|
|
16
|
+
|
|
17
|
+
// Import Layer 1 utilities (being built in parallel)
|
|
18
|
+
const { atomicWriteJson, writeImmutable, readJsonSafe } = require('./atomic-io');
|
|
19
|
+
const { acquireLock, releaseLock } = require('./file-lock');
|
|
20
|
+
|
|
21
|
+
/**
|
|
22
|
+
* ArtifactStore manages versioned, immutable artifacts with schema validation
|
|
23
|
+
*
|
|
24
|
+
* Directory structure:
|
|
25
|
+
* team/{teamName}/artifacts/
|
|
26
|
+
* index.json - artifact registry (locked for writes)
|
|
27
|
+
* schemas/ - JSON schema files for well-known types
|
|
28
|
+
* data/{artifactId}/ - one dir per artifact
|
|
29
|
+
* v1.json, v2.json - immutable version files
|
|
30
|
+
*/
|
|
31
|
+
class ArtifactStore {
|
|
32
|
+
/**
|
|
33
|
+
* Create a new ArtifactStore instance
|
|
34
|
+
* @param {string} teamName - Name of the team (default: 'default')
|
|
35
|
+
*/
|
|
36
|
+
constructor(teamName = 'default') {
|
|
37
|
+
// Set up base directory structure
|
|
38
|
+
const baseDir = path.join(os.homedir(), '.clearctx');
|
|
39
|
+
const teamDir = path.join(baseDir, 'team', teamName);
|
|
40
|
+
|
|
41
|
+
// Define all directory paths
|
|
42
|
+
this.artifactsDir = path.join(teamDir, 'artifacts');
|
|
43
|
+
this.indexPath = path.join(this.artifactsDir, 'index.json');
|
|
44
|
+
this.schemasDir = path.join(this.artifactsDir, 'schemas');
|
|
45
|
+
this.dataDir = path.join(this.artifactsDir, 'data');
|
|
46
|
+
this.locksDir = path.join(teamDir, 'locks');
|
|
47
|
+
|
|
48
|
+
// Create all necessary directories
|
|
49
|
+
this._ensureDirectories();
|
|
50
|
+
|
|
51
|
+
// Create default schemas if they don't exist
|
|
52
|
+
this._createDefaultSchemas();
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
/**
|
|
56
|
+
* Ensure all required directories exist
|
|
57
|
+
* @private
|
|
58
|
+
*/
|
|
59
|
+
_ensureDirectories() {
|
|
60
|
+
// Create directories recursively if they don't exist
|
|
61
|
+
[this.artifactsDir, this.schemasDir, this.dataDir, this.locksDir].forEach(dir => {
|
|
62
|
+
if (!fs.existsSync(dir)) {
|
|
63
|
+
fs.mkdirSync(dir, { recursive: true });
|
|
64
|
+
}
|
|
65
|
+
});
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
/**
|
|
69
|
+
* Load a JSON schema for a specific artifact type
|
|
70
|
+
* @param {string} type - The artifact type
|
|
71
|
+
* @returns {Object|null} The schema object, or null if not found
|
|
72
|
+
* @private
|
|
73
|
+
*/
|
|
74
|
+
_loadSchema(type) {
|
|
75
|
+
const schemaPath = path.join(this.schemasDir, `${type}.json`);
|
|
76
|
+
|
|
77
|
+
// Try to read the schema file
|
|
78
|
+
try {
|
|
79
|
+
if (!fs.existsSync(schemaPath)) {
|
|
80
|
+
return null; // Custom types have no schema
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
const schemaContent = fs.readFileSync(schemaPath, 'utf8');
|
|
84
|
+
return JSON.parse(schemaContent);
|
|
85
|
+
} catch (err) {
|
|
86
|
+
// If schema file is corrupted or invalid, return null
|
|
87
|
+
return null;
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
/**
|
|
92
|
+
* Validate data against a JSON schema
|
|
93
|
+
* Implements basic JSON Schema validation (required fields, types, enums)
|
|
94
|
+
* @param {string} type - The artifact type
|
|
95
|
+
* @param {Object} data - The data to validate
|
|
96
|
+
* @returns {Object} { valid: true } or { valid: false, errors: [...] }
|
|
97
|
+
* @private
|
|
98
|
+
*/
|
|
99
|
+
_validateData(type, data) {
|
|
100
|
+
// Load the schema for this type
|
|
101
|
+
const schema = this._loadSchema(type);
|
|
102
|
+
|
|
103
|
+
// If no schema exists, validation passes (custom types)
|
|
104
|
+
if (!schema) {
|
|
105
|
+
return { valid: true };
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
const errors = [];
|
|
109
|
+
|
|
110
|
+
// Check required fields
|
|
111
|
+
if (schema.required && Array.isArray(schema.required)) {
|
|
112
|
+
for (const field of schema.required) {
|
|
113
|
+
if (!(field in data)) {
|
|
114
|
+
// Build helpful error message with type-specific suggestions
|
|
115
|
+
let errorMsg = `field '${field}' is required`;
|
|
116
|
+
|
|
117
|
+
// Add helpful suggestions for well-known types
|
|
118
|
+
const typeHints = {
|
|
119
|
+
'api-contract': `type 'api-contract' requires an 'endpoints' field. If this artifact isn't an API contract, use type 'custom' instead.`,
|
|
120
|
+
'schema-change': `type 'schema-change' requires a 'models' field. If this artifact isn't a database schema change, use type 'custom' instead.`,
|
|
121
|
+
'test-results': `type 'test-results' requires 'total', 'passed', and 'failed' fields. If this artifact isn't test results, use type 'custom' instead.`,
|
|
122
|
+
'component-spec': `type 'component-spec' requires a 'componentName' field. If this artifact isn't a component specification, use type 'custom' instead.`,
|
|
123
|
+
'file-manifest': `type 'file-manifest' requires a 'files' field. If this artifact isn't a file manifest, use type 'custom' instead.`,
|
|
124
|
+
'config-change': `type 'config-change' requires a 'changes' field. If this artifact isn't a configuration change, use type 'custom' instead.`
|
|
125
|
+
};
|
|
126
|
+
|
|
127
|
+
if (typeHints[type]) {
|
|
128
|
+
errorMsg = `Schema validation failed: ${typeHints[type]}`;
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
errors.push(errorMsg);
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
// Check types for each field in schema.properties
|
|
137
|
+
if (schema.properties) {
|
|
138
|
+
for (const [field, fieldSchema] of Object.entries(schema.properties)) {
|
|
139
|
+
// Only validate if field exists in data
|
|
140
|
+
if (field in data) {
|
|
141
|
+
const value = data[field];
|
|
142
|
+
const expectedType = fieldSchema.type;
|
|
143
|
+
|
|
144
|
+
// Type checking
|
|
145
|
+
if (expectedType) {
|
|
146
|
+
let actualType = typeof value;
|
|
147
|
+
|
|
148
|
+
// Special case: arrays
|
|
149
|
+
if (expectedType === 'array') {
|
|
150
|
+
if (!Array.isArray(value)) {
|
|
151
|
+
errors.push(`field '${field}' must be an array`);
|
|
152
|
+
continue;
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
// Check array items if schema defines items
|
|
156
|
+
if (fieldSchema.items) {
|
|
157
|
+
const itemsSchema = fieldSchema.items;
|
|
158
|
+
|
|
159
|
+
// Check each array item
|
|
160
|
+
for (let i = 0; i < value.length; i++) {
|
|
161
|
+
const item = value[i];
|
|
162
|
+
|
|
163
|
+
// Check item type
|
|
164
|
+
if (itemsSchema.type) {
|
|
165
|
+
const itemActualType = typeof item;
|
|
166
|
+
if (itemsSchema.type === 'object' && itemActualType !== 'object') {
|
|
167
|
+
errors.push(`field '${field}[${i}]' must be an object`);
|
|
168
|
+
} else if (itemsSchema.type !== 'object' && itemActualType !== itemsSchema.type) {
|
|
169
|
+
errors.push(`field '${field}[${i}]' must be a ${itemsSchema.type}`);
|
|
170
|
+
}
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
// Check required fields in array items
|
|
174
|
+
if (itemsSchema.required && typeof item === 'object') {
|
|
175
|
+
for (const requiredField of itemsSchema.required) {
|
|
176
|
+
if (!(requiredField in item)) {
|
|
177
|
+
errors.push(`field '${field}[${i}].${requiredField}' is required`);
|
|
178
|
+
}
|
|
179
|
+
}
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
// Check enum values in array items
|
|
183
|
+
if (itemsSchema.properties) {
|
|
184
|
+
for (const [itemField, itemFieldSchema] of Object.entries(itemsSchema.properties)) {
|
|
185
|
+
if (itemField in item && itemFieldSchema.enum) {
|
|
186
|
+
if (!itemFieldSchema.enum.includes(item[itemField])) {
|
|
187
|
+
errors.push(`field '${field}[${i}].${itemField}' must be one of: ${itemFieldSchema.enum.join(', ')}`);
|
|
188
|
+
}
|
|
189
|
+
}
|
|
190
|
+
}
|
|
191
|
+
}
|
|
192
|
+
}
|
|
193
|
+
}
|
|
194
|
+
} else if (expectedType === 'object') {
|
|
195
|
+
// Check if value is an object (but not null or array)
|
|
196
|
+
if (actualType !== 'object' || value === null || Array.isArray(value)) {
|
|
197
|
+
errors.push(`field '${field}' must be an object`);
|
|
198
|
+
}
|
|
199
|
+
} else if (expectedType === 'number') {
|
|
200
|
+
if (actualType !== 'number') {
|
|
201
|
+
errors.push(`field '${field}' must be a number`);
|
|
202
|
+
}
|
|
203
|
+
} else if (expectedType === 'string') {
|
|
204
|
+
if (actualType !== 'string') {
|
|
205
|
+
errors.push(`field '${field}' must be a string`);
|
|
206
|
+
}
|
|
207
|
+
} else if (expectedType === 'boolean') {
|
|
208
|
+
if (actualType !== 'boolean') {
|
|
209
|
+
errors.push(`field '${field}' must be a boolean`);
|
|
210
|
+
}
|
|
211
|
+
}
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
// Enum validation
|
|
215
|
+
if (fieldSchema.enum && !fieldSchema.enum.includes(value)) {
|
|
216
|
+
errors.push(`field '${field}' must be one of: ${fieldSchema.enum.join(', ')}`);
|
|
217
|
+
}
|
|
218
|
+
}
|
|
219
|
+
}
|
|
220
|
+
}
|
|
221
|
+
|
|
222
|
+
// Return validation result
|
|
223
|
+
if (errors.length > 0) {
|
|
224
|
+
return { valid: false, errors };
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
return { valid: true };
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
  /**
   * Seed the schemas/ directory with JSON schemas for the well-known
   * artifact types. Each file is written only if it does not already
   * exist, so user-customized schemas are never overwritten. Write
   * failures are deliberately ignored — schemas are an optional layer.
   * @private
   */
  _createDefaultSchemas() {
    // Minimal JSON-Schema-style definitions consumed by _validateData
    // (only required / properties / type / enum / items are honored).
    const defaultSchemas = {
      'api-contract': {
        type: 'object',
        required: ['endpoints'],
        properties: {
          endpoints: {
            type: 'array',
            items: {
              type: 'object',
              required: ['method', 'path'],
              properties: {
                method: {
                  type: 'string',
                  enum: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE']
                },
                path: {
                  type: 'string'
                }
              }
            }
          }
        }
      },
      'schema-change': {
        type: 'object',
        required: ['models'],
        properties: {
          models: {
            type: 'array',
            items: {
              type: 'object',
              required: ['name'],
              properties: {
                name: {
                  type: 'string'
                }
              }
            }
          }
        }
      },
      'test-results': {
        type: 'object',
        required: ['total', 'passed', 'failed'],
        properties: {
          total: {
            type: 'number'
          },
          passed: {
            type: 'number'
          },
          failed: {
            type: 'number'
          }
        }
      },
      'component-spec': {
        type: 'object',
        required: ['componentName'],
        properties: {
          componentName: {
            type: 'string'
          }
        }
      },
      'file-manifest': {
        type: 'object',
        required: ['files'],
        properties: {
          files: {
            type: 'array'
          }
        }
      },
      'config-change': {
        type: 'object',
        required: ['changes'],
        properties: {
          changes: {
            type: 'array'
          }
        }
      }
    };

    // Write each schema file only if missing (never clobber user edits).
    for (const [type, schema] of Object.entries(defaultSchemas)) {
      const schemaPath = path.join(this.schemasDir, `${type}.json`);

      if (!fs.existsSync(schemaPath)) {
        try {
          fs.writeFileSync(schemaPath, JSON.stringify(schema, null, 2), 'utf8');
        } catch (err) {
          // Ignore errors - schemas are optional
        }
      }
    }
  }
|
|
334
|
+
|
|
335
|
+
/**
|
|
336
|
+
* Calculate SHA-256 checksum for data
|
|
337
|
+
* @param {Object} data - The data to checksum
|
|
338
|
+
* @returns {string} Checksum prefixed with 'sha256:'
|
|
339
|
+
* @private
|
|
340
|
+
*/
|
|
341
|
+
_calculateChecksum(data) {
|
|
342
|
+
const hash = crypto.createHash('sha256')
|
|
343
|
+
.update(JSON.stringify(data))
|
|
344
|
+
.digest('hex');
|
|
345
|
+
return `sha256:${hash}`;
|
|
346
|
+
}
|
|
347
|
+
|
|
348
|
+
/**
|
|
349
|
+
* Publish a new version of an artifact
|
|
350
|
+
* @param {string} artifactId - Unique identifier for the artifact
|
|
351
|
+
* @param {Object} options - Publication options
|
|
352
|
+
* @param {string} options.type - Artifact type (e.g., 'api-contract')
|
|
353
|
+
* @param {string} options.name - Human-readable name
|
|
354
|
+
* @param {string} [options.summary=''] - Brief description of this version
|
|
355
|
+
* @param {Object} options.data - The actual artifact data
|
|
356
|
+
* @param {string[]} [options.tags=[]] - Tags for categorization
|
|
357
|
+
* @param {string} options.publisher - Who/what published this version
|
|
358
|
+
* @param {string[]} [options.derivedFrom=[]] - Artifact IDs this was derived from
|
|
359
|
+
* @returns {Object} { artifactId, version, type }
|
|
360
|
+
*/
|
|
361
|
+
publish(artifactId, { type, name, summary = '', data, tags = [], publisher, derivedFrom = [] }) {
|
|
362
|
+
// Step 1: Validate data against schema
|
|
363
|
+
const validation = this._validateData(type, data);
|
|
364
|
+
if (!validation.valid) {
|
|
365
|
+
throw new Error(`Schema validation failed: ${validation.errors.join(', ')}`);
|
|
366
|
+
}
|
|
367
|
+
|
|
368
|
+
// Step 2: Acquire lock on the artifacts index
|
|
369
|
+
acquireLock(this.locksDir, 'artifacts-index');
|
|
370
|
+
|
|
371
|
+
try {
|
|
372
|
+
// Step 3: Read the current index
|
|
373
|
+
const index = readJsonSafe(this.indexPath, {});
|
|
374
|
+
|
|
375
|
+
// Step 4: Determine the version number
|
|
376
|
+
let version = 1;
|
|
377
|
+
let createdAt = new Date().toISOString();
|
|
378
|
+
|
|
379
|
+
if (index[artifactId]) {
|
|
380
|
+
// Artifact exists - increment version
|
|
381
|
+
version = index[artifactId].latestVersion + 1;
|
|
382
|
+
createdAt = index[artifactId].createdAt; // Preserve original creation time
|
|
383
|
+
}
|
|
384
|
+
|
|
385
|
+
// Step 5: Create version file data
|
|
386
|
+
const versionData = {
|
|
387
|
+
artifactId,
|
|
388
|
+
version,
|
|
389
|
+
type,
|
|
390
|
+
publisher,
|
|
391
|
+
publishedAt: new Date().toISOString(),
|
|
392
|
+
summary,
|
|
393
|
+
name,
|
|
394
|
+
data,
|
|
395
|
+
tags,
|
|
396
|
+
lineage: {
|
|
397
|
+
producedBy: null,
|
|
398
|
+
derivedFrom
|
|
399
|
+
},
|
|
400
|
+
checksum: this._calculateChecksum(data)
|
|
401
|
+
};
|
|
402
|
+
|
|
403
|
+
// Step 6: Write the version file (immutable)
|
|
404
|
+
const artifactDir = path.join(this.dataDir, artifactId);
|
|
405
|
+
|
|
406
|
+
// Ensure artifact directory exists
|
|
407
|
+
if (!fs.existsSync(artifactDir)) {
|
|
408
|
+
fs.mkdirSync(artifactDir, { recursive: true });
|
|
409
|
+
}
|
|
410
|
+
|
|
411
|
+
const versionPath = path.join(artifactDir, `v${version}.json`);
|
|
412
|
+
|
|
413
|
+
try {
|
|
414
|
+
// Attempt to write the immutable version file
|
|
415
|
+
writeImmutable(versionPath, versionData);
|
|
416
|
+
} catch (err) {
|
|
417
|
+
// Handle race condition - file already exists
|
|
418
|
+
if (err.code === 'EEXIST') {
|
|
419
|
+
// Re-read index to get actual latest version
|
|
420
|
+
const freshIndex = readJsonSafe(this.indexPath, {});
|
|
421
|
+
const freshLatest = freshIndex[artifactId]?.latestVersion || version;
|
|
422
|
+
version = freshLatest + 1;
|
|
423
|
+
const updatedData = { ...versionData, version, publishedAt: new Date().toISOString() };
|
|
424
|
+
const retryPath = path.join(artifactDir, `v${version}.json`);
|
|
425
|
+
writeImmutable(retryPath, updatedData);
|
|
426
|
+
versionData = updatedData; // Update for index write below
|
|
427
|
+
} else {
|
|
428
|
+
throw err;
|
|
429
|
+
}
|
|
430
|
+
}
|
|
431
|
+
|
|
432
|
+
// Step 7: Update the index entry
|
|
433
|
+
index[artifactId] = {
|
|
434
|
+
artifactId,
|
|
435
|
+
type,
|
|
436
|
+
name,
|
|
437
|
+
publisher,
|
|
438
|
+
createdAt,
|
|
439
|
+
updatedAt: new Date().toISOString(),
|
|
440
|
+
latestVersion: version,
|
|
441
|
+
tags
|
|
442
|
+
};
|
|
443
|
+
|
|
444
|
+
// Step 8: Save the index atomically
|
|
445
|
+
atomicWriteJson(this.indexPath, index);
|
|
446
|
+
|
|
447
|
+
// Step 9: Release the lock
|
|
448
|
+
releaseLock(this.locksDir, 'artifacts-index');
|
|
449
|
+
|
|
450
|
+
// Step 10: Return publication result
|
|
451
|
+
return { artifactId, version, type };
|
|
452
|
+
|
|
453
|
+
} catch (err) {
|
|
454
|
+
// Ensure lock is released even on error
|
|
455
|
+
releaseLock(this.locksDir, 'artifacts-index');
|
|
456
|
+
throw err;
|
|
457
|
+
}
|
|
458
|
+
}
|
|
459
|
+
|
|
460
|
+
/**
|
|
461
|
+
* Get a specific version of an artifact
|
|
462
|
+
* @param {string} artifactId - The artifact identifier
|
|
463
|
+
* @param {number|null} [version=null] - Version number (null = latest)
|
|
464
|
+
* @returns {Object|null} The version data, or null if not found
|
|
465
|
+
*/
|
|
466
|
+
get(artifactId, version = null) {
|
|
467
|
+
// If version is null, get the latest version from index
|
|
468
|
+
if (version === null) {
|
|
469
|
+
const index = readJsonSafe(this.indexPath, {});
|
|
470
|
+
|
|
471
|
+
if (!index[artifactId]) {
|
|
472
|
+
return null; // Artifact doesn't exist
|
|
473
|
+
}
|
|
474
|
+
|
|
475
|
+
version = index[artifactId].latestVersion;
|
|
476
|
+
}
|
|
477
|
+
|
|
478
|
+
// Read the version file
|
|
479
|
+
const versionPath = path.join(this.dataDir, artifactId, `v${version}.json`);
|
|
480
|
+
|
|
481
|
+
if (!fs.existsSync(versionPath)) {
|
|
482
|
+
return null; // Version doesn't exist
|
|
483
|
+
}
|
|
484
|
+
|
|
485
|
+
try {
|
|
486
|
+
const content = fs.readFileSync(versionPath, 'utf8');
|
|
487
|
+
return JSON.parse(content);
|
|
488
|
+
} catch (err) {
|
|
489
|
+
return null; // Corrupted or invalid file
|
|
490
|
+
}
|
|
491
|
+
}
|
|
492
|
+
|
|
493
|
+
/**
|
|
494
|
+
* Track that a session read an artifact
|
|
495
|
+
* @param {string} artifactId - The artifact that was read
|
|
496
|
+
* @param {string} reader - The session name that read it
|
|
497
|
+
* @param {number} version - The version that was read
|
|
498
|
+
*/
|
|
499
|
+
trackRead(artifactId, reader, version) {
|
|
500
|
+
if (!reader) return; // Skip if no reader identified
|
|
501
|
+
|
|
502
|
+
const readsPath = path.join(this.dataDir, artifactId, 'reads.json');
|
|
503
|
+
|
|
504
|
+
// Read existing reads log
|
|
505
|
+
let reads = readJsonSafe(readsPath, []);
|
|
506
|
+
|
|
507
|
+
// Add this read event
|
|
508
|
+
reads.push({
|
|
509
|
+
reader,
|
|
510
|
+
version,
|
|
511
|
+
readAt: new Date().toISOString(),
|
|
512
|
+
});
|
|
513
|
+
|
|
514
|
+
// Write back (not immutable — reads are append-only log)
|
|
515
|
+
const dir = path.join(this.dataDir, artifactId);
|
|
516
|
+
if (!fs.existsSync(dir)) {
|
|
517
|
+
fs.mkdirSync(dir, { recursive: true });
|
|
518
|
+
}
|
|
519
|
+
fs.writeFileSync(readsPath, JSON.stringify(reads, null, 2));
|
|
520
|
+
}
|
|
521
|
+
|
|
522
|
+
/**
|
|
523
|
+
* Get the read log for an artifact
|
|
524
|
+
* @param {string} artifactId - The artifact to check
|
|
525
|
+
* @returns {Array} Array of read events: [{ reader, version, readAt }]
|
|
526
|
+
*/
|
|
527
|
+
getReads(artifactId) {
|
|
528
|
+
const readsPath = path.join(this.dataDir, artifactId, 'reads.json');
|
|
529
|
+
return readJsonSafe(readsPath, []);
|
|
530
|
+
}
|
|
531
|
+
|
|
532
|
+
/**
|
|
533
|
+
* List artifacts with optional filtering
|
|
534
|
+
* @param {Object} [filters={}] - Filter criteria
|
|
535
|
+
* @param {string} [filters.type] - Filter by artifact type
|
|
536
|
+
* @param {string} [filters.publisher] - Filter by publisher
|
|
537
|
+
* @param {string} [filters.tag] - Filter by tag
|
|
538
|
+
* @returns {Array} Array of index entries
|
|
539
|
+
*/
|
|
540
|
+
list({ type, publisher, tag } = {}) {
|
|
541
|
+
// Read the index
|
|
542
|
+
const index = readJsonSafe(this.indexPath, {});
|
|
543
|
+
|
|
544
|
+
// Convert index object to array
|
|
545
|
+
let results = Object.values(index);
|
|
546
|
+
|
|
547
|
+
// Apply filters
|
|
548
|
+
if (type) {
|
|
549
|
+
results = results.filter(item => item.type === type);
|
|
550
|
+
}
|
|
551
|
+
|
|
552
|
+
if (publisher) {
|
|
553
|
+
results = results.filter(item => item.publisher === publisher);
|
|
554
|
+
}
|
|
555
|
+
|
|
556
|
+
if (tag) {
|
|
557
|
+
results = results.filter(item => item.tags && item.tags.includes(tag));
|
|
558
|
+
}
|
|
559
|
+
|
|
560
|
+
// Enrich with read counts
|
|
561
|
+
results = results.map(item => {
|
|
562
|
+
const reads = this.getReads(item.artifactId);
|
|
563
|
+
const uniqueReaders = [...new Set(reads.map(r => r.reader))];
|
|
564
|
+
return {
|
|
565
|
+
...item,
|
|
566
|
+
readCount: reads.length,
|
|
567
|
+
uniqueReaders,
|
|
568
|
+
};
|
|
569
|
+
});
|
|
570
|
+
|
|
571
|
+
return results;
|
|
572
|
+
}
|
|
573
|
+
|
|
574
|
+
/**
|
|
575
|
+
* Get version history for an artifact
|
|
576
|
+
* @param {string} artifactId - The artifact identifier
|
|
577
|
+
* @returns {Array} Array of version summaries
|
|
578
|
+
*/
|
|
579
|
+
history(artifactId) {
|
|
580
|
+
// Read the index to get latest version
|
|
581
|
+
const index = readJsonSafe(this.indexPath, {});
|
|
582
|
+
|
|
583
|
+
if (!index[artifactId]) {
|
|
584
|
+
return []; // Artifact doesn't exist
|
|
585
|
+
}
|
|
586
|
+
|
|
587
|
+
const latestVersion = index[artifactId].latestVersion;
|
|
588
|
+
const artifactDir = path.join(this.dataDir, artifactId);
|
|
589
|
+
const history = [];
|
|
590
|
+
|
|
591
|
+
// Read all version files from v1 to vN
|
|
592
|
+
for (let v = 1; v <= latestVersion; v++) {
|
|
593
|
+
const versionPath = path.join(artifactDir, `v${v}.json`);
|
|
594
|
+
|
|
595
|
+
if (fs.existsSync(versionPath)) {
|
|
596
|
+
try {
|
|
597
|
+
const content = fs.readFileSync(versionPath, 'utf8');
|
|
598
|
+
const versionData = JSON.parse(content);
|
|
599
|
+
|
|
600
|
+
// Add summary to history
|
|
601
|
+
history.push({
|
|
602
|
+
version: versionData.version,
|
|
603
|
+
publishedAt: versionData.publishedAt,
|
|
604
|
+
publisher: versionData.publisher,
|
|
605
|
+
summary: versionData.summary,
|
|
606
|
+
checksum: versionData.checksum
|
|
607
|
+
});
|
|
608
|
+
} catch (err) {
|
|
609
|
+
// Skip corrupted version files
|
|
610
|
+
}
|
|
611
|
+
}
|
|
612
|
+
}
|
|
613
|
+
|
|
614
|
+
return history;
|
|
615
|
+
}
|
|
616
|
+
|
|
617
|
+
/**
|
|
618
|
+
* Repair the index by scanning all version files
|
|
619
|
+
* Useful for recovering from index corruption
|
|
620
|
+
* @returns {Object} { repaired: count }
|
|
621
|
+
*/
|
|
622
|
+
repair() {
|
|
623
|
+
// Acquire lock on the artifacts index
|
|
624
|
+
acquireLock(this.locksDir, 'artifacts-index');
|
|
625
|
+
|
|
626
|
+
try {
|
|
627
|
+
const newIndex = {};
|
|
628
|
+
let repairedCount = 0;
|
|
629
|
+
|
|
630
|
+
// Scan the data directory for all artifact directories
|
|
631
|
+
if (!fs.existsSync(this.dataDir)) {
|
|
632
|
+
releaseLock(this.locksDir, 'artifacts-index');
|
|
633
|
+
return { repaired: 0 };
|
|
634
|
+
}
|
|
635
|
+
|
|
636
|
+
const artifactDirs = fs.readdirSync(this.dataDir);
|
|
637
|
+
|
|
638
|
+
// Process each artifact directory
|
|
639
|
+
for (const artifactId of artifactDirs) {
|
|
640
|
+
const artifactDir = path.join(this.dataDir, artifactId);
|
|
641
|
+
|
|
642
|
+
// Skip if not a directory
|
|
643
|
+
if (!fs.statSync(artifactDir).isDirectory()) {
|
|
644
|
+
continue;
|
|
645
|
+
}
|
|
646
|
+
|
|
647
|
+
// Find all version files
|
|
648
|
+
const files = fs.readdirSync(artifactDir);
|
|
649
|
+
const versionFiles = files.filter(f => f.match(/^v\d+\.json$/));
|
|
650
|
+
|
|
651
|
+
if (versionFiles.length === 0) {
|
|
652
|
+
continue; // No version files
|
|
653
|
+
}
|
|
654
|
+
|
|
655
|
+
// Sort version files numerically
|
|
656
|
+
versionFiles.sort((a, b) => {
|
|
657
|
+
const versionA = parseInt(a.match(/\d+/)[0]);
|
|
658
|
+
const versionB = parseInt(b.match(/\d+/)[0]);
|
|
659
|
+
return versionA - versionB;
|
|
660
|
+
});
|
|
661
|
+
|
|
662
|
+
// Read the latest version file
|
|
663
|
+
const latestVersionFile = versionFiles[versionFiles.length - 1];
|
|
664
|
+
const latestVersionPath = path.join(artifactDir, latestVersionFile);
|
|
665
|
+
|
|
666
|
+
try {
|
|
667
|
+
const content = fs.readFileSync(latestVersionPath, 'utf8');
|
|
668
|
+
const versionData = JSON.parse(content);
|
|
669
|
+
|
|
670
|
+
// Read the first version file for createdAt
|
|
671
|
+
const firstVersionPath = path.join(artifactDir, versionFiles[0]);
|
|
672
|
+
const firstContent = fs.readFileSync(firstVersionPath, 'utf8');
|
|
673
|
+
const firstVersionData = JSON.parse(firstContent);
|
|
674
|
+
|
|
675
|
+
// Rebuild index entry
|
|
676
|
+
newIndex[artifactId] = {
|
|
677
|
+
artifactId,
|
|
678
|
+
type: versionData.type,
|
|
679
|
+
name: versionData.name,
|
|
680
|
+
publisher: versionData.publisher,
|
|
681
|
+
createdAt: firstVersionData.publishedAt,
|
|
682
|
+
updatedAt: versionData.publishedAt,
|
|
683
|
+
latestVersion: versionData.version,
|
|
684
|
+
tags: versionData.tags || []
|
|
685
|
+
};
|
|
686
|
+
|
|
687
|
+
repairedCount++;
|
|
688
|
+
} catch (err) {
|
|
689
|
+
// Skip corrupted artifacts
|
|
690
|
+
}
|
|
691
|
+
}
|
|
692
|
+
|
|
693
|
+
// Write the rebuilt index atomically
|
|
694
|
+
atomicWriteJson(this.indexPath, newIndex);
|
|
695
|
+
|
|
696
|
+
// Release lock
|
|
697
|
+
releaseLock(this.locksDir, 'artifacts-index');
|
|
698
|
+
|
|
699
|
+
return { repaired: repairedCount };
|
|
700
|
+
|
|
701
|
+
} catch (err) {
|
|
702
|
+
// Ensure lock is released even on error
|
|
703
|
+
releaseLock(this.locksDir, 'artifacts-index');
|
|
704
|
+
throw err;
|
|
705
|
+
}
|
|
706
|
+
}
|
|
707
|
+
}
|
|
708
|
+
|
|
709
|
+
// Export the ArtifactStore class
|
|
710
|
+
module.exports = ArtifactStore;
|