trellis 2.0.8 → 2.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42) hide show
  1. package/README.md +279 -116
  2. package/dist/cli/index.js +655 -4
  3. package/dist/core/index.js +471 -2
  4. package/dist/embeddings/index.js +5 -1
  5. package/dist/{index-s603ev6w.js → index-5b01h414.js} +1 -1
  6. package/dist/index-5m0g9r0y.js +1100 -0
  7. package/dist/{index-zf6htvnm.js → index-7gvjxt27.js} +166 -2
  8. package/dist/index-hybgxe40.js +1174 -0
  9. package/dist/index.js +7 -2
  10. package/dist/transformers.node-bx3q9d7k.js +33130 -0
  11. package/package.json +9 -4
  12. package/src/cli/index.ts +939 -0
  13. package/src/core/agents/harness.ts +380 -0
  14. package/src/core/agents/index.ts +18 -0
  15. package/src/core/agents/types.ts +90 -0
  16. package/src/core/index.ts +85 -2
  17. package/src/core/kernel/trellis-kernel.ts +593 -0
  18. package/src/core/ontology/builtins.ts +248 -0
  19. package/src/core/ontology/index.ts +34 -0
  20. package/src/core/ontology/registry.ts +209 -0
  21. package/src/core/ontology/types.ts +124 -0
  22. package/src/core/ontology/validator.ts +382 -0
  23. package/src/core/persist/backend.ts +10 -0
  24. package/src/core/persist/sqlite-backend.ts +298 -0
  25. package/src/core/plugins/index.ts +17 -0
  26. package/src/core/plugins/registry.ts +322 -0
  27. package/src/core/plugins/types.ts +126 -0
  28. package/src/core/query/datalog.ts +188 -0
  29. package/src/core/query/engine.ts +370 -0
  30. package/src/core/query/index.ts +34 -0
  31. package/src/core/query/parser.ts +481 -0
  32. package/src/core/query/types.ts +200 -0
  33. package/src/embeddings/auto-embed.ts +248 -0
  34. package/src/embeddings/index.ts +7 -0
  35. package/src/embeddings/model.ts +21 -4
  36. package/src/embeddings/types.ts +8 -1
  37. package/src/index.ts +9 -0
  38. package/src/sync/http-transport.ts +144 -0
  39. package/src/sync/index.ts +11 -0
  40. package/src/sync/multi-repo.ts +200 -0
  41. package/src/sync/ws-transport.ts +145 -0
  42. package/dist/index-5bhe57y9.js +0 -326
@@ -0,0 +1,382 @@
1
+ /**
2
+ * Ontology Validation — Schema enforcement for graph mutations.
3
+ *
4
+ * Provides both standalone validation (check existing entities against schemas)
5
+ * and a kernel middleware that rejects mutations violating ontology constraints.
6
+ *
7
+ * @module trellis/core/ontology
8
+ */
9
+
10
+ import type { EAVStore, Fact, Link, Atom } from '../store/eav-store.js';
11
+ import type { KernelOp } from '../persist/backend.js';
12
+ import type { KernelMiddleware, MiddlewareContext, OpMiddlewareNext } from '../kernel/middleware.js';
13
+ import type { OntologyRegistry } from './registry.js';
14
+ import type {
15
+ AttributeDef,
16
+ AttrType,
17
+ EntityDef,
18
+ ValidationError,
19
+ ValidationResult,
20
+ } from './types.js';
21
+
22
+ // ---------------------------------------------------------------------------
23
+ // Standalone Validation
24
+ // ---------------------------------------------------------------------------
25
+
26
+ /**
27
+ * Validate a single entity against the ontology registry.
28
+ */
29
+ export function validateEntity(
30
+ entityId: string,
31
+ store: EAVStore,
32
+ registry: OntologyRegistry,
33
+ ): ValidationResult {
34
+ const errors: ValidationError[] = [];
35
+ const warnings: ValidationError[] = [];
36
+
37
+ const facts = store.getFactsByEntity(entityId);
38
+ if (facts.length === 0) {
39
+ return { valid: true, errors: [], warnings: [] };
40
+ }
41
+
42
+ const typeFact = facts.find((f) => f.a === 'type');
43
+ if (!typeFact) {
44
+ warnings.push({
45
+ entityId,
46
+ entityType: '(unknown)',
47
+ field: 'type',
48
+ message: 'Entity has no "type" attribute.',
49
+ severity: 'warning',
50
+ });
51
+ return { valid: true, errors, warnings };
52
+ }
53
+
54
+ const entityType = String(typeFact.v);
55
+ const def = registry.getEntityDef(entityType);
56
+
57
+ if (!def) {
58
+ // Type not in any registered ontology — that's OK, just warn
59
+ warnings.push({
60
+ entityId,
61
+ entityType,
62
+ field: 'type',
63
+ message: `Entity type "${entityType}" is not defined in any registered ontology.`,
64
+ severity: 'warning',
65
+ });
66
+ return { valid: true, errors, warnings };
67
+ }
68
+
69
+ // Check abstract
70
+ if (def.abstract) {
71
+ errors.push({
72
+ entityId,
73
+ entityType,
74
+ field: 'type',
75
+ message: `Cannot instantiate abstract entity type "${entityType}".`,
76
+ severity: 'error',
77
+ });
78
+ }
79
+
80
+ // Check required attributes
81
+ for (const attr of def.attributes) {
82
+ if (attr.required && attr.name !== 'type') {
83
+ const hasFact = facts.some((f) => f.a === attr.name);
84
+ if (!hasFact) {
85
+ errors.push({
86
+ entityId,
87
+ entityType,
88
+ field: attr.name,
89
+ message: `Required attribute "${attr.name}" is missing.`,
90
+ severity: 'error',
91
+ });
92
+ }
93
+ }
94
+ }
95
+
96
+ // Validate each fact against its attribute def
97
+ for (const fact of facts) {
98
+ if (fact.a === 'type' || fact.a === 'createdAt' || fact.a === 'updatedAt') continue;
99
+
100
+ const attrDef = def.attributes.find((a) => a.name === fact.a);
101
+ if (!attrDef) {
102
+ // Unknown attribute — warn but don't error (open-world assumption)
103
+ warnings.push({
104
+ entityId,
105
+ entityType,
106
+ field: fact.a,
107
+ message: `Attribute "${fact.a}" is not defined in the "${entityType}" ontology.`,
108
+ severity: 'warning',
109
+ });
110
+ continue;
111
+ }
112
+
113
+ // Type check
114
+ const typeErr = validateAttrType(fact.v, attrDef);
115
+ if (typeErr) {
116
+ errors.push({
117
+ entityId,
118
+ entityType,
119
+ field: fact.a,
120
+ message: typeErr,
121
+ severity: 'error',
122
+ });
123
+ }
124
+
125
+ // Enum check
126
+ if (attrDef.enum && !attrDef.enum.includes(fact.v)) {
127
+ errors.push({
128
+ entityId,
129
+ entityType,
130
+ field: fact.a,
131
+ message: `Value "${fact.v}" is not in allowed values: [${attrDef.enum.join(', ')}].`,
132
+ severity: 'error',
133
+ });
134
+ }
135
+
136
+ // Pattern check
137
+ if (attrDef.pattern && typeof fact.v === 'string') {
138
+ if (!new RegExp(attrDef.pattern).test(fact.v)) {
139
+ errors.push({
140
+ entityId,
141
+ entityType,
142
+ field: fact.a,
143
+ message: `Value "${fact.v}" does not match pattern /${attrDef.pattern}/.`,
144
+ severity: 'error',
145
+ });
146
+ }
147
+ }
148
+
149
+ // Range check
150
+ if (attrDef.min !== undefined) {
151
+ if (typeof fact.v === 'number' && fact.v < attrDef.min) {
152
+ errors.push({
153
+ entityId,
154
+ entityType,
155
+ field: fact.a,
156
+ message: `Value ${fact.v} is below minimum ${attrDef.min}.`,
157
+ severity: 'error',
158
+ });
159
+ }
160
+ if (typeof fact.v === 'string' && fact.v.length < attrDef.min) {
161
+ errors.push({
162
+ entityId,
163
+ entityType,
164
+ field: fact.a,
165
+ message: `String length ${fact.v.length} is below minimum ${attrDef.min}.`,
166
+ severity: 'error',
167
+ });
168
+ }
169
+ }
170
+ if (attrDef.max !== undefined) {
171
+ if (typeof fact.v === 'number' && fact.v > attrDef.max) {
172
+ errors.push({
173
+ entityId,
174
+ entityType,
175
+ field: fact.a,
176
+ message: `Value ${fact.v} exceeds maximum ${attrDef.max}.`,
177
+ severity: 'error',
178
+ });
179
+ }
180
+ if (typeof fact.v === 'string' && fact.v.length > attrDef.max) {
181
+ errors.push({
182
+ entityId,
183
+ entityType,
184
+ field: fact.a,
185
+ message: `String length ${fact.v.length} exceeds maximum ${attrDef.max}.`,
186
+ severity: 'error',
187
+ });
188
+ }
189
+ }
190
+ }
191
+
192
+ // Validate links
193
+ const links = store.getLinksByEntity(entityId);
194
+ for (const link of links) {
195
+ if (link.e1 !== entityId) continue; // Only validate outgoing links
196
+ const relDef = registry.getRelationDef(link.a);
197
+ if (!relDef) continue;
198
+
199
+ // Check source type is allowed
200
+ if (!relDef.sourceTypes.includes(entityType)) {
201
+ errors.push({
202
+ entityId,
203
+ entityType,
204
+ field: link.a,
205
+ message: `Entity type "${entityType}" is not allowed as source for relation "${link.a}".`,
206
+ severity: 'error',
207
+ });
208
+ }
209
+
210
+ // Check target type
211
+ const targetFacts = store.getFactsByEntity(link.e2);
212
+ const targetType = targetFacts.find((f) => f.a === 'type');
213
+ if (targetType && !relDef.targetTypes.includes(String(targetType.v))) {
214
+ errors.push({
215
+ entityId,
216
+ entityType,
217
+ field: link.a,
218
+ message: `Target type "${targetType.v}" is not allowed for relation "${link.a}" (expected: ${relDef.targetTypes.join(', ')}).`,
219
+ severity: 'error',
220
+ });
221
+ }
222
+ }
223
+
224
+ return {
225
+ valid: errors.length === 0,
226
+ errors,
227
+ warnings,
228
+ };
229
+ }
230
+
231
+ /**
232
+ * Validate all entities in the store against the ontology registry.
233
+ */
234
+ export function validateStore(
235
+ store: EAVStore,
236
+ registry: OntologyRegistry,
237
+ ): ValidationResult {
238
+ const allErrors: ValidationError[] = [];
239
+ const allWarnings: ValidationError[] = [];
240
+
241
+ const typeFacts = store.getFactsByAttribute('type');
242
+ const entityIds = new Set(typeFacts.map((f) => f.e));
243
+
244
+ for (const entityId of entityIds) {
245
+ const result = validateEntity(entityId, store, registry);
246
+ allErrors.push(...result.errors);
247
+ allWarnings.push(...result.warnings);
248
+ }
249
+
250
+ return {
251
+ valid: allErrors.length === 0,
252
+ errors: allErrors,
253
+ warnings: allWarnings,
254
+ };
255
+ }
256
+
257
+ // ---------------------------------------------------------------------------
258
+ // Type checking helper
259
+ // ---------------------------------------------------------------------------
260
+
261
+ function validateAttrType(value: Atom, def: AttributeDef): string | null {
262
+ if (def.type === 'any') return null;
263
+
264
+ switch (def.type) {
265
+ case 'string':
266
+ if (typeof value !== 'string') return `Expected string, got ${typeof value}.`;
267
+ break;
268
+ case 'number':
269
+ if (typeof value !== 'number') return `Expected number, got ${typeof value}.`;
270
+ break;
271
+ case 'boolean':
272
+ if (typeof value !== 'boolean') return `Expected boolean, got ${typeof value}.`;
273
+ break;
274
+ case 'date':
275
+ if (typeof value === 'string') {
276
+ if (isNaN(Date.parse(value))) return `Expected ISO date string, got "${value}".`;
277
+ } else if (!(value instanceof Date)) {
278
+ return `Expected date, got ${typeof value}.`;
279
+ }
280
+ break;
281
+ case 'ref':
282
+ if (typeof value !== 'string') return `Expected entity reference (string), got ${typeof value}.`;
283
+ break;
284
+ }
285
+
286
+ return null;
287
+ }
288
+
289
+ // ---------------------------------------------------------------------------
290
+ // Validation Middleware
291
+ // ---------------------------------------------------------------------------
292
+
293
+ /**
294
+ * Creates a kernel middleware that validates mutations against the ontology.
295
+ *
296
+ * - On `addFacts`: validates that new facts conform to attribute definitions
297
+ * - On `addLinks`: validates that links conform to relation definitions
298
+ * - Blocks operations that would create invalid data (throws on error)
299
+ *
300
+ * @param registry The ontology registry to validate against
301
+ * @param strict If true, unknown entity types cause errors (default: false = warnings only)
302
+ */
303
+ export function createValidationMiddleware(
304
+ registry: OntologyRegistry,
305
+ options?: { strict?: boolean },
306
+ ): KernelMiddleware {
307
+ const strict = options?.strict ?? false;
308
+
309
+ return {
310
+ name: 'ontology-validator',
311
+
312
+ handleOp: (op: KernelOp, ctx: MiddlewareContext, next: OpMiddlewareNext) => {
313
+ // Validate new facts
314
+ if (op.facts && op.facts.length > 0) {
315
+ for (const fact of op.facts) {
316
+ if (fact.a === 'type') continue; // type facts are always allowed
317
+ if (fact.a === 'createdAt' || fact.a === 'updatedAt') continue;
318
+
319
+ // Find the entity type from the same op's facts or skip
320
+ const typeFact = op.facts.find((f) => f.e === fact.e && f.a === 'type');
321
+ if (!typeFact) continue; // Can't validate without knowing the type
322
+
323
+ const entityType = String(typeFact.v);
324
+ const def = registry.getEntityDef(entityType);
325
+ if (!def) {
326
+ if (strict) {
327
+ throw new Error(
328
+ `[ontology-validator] Unknown entity type "${entityType}" for entity "${fact.e}".`,
329
+ );
330
+ }
331
+ continue;
332
+ }
333
+
334
+ const attrDef = def.attributes.find((a) => a.name === fact.a);
335
+ if (!attrDef) {
336
+ if (strict) {
337
+ throw new Error(
338
+ `[ontology-validator] Unknown attribute "${fact.a}" for type "${entityType}".`,
339
+ );
340
+ }
341
+ continue;
342
+ }
343
+
344
+ // Type check
345
+ const typeErr = validateAttrType(fact.v, attrDef);
346
+ if (typeErr) {
347
+ throw new Error(
348
+ `[ontology-validator] Entity "${fact.e}" attribute "${fact.a}": ${typeErr}`,
349
+ );
350
+ }
351
+
352
+ // Enum check
353
+ if (attrDef.enum && !attrDef.enum.includes(fact.v)) {
354
+ throw new Error(
355
+ `[ontology-validator] Entity "${fact.e}" attribute "${fact.a}": value "${fact.v}" not in [${attrDef.enum.join(', ')}].`,
356
+ );
357
+ }
358
+ }
359
+ }
360
+
361
+ // Validate new links
362
+ if (op.links && op.links.length > 0) {
363
+ for (const link of op.links) {
364
+ const relDef = registry.getRelationDef(link.a);
365
+ if (!relDef) continue; // Unknown relation — skip
366
+
367
+ // Source type check (from op facts)
368
+ const sourceTypeFact = op.facts?.find(
369
+ (f) => f.e === link.e1 && f.a === 'type',
370
+ );
371
+ if (sourceTypeFact && !relDef.sourceTypes.includes(String(sourceTypeFact.v))) {
372
+ throw new Error(
373
+ `[ontology-validator] Relation "${link.a}": source type "${sourceTypeFact.v}" not allowed (expected: ${relDef.sourceTypes.join(', ')}).`,
374
+ );
375
+ }
376
+ }
377
+ }
378
+
379
+ return next(op, ctx);
380
+ },
381
+ };
382
+ }
@@ -45,6 +45,16 @@ export interface KernelOp {
45
45
  */
46
46
  facts?: Fact[];
47
47
  links?: Link[];
48
+
49
+ /**
50
+ * Facts to delete (for update/delete operations).
51
+ */
52
+ deleteFacts?: Fact[];
53
+
54
+ /**
55
+ * Links to delete (for update/delete operations).
56
+ */
57
+ deleteLinks?: Link[];
48
58
  }
49
59
 
50
60
  export interface KernelBackend {
@@ -0,0 +1,298 @@
1
+ /**
2
+ * SQLite-backed Kernel Backend
3
+ *
4
+ * Replaces the P0 JsonOpLog with a proper WAL-mode SQLite database.
5
+ * Stores ops, snapshots, and blobs in a single database file.
6
+ *
7
+ * @module trellis/core
8
+ */
9
+
10
+ import { Database } from 'bun:sqlite';
11
+ import type { KernelOp, KernelBackend } from './backend.js';
12
+
13
+ // ---------------------------------------------------------------------------
14
+ // Schema
15
+ // ---------------------------------------------------------------------------
16
+
17
+ const SCHEMA_SQL = `
18
+ CREATE TABLE IF NOT EXISTS ops (
19
+ hash TEXT PRIMARY KEY,
20
+ kind TEXT NOT NULL,
21
+ timestamp TEXT NOT NULL,
22
+ agent_id TEXT NOT NULL,
23
+ previous_hash TEXT,
24
+ payload TEXT NOT NULL
25
+ );
26
+
27
+ CREATE TABLE IF NOT EXISTS snapshots (
28
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
29
+ last_op_hash TEXT NOT NULL,
30
+ data TEXT NOT NULL,
31
+ created_at TEXT NOT NULL DEFAULT (datetime('now'))
32
+ );
33
+
34
+ CREATE TABLE IF NOT EXISTS blobs (
35
+ hash TEXT PRIMARY KEY,
36
+ content BLOB NOT NULL
37
+ );
38
+
39
+ CREATE INDEX IF NOT EXISTS idx_ops_kind ON ops(kind);
40
+ CREATE INDEX IF NOT EXISTS idx_ops_timestamp ON ops(timestamp);
41
+ CREATE INDEX IF NOT EXISTS idx_ops_agent ON ops(agent_id);
42
+ CREATE INDEX IF NOT EXISTS idx_ops_previous ON ops(previous_hash);
43
+ CREATE INDEX IF NOT EXISTS idx_snapshots_op ON snapshots(last_op_hash);
44
+ `;
45
+
46
+ // ---------------------------------------------------------------------------
47
+ // Implementation
48
+ // ---------------------------------------------------------------------------
49
+
50
+ export class SqliteKernelBackend implements KernelBackend {
51
+ private db: Database;
52
+ private _stmts: {
53
+ insert: ReturnType<Database['prepare']>;
54
+ readAll: ReturnType<Database['prepare']>;
55
+ readUntil: ReturnType<Database['prepare']>;
56
+ readAfter: ReturnType<Database['prepare']>;
57
+ getByHash: ReturnType<Database['prepare']>;
58
+ getLast: ReturnType<Database['prepare']>;
59
+ count: ReturnType<Database['prepare']>;
60
+ saveSnapshot: ReturnType<Database['prepare']>;
61
+ loadSnapshot: ReturnType<Database['prepare']>;
62
+ putBlob: ReturnType<Database['prepare']>;
63
+ getBlob: ReturnType<Database['prepare']>;
64
+ hasBlob: ReturnType<Database['prepare']>;
65
+ } | null = null;
66
+
67
+ constructor(private dbPath: string) {
68
+ this.db = new Database(dbPath);
69
+ }
70
+
71
+ init(): void {
72
+ this.db.exec('PRAGMA journal_mode=WAL;');
73
+ this.db.exec('PRAGMA foreign_keys=ON;');
74
+ this.db.exec('PRAGMA synchronous=NORMAL;');
75
+ this.db.exec(SCHEMA_SQL);
76
+ this._prepareStatements();
77
+ }
78
+
79
+ private _prepareStatements(): void {
80
+ this._stmts = {
81
+ insert: this.db.prepare(`
82
+ INSERT OR IGNORE INTO ops (hash, kind, timestamp, agent_id, previous_hash, payload)
83
+ VALUES ($hash, $kind, $timestamp, $agentId, $previousHash, $payload)
84
+ `),
85
+ readAll: this.db.prepare(`
86
+ SELECT hash, kind, timestamp, agent_id, previous_hash, payload
87
+ FROM ops ORDER BY rowid ASC
88
+ `),
89
+ readUntil: this.db.prepare(`
90
+ SELECT hash, kind, timestamp, agent_id, previous_hash, payload
91
+ FROM ops WHERE rowid <= (SELECT rowid FROM ops WHERE hash = $hash)
92
+ ORDER BY rowid ASC
93
+ `),
94
+ readAfter: this.db.prepare(`
95
+ SELECT hash, kind, timestamp, agent_id, previous_hash, payload
96
+ FROM ops WHERE rowid > (SELECT rowid FROM ops WHERE hash = $hash)
97
+ ORDER BY rowid ASC
98
+ `),
99
+ getByHash: this.db.prepare(`
100
+ SELECT hash, kind, timestamp, agent_id, previous_hash, payload
101
+ FROM ops WHERE hash = $hash
102
+ `),
103
+ getLast: this.db.prepare(`
104
+ SELECT hash, kind, timestamp, agent_id, previous_hash, payload
105
+ FROM ops ORDER BY rowid DESC LIMIT 1
106
+ `),
107
+ count: this.db.prepare('SELECT COUNT(*) as cnt FROM ops'),
108
+ saveSnapshot: this.db.prepare(`
109
+ INSERT INTO snapshots (last_op_hash, data)
110
+ VALUES ($lastOpHash, $data)
111
+ `),
112
+ loadSnapshot: this.db.prepare(`
113
+ SELECT last_op_hash, data FROM snapshots
114
+ ORDER BY id DESC LIMIT 1
115
+ `),
116
+ putBlob: this.db.prepare(`
117
+ INSERT OR IGNORE INTO blobs (hash, content) VALUES ($hash, $content)
118
+ `),
119
+ getBlob: this.db.prepare(`
120
+ SELECT content FROM blobs WHERE hash = $hash
121
+ `),
122
+ hasBlob: this.db.prepare(`
123
+ SELECT 1 FROM blobs WHERE hash = $hash
124
+ `),
125
+ };
126
+ }
127
+
128
+ // -------------------------------------------------------------------------
129
+ // Op operations
130
+ // -------------------------------------------------------------------------
131
+
132
+ append(op: KernelOp): void {
133
+ const payload = JSON.stringify({
134
+ facts: op.facts,
135
+ links: op.links,
136
+ ...(op.deleteFacts?.length ? { deleteFacts: op.deleteFacts } : {}),
137
+ ...(op.deleteLinks?.length ? { deleteLinks: op.deleteLinks } : {}),
138
+ ...((op as any).vcs ? { vcs: (op as any).vcs } : {}),
139
+ ...((op as any).signature ? { signature: (op as any).signature } : {}),
140
+ });
141
+
142
+ this._stmts!.insert.run({
143
+ $hash: op.hash,
144
+ $kind: op.kind,
145
+ $timestamp: op.timestamp,
146
+ $agentId: op.agentId,
147
+ $previousHash: op.previousHash ?? null,
148
+ $payload: payload,
149
+ });
150
+ }
151
+
152
+ appendBatch(ops: KernelOp[]): void {
153
+ if (ops.length === 0) return;
154
+ this.db.transaction(() => {
155
+ for (const op of ops) {
156
+ this.append(op);
157
+ }
158
+ })();
159
+ }
160
+
161
+ readAll(): KernelOp[] {
162
+ const rows = this._stmts!.readAll.all() as any[];
163
+ return rows.map(rowToOp);
164
+ }
165
+
166
+ readUntil(hash: string): KernelOp[] {
167
+ const rows = this._stmts!.readUntil.all({ $hash: hash }) as any[];
168
+ return rows.map(rowToOp);
169
+ }
170
+
171
+ readAfter(hash: string): KernelOp[] {
172
+ const rows = this._stmts!.readAfter.all({ $hash: hash }) as any[];
173
+ return rows.map(rowToOp);
174
+ }
175
+
176
+ readUntilTimestamp(isoTimestamp: string): KernelOp[] {
177
+ const rows = this.db
178
+ .prepare(
179
+ `SELECT hash, kind, timestamp, agent_id, previous_hash, payload
180
+ FROM ops WHERE timestamp <= $ts ORDER BY rowid ASC`,
181
+ )
182
+ .all({ $ts: isoTimestamp }) as any[];
183
+ return rows.map(rowToOp);
184
+ }
185
+
186
+ getLastOp(): KernelOp | undefined {
187
+ const row = this._stmts!.getLast.get() as any;
188
+ return row ? rowToOp(row) : undefined;
189
+ }
190
+
191
+ getOpByHash(hash: string): KernelOp | undefined {
192
+ const row = this._stmts!.getByHash.get({ $hash: hash }) as any;
193
+ return row ? rowToOp(row) : undefined;
194
+ }
195
+
196
+ count(): number {
197
+ const row = this._stmts!.count.get() as any;
198
+ return row?.cnt ?? 0;
199
+ }
200
+
201
+ /**
202
+ * Find the common ancestor op of two op hashes by walking
203
+ * previousHash chains until they converge.
204
+ */
205
+ findCommonAncestor(hashA: string, hashB: string): KernelOp | undefined {
206
+ // Collect ancestors of A
207
+ const ancestorsA = new Set<string>();
208
+ let cursor: string | undefined = hashA;
209
+ while (cursor) {
210
+ ancestorsA.add(cursor);
211
+ const op = this.getOpByHash(cursor);
212
+ cursor = op?.previousHash;
213
+ }
214
+
215
+ // Walk B's chain until we find a common ancestor
216
+ cursor = hashB;
217
+ while (cursor) {
218
+ if (ancestorsA.has(cursor)) {
219
+ return this.getOpByHash(cursor);
220
+ }
221
+ const op = this.getOpByHash(cursor);
222
+ cursor = op?.previousHash;
223
+ }
224
+
225
+ return undefined;
226
+ }
227
+
228
+ // -------------------------------------------------------------------------
229
+ // Snapshot operations
230
+ // -------------------------------------------------------------------------
231
+
232
+ saveSnapshot(lastOpHash: string, data: any): void {
233
+ this._stmts!.saveSnapshot.run({
234
+ $lastOpHash: lastOpHash,
235
+ $data: JSON.stringify(data),
236
+ });
237
+ }
238
+
239
+ loadLatestSnapshot(): { lastOpHash: string; data: any } | undefined {
240
+ const row = this._stmts!.loadSnapshot.get() as any;
241
+ if (!row) return undefined;
242
+ return {
243
+ lastOpHash: row.last_op_hash,
244
+ data: JSON.parse(row.data),
245
+ };
246
+ }
247
+
248
+ // -------------------------------------------------------------------------
249
+ // Blob operations
250
+ // -------------------------------------------------------------------------
251
+
252
+ putBlob(hash: string, content: Uint8Array): void {
253
+ this._stmts!.putBlob.run({
254
+ $hash: hash,
255
+ $content: Buffer.from(content),
256
+ });
257
+ }
258
+
259
+ getBlob(hash: string): Uint8Array | undefined {
260
+ const row = this._stmts!.getBlob.get({ $hash: hash }) as any;
261
+ if (!row) return undefined;
262
+ return new Uint8Array(row.content);
263
+ }
264
+
265
+ hasBlob(hash: string): boolean {
266
+ return !!this._stmts!.hasBlob.get({ $hash: hash });
267
+ }
268
+
269
+ // -------------------------------------------------------------------------
270
+ // Lifecycle
271
+ // -------------------------------------------------------------------------
272
+
273
+ close(): void {
274
+ this.db.close();
275
+ }
276
+ }
277
+
278
+ // ---------------------------------------------------------------------------
279
+ // Helpers
280
+ // ---------------------------------------------------------------------------
281
+
282
+ function rowToOp(row: any): KernelOp {
283
+ const payload = JSON.parse(row.payload);
284
+ const op: any = {
285
+ hash: row.hash,
286
+ kind: row.kind,
287
+ timestamp: row.timestamp,
288
+ agentId: row.agent_id,
289
+ };
290
+ if (row.previous_hash) op.previousHash = row.previous_hash;
291
+ if (payload.facts) op.facts = payload.facts;
292
+ if (payload.links) op.links = payload.links;
293
+ if (payload.deleteFacts) op.deleteFacts = payload.deleteFacts;
294
+ if (payload.deleteLinks) op.deleteLinks = payload.deleteLinks;
295
+ if (payload.vcs) op.vcs = payload.vcs;
296
+ if (payload.signature) op.signature = payload.signature;
297
+ return op;
298
+ }
@@ -0,0 +1,17 @@
1
/**
 * Plugin System — Public API Surface
 *
 * @module trellis/core/plugins
 */

// Runtime exports: the registry that hosts plugins and the event bus they
// communicate over.
export { PluginRegistry, EventBus } from './registry.js';

// Type-only exports (erased at compile time; `export type` keeps the emit
// clean under isolatedModules).
export type {
  PluginDef,
  PluginContext,
  PluginManifest,
  EventCallback,
  EventHandler,
  WellKnownEvent,
  WorkspaceConfig,
} from './types.js';