trellis 2.0.7 → 2.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +1031 -30
- package/dist/core/index.js +474 -2
- package/dist/decisions/index.js +5 -2
- package/dist/embeddings/index.js +5 -1
- package/dist/{index-3s0eak0p.js → index-3ejh8k6v.js} +26 -3
- package/dist/{index-1j1anhmr.js → index-5b01h414.js} +489 -335
- package/dist/index-5m0g9r0y.js +1100 -0
- package/dist/{index-8pce39mh.js → index-65z0xfjw.js} +17 -3
- package/dist/{index-zf6htvnm.js → index-7gvjxt27.js} +166 -2
- package/dist/index-hybgxe40.js +1174 -0
- package/dist/{index-gnw8d7d6.js → index-k5kf7sd0.js} +32 -3
- package/dist/{index-fd4e26s4.js → index-v9b4hqa7.js} +23 -15
- package/dist/index.js +20 -7
- package/dist/transformers.node-bx3q9d7k.js +33130 -0
- package/dist/ui/client.html +695 -0
- package/dist/vcs/index.js +3 -3
- package/package.json +5 -4
- package/src/cli/index.ts +1017 -1
- package/src/core/agents/harness.ts +336 -0
- package/src/core/agents/index.ts +18 -0
- package/src/core/agents/types.ts +90 -0
- package/src/core/index.ts +85 -2
- package/src/core/kernel/trellis-kernel.ts +593 -0
- package/src/core/ontology/builtins.ts +248 -0
- package/src/core/ontology/index.ts +34 -0
- package/src/core/ontology/registry.ts +209 -0
- package/src/core/ontology/types.ts +124 -0
- package/src/core/ontology/validator.ts +382 -0
- package/src/core/persist/backend.ts +10 -0
- package/src/core/persist/sqlite-backend.ts +298 -0
- package/src/core/plugins/index.ts +17 -0
- package/src/core/plugins/registry.ts +322 -0
- package/src/core/plugins/types.ts +126 -0
- package/src/core/query/datalog.ts +188 -0
- package/src/core/query/engine.ts +370 -0
- package/src/core/query/index.ts +34 -0
- package/src/core/query/parser.ts +481 -0
- package/src/core/query/types.ts +200 -0
- package/src/embeddings/auto-embed.ts +248 -0
- package/src/embeddings/index.ts +7 -0
- package/src/embeddings/model.ts +21 -4
- package/src/embeddings/types.ts +8 -1
- package/src/engine.ts +45 -3
- package/src/index.ts +9 -0
- package/src/sync/http-transport.ts +144 -0
- package/src/sync/index.ts +11 -0
- package/src/sync/multi-repo.ts +200 -0
- package/src/sync/ws-transport.ts +145 -0
- package/src/ui/client.html +695 -0
- package/src/ui/server.ts +419 -0
- package/src/watcher/fs-watcher.ts +41 -3
- package/dist/index-gkvhzm9f.js +0 -321
|
@@ -0,0 +1,248 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Auto-Embedding Middleware
|
|
3
|
+
*
|
|
4
|
+
* Kernel middleware that automatically embeds entity facts and links
|
|
5
|
+
* on graph mutations. Runs after successful ops to index new/changed
|
|
6
|
+
* content into the vector store.
|
|
7
|
+
*
|
|
8
|
+
* @module trellis/embeddings
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import type { KernelOp } from '../core/persist/backend.js';
|
|
12
|
+
import type { KernelMiddleware, MiddlewareContext, OpMiddlewareNext } from '../core/kernel/middleware.js';
|
|
13
|
+
import type { Fact, Link } from '../core/store/eav-store.js';
|
|
14
|
+
import type { ChunkMeta, EmbeddingRecord } from './types.js';
|
|
15
|
+
import type { Embedder } from './search.js';
|
|
16
|
+
import { VectorStore } from './store.js';
|
|
17
|
+
import { embed } from './model.js';
|
|
18
|
+
|
|
19
|
+
// ---------------------------------------------------------------------------
|
|
20
|
+
// Entity text builder — converts facts/links into embeddable text
|
|
21
|
+
// ---------------------------------------------------------------------------
|
|
22
|
+
|
|
23
|
+
function factsToText(facts: Fact[]): string {
|
|
24
|
+
return facts
|
|
25
|
+
.filter((f) => f.a !== 'createdAt' && f.a !== 'updatedAt')
|
|
26
|
+
.map((f) => `${f.a}: ${f.v}`)
|
|
27
|
+
.join('\n');
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
function linksToText(links: Link[]): string {
|
|
31
|
+
return links.map((l) => `${l.e1} —[${l.a}]→ ${l.e2}`).join('\n');
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
function entitySummaryText(entityId: string, facts: Fact[], links: Link[]): string {
|
|
35
|
+
const type = facts.find((f) => f.a === 'type')?.v ?? 'Entity';
|
|
36
|
+
const name = facts.find((f) => f.a === 'name' || f.a === 'title')?.v ?? entityId;
|
|
37
|
+
const parts = [`${type}: ${name} (${entityId})`];
|
|
38
|
+
|
|
39
|
+
const attrs = facts.filter((f) => !['type', 'name', 'title', 'createdAt', 'updatedAt'].includes(f.a));
|
|
40
|
+
if (attrs.length > 0) {
|
|
41
|
+
parts.push(attrs.map((f) => ` ${f.a} = ${f.v}`).join('\n'));
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
if (links.length > 0) {
|
|
45
|
+
parts.push('Relations:');
|
|
46
|
+
parts.push(links.map((l) => ` ${l.a} → ${l.e2}`).join('\n'));
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
return parts.join('\n');
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
// ---------------------------------------------------------------------------
|
|
53
|
+
// Middleware factory
|
|
54
|
+
// ---------------------------------------------------------------------------
|
|
55
|
+
|
|
56
|
+
/** Configuration for createAutoEmbedMiddleware. */
export interface AutoEmbedOptions {
  /** Path to the vector store SQLite database. */
  dbPath: string;
  /** Custom embedder function (default: transformers.js embed). */
  embedFn?: Embedder;
  /** Whether to embed facts individually (default: false — only entity summaries). */
  embedIndividualFacts?: boolean;
}
|
|
64
|
+
|
|
65
|
+
/**
|
|
66
|
+
* Creates a kernel middleware that auto-embeds entities on mutation.
|
|
67
|
+
*
|
|
68
|
+
* On addFacts/addLinks: embeds entity summaries into the vector store.
|
|
69
|
+
* On deleteFacts/deleteLinks: removes stale embeddings.
|
|
70
|
+
*/
|
|
71
|
+
export function createAutoEmbedMiddleware(options: AutoEmbedOptions): KernelMiddleware & { close: () => void } {
|
|
72
|
+
const store = new VectorStore(options.dbPath);
|
|
73
|
+
const embedFn = options.embedFn ?? embed;
|
|
74
|
+
const embedIndividual = options.embedIndividualFacts ?? false;
|
|
75
|
+
|
|
76
|
+
return {
|
|
77
|
+
name: 'auto-embed',
|
|
78
|
+
|
|
79
|
+
handleOp: async (op: KernelOp, ctx: MiddlewareContext, next: OpMiddlewareNext) => {
|
|
80
|
+
// Let the op proceed first
|
|
81
|
+
await next(op, ctx);
|
|
82
|
+
|
|
83
|
+
// Then asynchronously embed (don't block the mutation)
|
|
84
|
+
try {
|
|
85
|
+
await _processOp(op, store, embedFn, embedIndividual);
|
|
86
|
+
} catch {
|
|
87
|
+
// Embedding failures are non-fatal
|
|
88
|
+
}
|
|
89
|
+
},
|
|
90
|
+
|
|
91
|
+
close: () => {
|
|
92
|
+
store.close();
|
|
93
|
+
},
|
|
94
|
+
};
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
/**
 * Index a single kernel op into the vector store.
 *
 * Deletion side: if the op deletes facts or links, all embeddings for
 * every entity touched by the op are removed. Addition side: facts are
 * grouped per entity and a summary embedding is upserted for each, plus
 * (optionally) one embedding per individual fact.
 *
 * Embedding failures for a single entity/fact are swallowed so that one
 * bad input cannot prevent the rest of the op from being indexed.
 *
 * @param op              Kernel mutation being indexed.
 * @param store           Vector store to delete from / upsert into.
 * @param embedFn         Text → vector embedder.
 * @param embedIndividual When true, also embed each non-reserved fact on its own.
 */
async function _processOp(
  op: KernelOp,
  store: VectorStore,
  embedFn: Embedder,
  embedIndividual: boolean,
): Promise<void> {
  // Single timestamp shared by every record produced from this op.
  const now = new Date().toISOString();

  // Collect affected entity IDs from every facet of the op
  // (both endpoints of each link count as affected).
  const entityIds = new Set<string>();
  if (op.facts) for (const f of op.facts) entityIds.add(f.e);
  if (op.links) for (const l of op.links) { entityIds.add(l.e1); entityIds.add(l.e2); }
  if (op.deleteFacts) for (const f of op.deleteFacts) entityIds.add(f.e);
  if (op.deleteLinks) for (const l of op.deleteLinks) { entityIds.add(l.e1); entityIds.add(l.e2); }

  // Handle deletions — remove old embeddings for deleted entities.
  // NOTE(review): entityIds also contains entities that appear only in
  // op.facts/op.links, so a mixed add+delete op wipes those entities'
  // embeddings too before they are re-indexed below — confirm this
  // refresh-on-mixed-op behavior is intended.
  if (op.deleteFacts || op.deleteLinks) {
    for (const eid of entityIds) {
      store.deleteByEntity(eid);
    }
  }

  // Handle additions — embed entity summaries
  if (op.facts && op.facts.length > 0) {
    // Group facts by entity
    const factsByEntity = new Map<string, Fact[]>();
    for (const f of op.facts) {
      const existing = factsByEntity.get(f.e) ?? [];
      existing.push(f);
      factsByEntity.set(f.e, existing);
    }

    // Group links by their source entity (e1) only; incoming links are
    // not included in an entity's summary here.
    const linksByEntity = new Map<string, Link[]>();
    if (op.links) {
      for (const l of op.links) {
        const existing = linksByEntity.get(l.e1) ?? [];
        existing.push(l);
        linksByEntity.set(l.e1, existing);
      }
    }

    const records: EmbeddingRecord[] = [];

    for (const [eid, facts] of factsByEntity) {
      const links = linksByEntity.get(eid) ?? [];

      // Entity summary embedding — skipped when the summary renders empty.
      const summaryText = entitySummaryText(eid, facts, links);
      if (summaryText.trim()) {
        try {
          const vector = await embedFn(summaryText);
          records.push({
            id: `entity:${eid}:summary`,
            entityId: eid,
            content: summaryText,
            // NOTE(review): 'summary_md' is outside the declared ChunkType
            // union, hence the cast — consider widening the record type to
            // GraphChunkType instead of using `as any`.
            chunkType: 'summary_md' as any,
            updatedAt: now,
            embedding: vector,
          });
        } catch {}
      }

      // Individual fact embeddings (optional); reserved attributes are
      // skipped since they carry no searchable content.
      if (embedIndividual) {
        for (const fact of facts) {
          if (['type', 'createdAt', 'updatedAt'].includes(fact.a)) continue;
          const text = `${fact.a}: ${fact.v}`;
          try {
            const vector = await embedFn(text);
            records.push({
              // One record per attribute name — a later fact with the same
              // attribute overwrites the earlier one on upsert.
              id: `entity:${eid}:fact:${fact.a}`,
              entityId: eid,
              content: text,
              chunkType: 'doc_comment' as any,
              updatedAt: now,
              embedding: vector,
            });
          } catch {}
        }
      }
    }

    if (records.length > 0) {
      store.upsertBatch(records);
    }
  }
}
|
|
184
|
+
|
|
185
|
+
// ---------------------------------------------------------------------------
|
|
186
|
+
// RAG Context Builder
|
|
187
|
+
// ---------------------------------------------------------------------------
|
|
188
|
+
|
|
189
|
+
/**
 * Assembled retrieval context for a RAG (retrieval-augmented generation)
 * prompt, as produced by buildRAGContext.
 */
export interface RAGContext {
  /** The original query. */
  query: string;
  /** Retrieved chunks ranked by relevance. */
  chunks: Array<{
    content: string;
    entityId: string;
    score: number;
    chunkType: string;
  }>;
  /** Total token estimate (rough: 1 token ≈ 4 chars). */
  estimatedTokens: number;
}
|
|
202
|
+
|
|
203
|
+
/**
|
|
204
|
+
* Build a RAG context from a natural language query.
|
|
205
|
+
* Searches the vector store and assembles ranked context chunks.
|
|
206
|
+
*/
|
|
207
|
+
export async function buildRAGContext(
|
|
208
|
+
query: string,
|
|
209
|
+
vectorStore: VectorStore,
|
|
210
|
+
embedFn: Embedder = embed,
|
|
211
|
+
options?: {
|
|
212
|
+
maxChunks?: number;
|
|
213
|
+
maxTokens?: number;
|
|
214
|
+
minScore?: number;
|
|
215
|
+
},
|
|
216
|
+
): Promise<RAGContext> {
|
|
217
|
+
const maxChunks = options?.maxChunks ?? 10;
|
|
218
|
+
const maxTokens = options?.maxTokens ?? 4000;
|
|
219
|
+
const minScore = options?.minScore ?? 0.1;
|
|
220
|
+
|
|
221
|
+
const queryVector = await embedFn(query);
|
|
222
|
+
const results = vectorStore.search(queryVector, {
|
|
223
|
+
limit: maxChunks * 2,
|
|
224
|
+
minScore,
|
|
225
|
+
});
|
|
226
|
+
|
|
227
|
+
const chunks: RAGContext['chunks'] = [];
|
|
228
|
+
let totalChars = 0;
|
|
229
|
+
|
|
230
|
+
for (const r of results) {
|
|
231
|
+
if (chunks.length >= maxChunks) break;
|
|
232
|
+
if (totalChars + r.chunk.content.length > maxTokens * 4) break;
|
|
233
|
+
|
|
234
|
+
chunks.push({
|
|
235
|
+
content: r.chunk.content,
|
|
236
|
+
entityId: r.chunk.entityId,
|
|
237
|
+
score: r.score,
|
|
238
|
+
chunkType: r.chunk.chunkType,
|
|
239
|
+
});
|
|
240
|
+
totalChars += r.chunk.content.length;
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
return {
|
|
244
|
+
query,
|
|
245
|
+
chunks,
|
|
246
|
+
estimatedTokens: Math.ceil(totalChars / 4),
|
|
247
|
+
};
|
|
248
|
+
}
|
package/src/embeddings/index.ts
CHANGED
|
@@ -27,6 +27,13 @@ export { VectorStore, cosineSimilarity } from './store.js';
|
|
|
27
27
|
export { EmbeddingManager } from './search.js';
|
|
28
28
|
export type { SearchableEngine, Embedder } from './search.js';
|
|
29
29
|
|
|
30
|
+
// Auto-embedding middleware + RAG
|
|
31
|
+
export { createAutoEmbedMiddleware, buildRAGContext } from './auto-embed.js';
|
|
32
|
+
export type { AutoEmbedOptions, RAGContext } from './auto-embed.js';
|
|
33
|
+
|
|
34
|
+
// New graph chunk types
|
|
35
|
+
export type { GraphChunkType } from './types.js';
|
|
36
|
+
|
|
30
37
|
// Chunker
|
|
31
38
|
export {
|
|
32
39
|
chunkIssue,
|
package/src/embeddings/model.ts
CHANGED
|
@@ -1,7 +1,8 @@
|
|
|
1
1
|
/**
|
|
2
2
|
* Embedding Model
|
|
3
3
|
*
|
|
4
|
-
* Lazy-loads @
|
|
4
|
+
* Lazy-loads @huggingface/transformers (v3+) with all-MiniLM-L6-v2 (384-dim).
|
|
5
|
+
* Falls back to @xenova/transformers (v2) if the new package is unavailable.
|
|
5
6
|
* Model is loaded once on first use and cached for subsequent calls.
|
|
6
7
|
*
|
|
7
8
|
* @see TRL-18
|
|
@@ -16,6 +17,24 @@ import { EmbeddingModelConfig, DEFAULT_MODEL_CONFIG } from './types.js';
|
|
|
16
17
|
let pipeline: any = null;
|
|
17
18
|
let loadPromise: Promise<any> | null = null;
|
|
18
19
|
|
|
20
|
+
/**
|
|
21
|
+
* Dynamically import the transformers library.
|
|
22
|
+
* Tries @huggingface/transformers first (v3+), falls back to @xenova/transformers (v2).
|
|
23
|
+
*/
|
|
24
|
+
async function importTransformers(): Promise<{ pipeline: any }> {
|
|
25
|
+
try {
|
|
26
|
+
return await import('@huggingface/transformers' as string);
|
|
27
|
+
} catch {
|
|
28
|
+
try {
|
|
29
|
+
return await import('@xenova/transformers' as string);
|
|
30
|
+
} catch {
|
|
31
|
+
throw new Error(
|
|
32
|
+
'No transformers library found. Install @huggingface/transformers (recommended) or @xenova/transformers.',
|
|
33
|
+
);
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
|
|
19
38
|
/**
|
|
20
39
|
* Load the embedding model lazily. Returns the feature-extraction pipeline.
|
|
21
40
|
* Subsequent calls return the cached pipeline.
|
|
@@ -27,9 +46,7 @@ export async function loadModel(
|
|
|
27
46
|
|
|
28
47
|
if (!loadPromise) {
|
|
29
48
|
loadPromise = (async () => {
|
|
30
|
-
const { pipeline: createPipeline } = await
|
|
31
|
-
'@xenova/transformers'
|
|
32
|
-
);
|
|
49
|
+
const { pipeline: createPipeline } = await importTransformers();
|
|
33
50
|
const opts: Record<string, unknown> = {};
|
|
34
51
|
if (config.cacheDir) {
|
|
35
52
|
opts.cache_dir = config.cacheDir;
|
package/src/embeddings/types.ts
CHANGED
|
@@ -71,7 +71,7 @@ export interface SearchOptions {
|
|
|
71
71
|
// ---------------------------------------------------------------------------
|
|
72
72
|
|
|
73
73
|
export interface EmbeddingModelConfig {
|
|
74
|
-
/** Model name for
|
|
74
|
+
/** Model name for transformers.js (default: "Xenova/all-MiniLM-L6-v2") */
|
|
75
75
|
modelName: string;
|
|
76
76
|
/** Embedding dimension (default: 384) */
|
|
77
77
|
dimension: number;
|
|
@@ -83,3 +83,10 @@ export const DEFAULT_MODEL_CONFIG: EmbeddingModelConfig = {
|
|
|
83
83
|
modelName: 'Xenova/all-MiniLM-L6-v2',
|
|
84
84
|
dimension: 384,
|
|
85
85
|
};
|
|
86
|
+
|
|
87
|
+
/**
 * New chunk types for generic graph entities.
 * Widens the base ChunkType union with entity-level variants
 * (whole-entity summary, single fact, single link).
 */
export type GraphChunkType =
  | ChunkType
  | 'entity_summary'
  | 'entity_fact'
  | 'entity_link';
|
package/src/engine.ts
CHANGED
|
@@ -22,7 +22,7 @@ import { readFile } from 'fs/promises';
|
|
|
22
22
|
import { join, dirname } from 'path';
|
|
23
23
|
import { EAVStore } from './core/store/eav-store.js';
|
|
24
24
|
import type { Fact, Link } from './core/store/eav-store.js';
|
|
25
|
-
import { FileWatcher } from './watcher/fs-watcher.js';
|
|
25
|
+
import { FileWatcher, type ScanProgress } from './watcher/fs-watcher.js';
|
|
26
26
|
import { Ingestion } from './watcher/ingestion.js';
|
|
27
27
|
import { decompose } from './vcs/decompose.js';
|
|
28
28
|
import { createVcsOp, isVcsOpKind } from './vcs/ops.js';
|
|
@@ -235,6 +235,13 @@ interface PersistedConfig {
|
|
|
235
235
|
// Engine
|
|
236
236
|
// ---------------------------------------------------------------------------
|
|
237
237
|
|
|
238
|
+
/** Progress report emitted while initializing a repository (see initRepo). */
export interface InitProgress {
  /** Current initialization phase. */
  phase: 'discovering' | 'hashing' | 'recording' | 'done';
  /** Units completed so far within the current phase. */
  current: number;
  /** Total units expected for the current phase. */
  total: number;
  /** Human-readable status line. */
  message: string;
}
|
|
244
|
+
|
|
238
245
|
export class TrellisVcsEngine {
|
|
239
246
|
private config: TrellisVcsConfig;
|
|
240
247
|
private store: EAVStore;
|
|
@@ -281,7 +288,9 @@ export class TrellisVcsEngine {
|
|
|
281
288
|
/**
|
|
282
289
|
* Initialize a new TrellisVCS repo. Creates .trellis/ directory and config.
|
|
283
290
|
*/
|
|
284
|
-
async initRepo(
|
|
291
|
+
async initRepo(opts?: {
|
|
292
|
+
onProgress?: (progress: InitProgress) => void;
|
|
293
|
+
}): Promise<{ opsCreated: number }> {
|
|
285
294
|
const trellisDir = join(this.config.rootPath, '.trellis');
|
|
286
295
|
if (!existsSync(trellisDir)) {
|
|
287
296
|
mkdirSync(trellisDir, { recursive: true });
|
|
@@ -322,9 +331,27 @@ export class TrellisVcsEngine {
|
|
|
322
331
|
debounceMs: this.config.debounceMs,
|
|
323
332
|
onEvent: () => {},
|
|
324
333
|
});
|
|
325
|
-
const events = await scanner.scan(
|
|
334
|
+
const events = await scanner.scan({
|
|
335
|
+
onProgress: (progress: ScanProgress) => {
|
|
336
|
+
if (progress.phase === 'done') {
|
|
337
|
+
return;
|
|
338
|
+
}
|
|
339
|
+
opts?.onProgress?.({
|
|
340
|
+
phase: progress.phase,
|
|
341
|
+
current: progress.current,
|
|
342
|
+
total: progress.total,
|
|
343
|
+
message: progress.message,
|
|
344
|
+
});
|
|
345
|
+
},
|
|
346
|
+
});
|
|
326
347
|
|
|
327
348
|
let opsCreated = 1; // branch op
|
|
349
|
+
opts?.onProgress?.({
|
|
350
|
+
phase: 'recording',
|
|
351
|
+
current: 0,
|
|
352
|
+
total: events.length,
|
|
353
|
+
message: `Recording ${events.length} initial file operations…`,
|
|
354
|
+
});
|
|
328
355
|
for (const event of events) {
|
|
329
356
|
// Store file content in blob store
|
|
330
357
|
if (event.contentHash) {
|
|
@@ -346,9 +373,24 @@ export class TrellisVcsEngine {
|
|
|
346
373
|
});
|
|
347
374
|
this.applyOp(op);
|
|
348
375
|
opsCreated++;
|
|
376
|
+
const recordedFiles = opsCreated - 1;
|
|
377
|
+
if (recordedFiles % 25 === 0 || recordedFiles === events.length) {
|
|
378
|
+
opts?.onProgress?.({
|
|
379
|
+
phase: 'recording',
|
|
380
|
+
current: recordedFiles,
|
|
381
|
+
total: events.length,
|
|
382
|
+
message: `Recorded ${recordedFiles}/${events.length} initial file ops`,
|
|
383
|
+
});
|
|
384
|
+
}
|
|
349
385
|
}
|
|
350
386
|
|
|
351
387
|
await this.flushAutoCheckpoint();
|
|
388
|
+
opts?.onProgress?.({
|
|
389
|
+
phase: 'done',
|
|
390
|
+
current: opsCreated,
|
|
391
|
+
total: opsCreated,
|
|
392
|
+
message: `Initialized repository with ${opsCreated} operations`,
|
|
393
|
+
});
|
|
352
394
|
return { opsCreated };
|
|
353
395
|
}
|
|
354
396
|
|
package/src/index.ts
CHANGED
|
@@ -18,3 +18,12 @@ export { TrellisVcsEngine } from './engine.js';
|
|
|
18
18
|
export * from './vcs/index.js';
|
|
19
19
|
export { FileWatcher } from './watcher/fs-watcher.js';
|
|
20
20
|
export { Ingestion } from './watcher/ingestion.js';
|
|
21
|
+
|
|
22
|
+
// Core kernel (generic graph CRUD, independent of VCS)
|
|
23
|
+
export { TrellisKernel } from './core/kernel/trellis-kernel.js';
|
|
24
|
+
export { SqliteKernelBackend } from './core/persist/sqlite-backend.js';
|
|
25
|
+
export type {
|
|
26
|
+
KernelConfig,
|
|
27
|
+
MutateResult,
|
|
28
|
+
EntityRecord,
|
|
29
|
+
} from './core/kernel/trellis-kernel.js';
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* HTTP Sync Transport
|
|
3
|
+
*
|
|
4
|
+
* Implements SyncTransport over HTTP for network-based peer sync.
|
|
5
|
+
* Uses a simple JSON REST protocol:
|
|
6
|
+
* POST /sync/message — send a sync message
|
|
7
|
+
* GET /sync/peers — list connected peers
|
|
8
|
+
*
|
|
9
|
+
* The server side is a lightweight Bun HTTP server.
|
|
10
|
+
* The client side uses fetch() for outbound messages.
|
|
11
|
+
*
|
|
12
|
+
* @module trellis/sync
|
|
13
|
+
*/
|
|
14
|
+
|
|
15
|
+
import type { SyncTransport, SyncMessage, PeerId } from './types.js';
|
|
16
|
+
|
|
17
|
+
// ---------------------------------------------------------------------------
|
|
18
|
+
// HTTP Transport (Client)
|
|
19
|
+
// ---------------------------------------------------------------------------
|
|
20
|
+
|
|
21
|
+
export class HttpSyncTransport implements SyncTransport {
|
|
22
|
+
private localPeerId: string;
|
|
23
|
+
private peerUrls: Map<string, string> = new Map();
|
|
24
|
+
private messageHandler: ((msg: SyncMessage) => void) | null = null;
|
|
25
|
+
private knownPeers: Map<string, PeerId> = new Map();
|
|
26
|
+
|
|
27
|
+
constructor(localPeerId: string) {
|
|
28
|
+
this.localPeerId = localPeerId;
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
/**
|
|
32
|
+
* Add a remote peer by URL (e.g. "http://192.168.1.10:4200").
|
|
33
|
+
*/
|
|
34
|
+
addPeer(peerId: string, url: string, name?: string): void {
|
|
35
|
+
this.peerUrls.set(peerId, url);
|
|
36
|
+
this.knownPeers.set(peerId, {
|
|
37
|
+
id: peerId,
|
|
38
|
+
name: name ?? peerId,
|
|
39
|
+
lastSeen: new Date().toISOString(),
|
|
40
|
+
});
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
/**
|
|
44
|
+
* Remove a remote peer.
|
|
45
|
+
*/
|
|
46
|
+
removePeer(peerId: string): void {
|
|
47
|
+
this.peerUrls.delete(peerId);
|
|
48
|
+
this.knownPeers.delete(peerId);
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
async send(peerId: string, message: SyncMessage): Promise<void> {
|
|
52
|
+
const url = this.peerUrls.get(peerId);
|
|
53
|
+
if (!url) throw new Error(`Unknown peer "${peerId}". Add it with addPeer() first.`);
|
|
54
|
+
|
|
55
|
+
const resp = await fetch(`${url}/sync/message`, {
|
|
56
|
+
method: 'POST',
|
|
57
|
+
headers: { 'Content-Type': 'application/json' },
|
|
58
|
+
body: JSON.stringify(message),
|
|
59
|
+
});
|
|
60
|
+
|
|
61
|
+
if (!resp.ok) {
|
|
62
|
+
throw new Error(`Sync message to ${peerId} failed: ${resp.status} ${resp.statusText}`);
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
// Check if the response contains a reply message
|
|
66
|
+
const contentType = resp.headers.get('content-type');
|
|
67
|
+
if (contentType?.includes('application/json')) {
|
|
68
|
+
const reply = await resp.json();
|
|
69
|
+
if (reply && reply.type && this.messageHandler) {
|
|
70
|
+
this.messageHandler(reply as SyncMessage);
|
|
71
|
+
}
|
|
72
|
+
}
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
onMessage(handler: (message: SyncMessage) => void): void {
|
|
76
|
+
this.messageHandler = handler;
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
/**
|
|
80
|
+
* Receive a message (called by the HTTP server handler).
|
|
81
|
+
*/
|
|
82
|
+
receiveMessage(message: SyncMessage): void {
|
|
83
|
+
// Track peer
|
|
84
|
+
this.knownPeers.set(message.peerId, {
|
|
85
|
+
id: message.peerId,
|
|
86
|
+
name: message.peerId,
|
|
87
|
+
lastSeen: new Date().toISOString(),
|
|
88
|
+
});
|
|
89
|
+
|
|
90
|
+
if (this.messageHandler) {
|
|
91
|
+
this.messageHandler(message);
|
|
92
|
+
}
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
peers(): PeerId[] {
|
|
96
|
+
return [...this.knownPeers.values()];
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
getLocalPeerId(): string {
|
|
100
|
+
return this.localPeerId;
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
// ---------------------------------------------------------------------------
|
|
105
|
+
// HTTP Sync Server (creates Bun.serve handler)
|
|
106
|
+
// ---------------------------------------------------------------------------
|
|
107
|
+
|
|
108
|
+
export interface HttpSyncServerConfig {
|
|
109
|
+
port: number;
|
|
110
|
+
transport: HttpSyncTransport;
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
/**
|
|
114
|
+
* Create a Bun-compatible HTTP request handler for sync messages.
|
|
115
|
+
* Can be used with Bun.serve() or as middleware.
|
|
116
|
+
*/
|
|
117
|
+
export function createSyncHandler(transport: HttpSyncTransport): (req: Request) => Response | null {
|
|
118
|
+
return (req: Request): Response | null => {
|
|
119
|
+
const url = new URL(req.url);
|
|
120
|
+
|
|
121
|
+
if (url.pathname === '/sync/message' && req.method === 'POST') {
|
|
122
|
+
// Handle async parsing synchronously for Bun
|
|
123
|
+
return new Response(
|
|
124
|
+
req.json().then((body: any) => {
|
|
125
|
+
transport.receiveMessage(body as SyncMessage);
|
|
126
|
+
return JSON.stringify({ ok: true });
|
|
127
|
+
}) as any,
|
|
128
|
+
{ headers: { 'Content-Type': 'application/json' } },
|
|
129
|
+
);
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
if (url.pathname === '/sync/peers' && req.method === 'GET') {
|
|
133
|
+
return new Response(
|
|
134
|
+
JSON.stringify({
|
|
135
|
+
localPeerId: transport.getLocalPeerId(),
|
|
136
|
+
peers: transport.peers(),
|
|
137
|
+
}),
|
|
138
|
+
{ headers: { 'Content-Type': 'application/json' } },
|
|
139
|
+
);
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
return null; // Not a sync route
|
|
143
|
+
};
|
|
144
|
+
}
|
package/src/sync/index.ts
CHANGED
|
@@ -30,3 +30,14 @@ export type { ReconcileResult, ReconcileConflict } from './reconciler.js';
|
|
|
30
30
|
export { SyncEngine } from './sync-engine.js';
|
|
31
31
|
|
|
32
32
|
export { MemoryTransport } from './memory-transport.js';
|
|
33
|
+
|
|
34
|
+
export { HttpSyncTransport, createSyncHandler } from './http-transport.js';
|
|
35
|
+
|
|
36
|
+
export { WebSocketSyncTransport } from './ws-transport.js';
|
|
37
|
+
|
|
38
|
+
export {
|
|
39
|
+
MultiRepoManager,
|
|
40
|
+
parseCrossRepoRef,
|
|
41
|
+
formatCrossRepoRef,
|
|
42
|
+
} from './multi-repo.js';
|
|
43
|
+
export type { LinkedRepo, CrossRepoRef } from './multi-repo.js';
|