semantic-grep 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,89 @@
+ import * as AI from "ai";
+ import { createVoyage } from "voyage-ai-provider";
+ import { Effect, Schedule } from "effect";
+ import { GlobalStore } from "../storage/global-store";
+ import { EmbeddingError } from "../types";
+
+ export namespace Embedder {
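+   // The Voyage client is rebuilt from the stored API key on every run of this
+   // effect, so a key configured after startup is picked up without a restart.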
+   const getVoyager = GlobalStore.getApiKeyOrFail.pipe(
+     Effect.map((apiKey) => createVoyage({ apiKey }))
+   );
+
+   export const embedQuery = (query: string) =>
+     getVoyager.pipe(
+       Effect.flatMap((voyager) =>
+         Effect.tryPromise({
+           try: () =>
+             AI.embed({
+               model: voyager.textEmbeddingModel("voyage-code-3"),
+               value: query,
+             }),
+           catch: (error) =>
+             new EmbeddingError({
+               message: "Failed to embed query",
+               cause: error,
+             }),
+         })
+       ),
+       Effect.map((result) => result.embedding)
+     );
+
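+   // Batch embedding with exponential backoff (1s, 2s, 4s; up to 3 retries).
+   // Once retries are exhausted it degrades gracefully: callers receive empty
+   // vectors instead of a failed pipeline.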
+   export const embedMany = (values: string[]) => {
+     if (values.length === 0) {
+       return Effect.succeed({ embeddings: [] as number[][], tokens: 0 });
+     }
+
+     return getVoyager.pipe(
+       Effect.flatMap((voyager) =>
+         Effect.tryPromise({
+           try: () =>
+             AI.embedMany({
+               model: voyager.textEmbeddingModel("voyage-code-3"),
+               values,
+             }),
+           catch: (error) =>
+             new EmbeddingError({
+               message: "Failed to embed batch",
+               cause: error,
+             }),
+         })
+       ),
+       Effect.map((result) => ({
+         embeddings: result.embeddings,
+         tokens: result.usage?.tokens ?? 0,
+       })),
+       Effect.retry({
+         schedule: Schedule.exponential("1 second", 2),
+         times: 3,
+       }),
+       Effect.catchTag("EmbeddingError", (error) => {
+         console.error(`Embedding failed after retries: ${error.message}`);
+         return Effect.succeed({
+           embeddings: values.map(() => [] as number[]),
+           tokens: 0,
+         });
+       })
+     );
+   };
+
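+   // cos(a, b) = (a . b) / (|a| |b|); returns 0 for empty or zero-norm inputs
+   // (e.g. the placeholder vectors produced when embedding fails).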
+   export const cosineSimilarity = (a: number[], b: number[]): number => {
+     if (a.length === 0 || b.length === 0) return 0;
+
+     let dotProduct = 0;
+     let normA = 0;
+     let normB = 0;
+
+     for (let i = 0; i < a.length; i++) {
+       const ai = a[i] ?? 0;
+       const bi = b[i] ?? 0;
+       dotProduct += ai * bi;
+       normA += ai * ai;
+       normB += bi * bi;
+     }
+
+     const denominator = Math.sqrt(normA) * Math.sqrt(normB);
+     if (denominator === 0) return 0;
+
+     return dotProduct / denominator;
+   };
+ }
@@ -0,0 +1,343 @@
+ import * as crypto from "crypto";
+ import { chunkBatchStreamEffect } from "code-chunk";
+ import { Effect, Ref, Schedule, Fiber, Console } from "effect";
+ import * as Chunk from "effect/Chunk";
+ import * as Stream from "effect/Stream";
+
+ import {
+   type ChunkWithMeta,
+   type ChunkWithEmbedding,
+   type ChunkReference,
+   type ProjectManifest,
+   FileReadError,
+ } from "../types";
+ import { Embedder } from "./embedder";
+ import { EmbeddingCache } from "../storage/embedding-cache";
+ import { GlobalStore } from "../storage/global-store";
+ import { GitTracker } from "../watcher/git-tracker";
+
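+ // Progress counters shared across concurrent fibers; Ref makes each update atomic.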
+ interface IndexStats {
+   filesRead: Ref.Ref<number>;
+   chunksCreated: Ref.Ref<number>;
+   cacheHits: Ref.Ref<number>;
+   embedded: Ref.Ref<number>;
+   processed: Ref.Ref<number>;
+   skippedFiles: Ref.Ref<string[]>;
+ }
+
+ export namespace Indexer {
+   export const filterFilePath = GitTracker.filterFilePath;
+
+   export const hashChunk = (text: string): string => {
+     return crypto.createHash("sha256").update(text).digest("hex");
+   };
+
+   export const toChunkReference = (
+     chunk: ChunkWithEmbedding
+   ): ChunkReference => {
+     return {
+       chunkHash: chunk.hash,
+       filepath: chunk.filepath,
+       lineRange: chunk.lineRange,
+       text: chunk.text,
+     };
+   };
+
+   export const getFilePaths = (pwd: string) => GitTracker.getTrackedFiles(pwd);
+
+   export const readFile = (pwd: string, filepath: string) =>
+     Effect.tryPromise({
+       try: () => Bun.file(`${pwd}/${filepath}`).text(),
+       catch: (error) =>
+         new FileReadError({
+           path: `${pwd}/${filepath}`,
+           cause: error,
+         }),
+     });
+
+   const logProgress = (stats: IndexStats) =>
+     Effect.gen(function* () {
+       const filesRead = yield* Ref.get(stats.filesRead);
+       const chunksCreated = yield* Ref.get(stats.chunksCreated);
+       const cacheHits = yield* Ref.get(stats.cacheHits);
+       const embedded = yield* Ref.get(stats.embedded);
+       const processed = yield* Ref.get(stats.processed);
+       const skippedFiles = yield* Ref.get(stats.skippedFiles);
+
+       yield* Console.log(
+         `[Progress] Files: ${filesRead} | Skipped: ${skippedFiles.length} | Chunks: ${chunksCreated} | Cache hits: ${cacheHits} | Embedded: ${embedded} | Processed: ${processed}`
+       );
+     });
+
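+   // Per-batch flow: look up each chunk hash in the on-disk cache, embed only
+   // the misses, then write the fresh embeddings back so later runs hit cache.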
+   const processBatch = (chunks: readonly ChunkWithMeta[], stats: IndexStats) =>
+     Effect.gen(function* () {
+       const chunksWithCache = yield* Effect.all(
+         chunks.map((chunk) =>
+           EmbeddingCache.get(chunk.hash).pipe(
+             Effect.map((cached) => ({ ...chunk, cached }))
+           )
+         )
+       );
+
+       const cachedChunks = chunksWithCache.filter(
+         (c): c is typeof c & { cached: number[] } => c.cached !== null
+       );
+       const uncachedChunks = chunksWithCache.filter((c) => c.cached === null);
+
+       if (cachedChunks.length > 0) {
+         yield* Ref.update(stats.cacheHits, (n) => n + cachedChunks.length);
+       }
+
+       let embeddedChunks: ChunkWithEmbedding[] = [];
+       if (uncachedChunks.length > 0) {
+         const result = yield* Embedder.embedMany(
+           uncachedChunks.map((c) => c.text)
+         );
+
+         embeddedChunks = uncachedChunks.map((chunk, index) => ({
+           filepath: chunk.filepath,
+           hash: chunk.hash,
+           text: chunk.text,
+           lineRange: chunk.lineRange,
+           embedding: result.embeddings[index] ?? [],
+         }));
+
+         yield* Ref.update(stats.embedded, (n) => n + embeddedChunks.length);
+
+         yield* Effect.all(
+           embeddedChunks.map((chunk) =>
+             EmbeddingCache.save(chunk.hash, chunk.embedding)
+           )
+         );
+       }
+
+       const finalChunks: ChunkWithEmbedding[] = [
+         ...cachedChunks.map((c) => ({
+           filepath: c.filepath,
+           hash: c.hash,
+           text: c.text,
+           lineRange: c.lineRange,
+           embedding: c.cached,
+         })),
+         ...embeddedChunks,
+       ];
+
+       return finalChunks;
+     });
+
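+   // Full index: read every git-tracked file, chunk, and embed (cache-aware),
+   // while a daemon fiber logs progress once per second.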
+   export const indexProject = (pwd: string) =>
+     Effect.gen(function* () {
+       yield* GlobalStore.ensureDirectories;
+
+       const stats: IndexStats = {
+         filesRead: yield* Ref.make(0),
+         chunksCreated: yield* Ref.make(0),
+         cacheHits: yield* Ref.make(0),
+         embedded: yield* Ref.make(0),
+         processed: yield* Ref.make(0),
+         skippedFiles: yield* Ref.make<string[]>([]),
+       };
+
+       const logFiber = yield* logProgress(stats).pipe(
+         Effect.repeat(Schedule.fixed("1 second")),
+         Effect.forkDaemon
+       );
+
+       const filePaths = yield* getFilePaths(pwd);
+       const fileResults = yield* Effect.all(
+         filePaths.map((filepath) =>
+           readFile(pwd, filepath).pipe(
+             Effect.tap(() => Ref.update(stats.filesRead, (n) => n + 1)),
+             Effect.map((code) => ({ filepath, code, skipped: false as const })),
+             Effect.catchAll(() =>
+               Ref.update(stats.skippedFiles, (arr) => [...arr, filepath]).pipe(
+                 Effect.map(() => ({
+                   filepath,
+                   code: "",
+                   skipped: true as const,
+                 }))
+               )
+             )
+           )
+         ),
+         { concurrency: "unbounded" }
+       );
+
+       const files = fileResults.filter((f) => !f.skipped);
+
+       yield* Console.log(
+         `[Indexing] Read ${files.length} files, starting chunking...`
+       );
+
+       const allChunks = yield* chunkBatchStreamEffect(files, {
+         maxChunkSize: 4000,
+         concurrency: Infinity,
+       }).pipe(
+         Stream.flatMap((result) => {
+           if (result.error !== null || result.chunks === null) {
+             return Stream.empty;
+           }
+           return Stream.fromIterable(
+             result.chunks.map((chunk) => ({
+               filepath: result.filepath,
+               hash: hashChunk(chunk.contextualizedText),
+               text: chunk.contextualizedText,
+               lineRange: chunk.lineRange,
+             }))
+           );
+         }),
+         Stream.tap(() => Ref.update(stats.chunksCreated, (n) => n + 1)),
+         Stream.grouped(5),
+         Stream.mapEffect(
+           (batch) => processBatch(Chunk.toReadonlyArray(batch), stats),
+           { concurrency: 50 }
+         ),
+         Stream.flatMap((chunks) => Stream.fromIterable(chunks)),
+         Stream.tap(() => Ref.update(stats.processed, (n) => n + 1)),
+         Stream.runCollect,
+         Effect.map(Chunk.toReadonlyArray)
+       );
+
+       yield* Fiber.interrupt(logFiber);
+       yield* logProgress(stats);
+
+       const skippedFiles = yield* Ref.get(stats.skippedFiles);
+       if (skippedFiles.length > 0) {
+         yield* Console.log(
+           `[Skipped Files] Could not read ${skippedFiles.length} files`
+         );
+       }
+
+       yield* Console.log(
+         `[Complete] Indexed ${allChunks.length} chunks total`
+       );
+
+       return allChunks;
+     });
+
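+   // Incremental variant: same chunk/embed pipeline, restricted to the given
+   // paths and without the background progress fiber.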
+   export const indexFiles = (pwd: string, filepaths: string[]) =>
+     Effect.gen(function* () {
+       yield* GlobalStore.ensureDirectories;
+
+       const filtered = filepaths.filter(filterFilePath);
+       if (filtered.length === 0) {
+         return [];
+       }
+
+       yield* Console.log(`[Indexing] Processing ${filtered.length} files...`);
+
+       const fileResults = yield* Effect.all(
+         filtered.map((filepath) =>
+           readFile(pwd, filepath).pipe(
+             Effect.map((code) => ({ filepath, code, skipped: false as const })),
+             Effect.catchAll(() =>
+               Effect.succeed({ filepath, code: "", skipped: true as const })
+             )
+           )
+         ),
+         { concurrency: "unbounded" }
+       );
+
+       const files = fileResults.filter((f) => !f.skipped);
+
+       const stats: IndexStats = {
+         filesRead: yield* Ref.make(files.length),
+         chunksCreated: yield* Ref.make(0),
+         cacheHits: yield* Ref.make(0),
+         embedded: yield* Ref.make(0),
+         processed: yield* Ref.make(0),
+         skippedFiles: yield* Ref.make<string[]>([]),
+       };
+
+       const allChunks = yield* chunkBatchStreamEffect(files, {
+         maxChunkSize: 4000,
+         concurrency: Infinity,
+       }).pipe(
+         Stream.flatMap((result) => {
+           if (result.error !== null || result.chunks === null) {
+             return Stream.empty;
+           }
+           return Stream.fromIterable(
+             result.chunks.map((chunk) => ({
+               filepath: result.filepath,
+               hash: hashChunk(chunk.contextualizedText),
+               text: chunk.contextualizedText,
+               lineRange: chunk.lineRange,
+             }))
+           );
+         }),
+         Stream.tap(() => Ref.update(stats.chunksCreated, (n) => n + 1)),
+         Stream.grouped(5),
+         Stream.mapEffect(
+           (batch) => processBatch(Chunk.toReadonlyArray(batch), stats),
+           { concurrency: 50 }
+         ),
+         Stream.flatMap((chunks) => Stream.fromIterable(chunks)),
+         Stream.runCollect,
+         Effect.map(Chunk.toReadonlyArray)
+       );
+
+       yield* Console.log(
+         `[Complete] Indexed ${allChunks.length} chunks from ${files.length} files`
+       );
+
+       return allChunks;
+     });
+
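+   // Writes a fresh manifest mapping chunk hashes to their source locations;
+   // the embeddings themselves live in the global cache, keyed by hash.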
+   export const saveIndexedChunks = (
+     pwd: string,
+     chunks: ChunkWithEmbedding[],
+     gitCommitHash?: string
+   ) => {
+     const projectId = GlobalStore.getProjectId(pwd);
+
+     const manifest: ProjectManifest = {
+       projectId,
+       projectPath: pwd,
+       lastIndexedAt: new Date().toISOString(),
+       gitCommitHash,
+       chunks: chunks.map(toChunkReference),
+     };
+
+     return GlobalStore.saveProjectManifest(manifest);
+   };
+
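+   // Merge update: drop existing entries for deleted or re-indexed files, then
+   // append references for the new chunks.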
+   export const updateManifestWithChanges = (
+     pwd: string,
+     addedOrModifiedChunks: ChunkWithEmbedding[],
+     deletedFilepaths: string[],
+     gitCommitHash?: string
+   ) =>
+     Effect.gen(function* () {
+       const projectId = GlobalStore.getProjectId(pwd);
+       const existingManifest = yield* GlobalStore.loadProjectManifest(
+         projectId
+       );
+
+       let existingChunks: ChunkReference[] = [];
+       if (existingManifest) {
+         existingChunks = existingManifest.chunks.filter(
+           (c) => !deletedFilepaths.includes(c.filepath)
+         );
+         const modifiedFilepaths = new Set(
+           addedOrModifiedChunks.map((c) => c.filepath)
+         );
+         existingChunks = existingChunks.filter(
+           (c) => !modifiedFilepaths.has(c.filepath)
+         );
+       }
+
+       const newChunks = addedOrModifiedChunks.map(toChunkReference);
+
+       const manifest: ProjectManifest = {
+         projectId,
+         projectPath: pwd,
+         lastIndexedAt: new Date().toISOString(),
+         gitCommitHash,
+         chunks: [...existingChunks, ...newChunks],
+       };
+
+       yield* GlobalStore.saveProjectManifest(manifest);
+     });
+ }
@@ -0,0 +1,71 @@
+ import { Effect } from "effect";
+ import { EMBEDDINGS_DIR, FileReadError } from "../types";
+ import { GlobalStore } from "./global-store";
+
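+ // Content-addressed cache: each embedding is stored as JSON at
+ // `${EMBEDDINGS_DIR}/<sha256 of chunk text>.json`, so an identical chunk is
+ // embedded at most once across projects.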
+ export namespace EmbeddingCache {
+   export const get = (hash: string) =>
+     Effect.tryPromise({
+       try: async () => {
+         const embeddingPath = `${EMBEDDINGS_DIR}/${hash}.json`;
+         const file = Bun.file(embeddingPath);
+         if (await file.exists()) {
+           return (await file.json()) as number[];
+         }
+         return null;
+       },
+       catch: (error) =>
+         new FileReadError({
+           path: `${EMBEDDINGS_DIR}/${hash}.json`,
+           cause: error,
+         }),
+     });
+
+   export const save = (hash: string, embedding: number[]) =>
+     GlobalStore.ensureDirectories.pipe(
+       Effect.flatMap(() =>
+         Effect.tryPromise({
+           try: () => {
+             const embeddingPath = `${EMBEDDINGS_DIR}/${hash}.json`;
+             return Bun.write(embeddingPath, JSON.stringify(embedding));
+           },
+           catch: (error) =>
+             new FileReadError({
+               path: `${EMBEDDINGS_DIR}/${hash}.json`,
+               cause: error,
+             }),
+         })
+       )
+     );
+
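+   // Bulk helpers fan out over small local files; the concurrency cap of 1000
+   // keeps file-descriptor use bounded while remaining effectively parallel.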
+   export const getMany = (hashes: string[]) =>
+     Effect.all(
+       hashes.map((hash) =>
+         get(hash).pipe(Effect.map((embedding) => [hash, embedding] as const))
+       ),
+       { concurrency: 1000 }
+     ).pipe(
+       Effect.map((entries) => new Map(entries) as Map<string, number[] | null>)
+     );
+
+   export const saveMany = (embeddings: Map<string, number[]>) =>
+     GlobalStore.ensureDirectories.pipe(
+       Effect.flatMap(() =>
+         Effect.all(
+           Array.from(embeddings.entries()).map(([hash, embedding]) =>
+             save(hash, embedding)
+           ),
+           { concurrency: 1000 }
+         )
+       )
+     );
+
+   export const exists = (hash: string) =>
+     Effect.tryPromise({
+       try: async () => {
+         const embeddingPath = `${EMBEDDINGS_DIR}/${hash}.json`;
+         const file = Bun.file(embeddingPath);
+         return file.exists();
+       },
+       catch: () => false,
+     }).pipe(Effect.catchAll(() => Effect.succeed(false)));
+ }
@@ -0,0 +1,208 @@
+ import * as crypto from "crypto";
+ import { Effect } from "effect";
+ import {
+   SEMANTIC_HOME,
+   EMBEDDINGS_DIR,
+   PROJECTS_DIR,
+   CONFIG_PATH,
+   type GlobalConfig,
+   type ProjectManifest,
+   type GitTreeCache,
+   ConfigError,
+   ApiKeyMissingError,
+ } from "../types";
+
+ const DEFAULT_CONFIG: GlobalConfig = {
+   version: "1.0.0",
+ };
+
+ export namespace GlobalStore {
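+   // Projects are keyed by the SHA-256 of their path, so the same checkout
+   // always maps to the same cache directory.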
+   export const getProjectId = (projectPath: string): string => {
+     return crypto.createHash("sha256").update(projectPath).digest("hex");
+   };
+
+   export const getProjectDir = (projectId: string): string => {
+     return `${PROJECTS_DIR}/${projectId}`;
+   };
+
+   export const clearProjectCache = (projectId: string) =>
+     Effect.tryPromise({
+       try: async () => {
+         const projectDir = getProjectDir(projectId);
+         const manifestPath = `${projectDir}/manifest.json`;
+         const gitTreePath = `${projectDir}/git-tree.json`;
+
+         const manifestExists = await Bun.file(manifestPath).exists();
+         const gitTreeExists = await Bun.file(gitTreePath).exists();
+
+         if (manifestExists) {
+           await Bun.$`rm -f ${manifestPath}`;
+         }
+         if (gitTreeExists) {
+           await Bun.$`rm -f ${gitTreePath}`;
+         }
+
+         return {
+           manifestCleared: manifestExists,
+           gitTreeCleared: gitTreeExists,
+         };
+       },
+       catch: (error) =>
+         new ConfigError({
+           message: `Failed to clear repo cache for project ${projectId}`,
+           cause: error,
+         }),
+     });
+
+   export const ensureDirectories = Effect.tryPromise({
+     try: async () => {
+       await Bun.$`mkdir -p ${SEMANTIC_HOME}`;
+       await Bun.$`mkdir -p ${EMBEDDINGS_DIR}`;
+       await Bun.$`mkdir -p ${PROJECTS_DIR}`;
+     },
+     catch: (error) =>
+       new ConfigError({
+         message: "Failed to create directories",
+         cause: error,
+       }),
+   });
+
+   export const ensureProjectDir = (projectId: string) =>
+     Effect.tryPromise({
+       try: async () => {
+         const projectDir = getProjectDir(projectId);
+         await Bun.$`mkdir -p ${projectDir}`;
+       },
+       catch: (error) =>
+         new ConfigError({
+           message: `Failed to create project directory for ${projectId}`,
+           cause: error,
+         }),
+     });
+
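+   // Global config lives at CONFIG_PATH; a missing file falls back to
+   // DEFAULT_CONFIG instead of failing.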
+   export const loadConfig = Effect.tryPromise({
+     try: async () => {
+       const file = Bun.file(CONFIG_PATH);
+       if (await file.exists()) {
+         return (await file.json()) as GlobalConfig;
+       }
+       return DEFAULT_CONFIG;
+     },
+     catch: (error) =>
+       new ConfigError({
+         message: "Failed to load config",
+         cause: error,
+       }),
+   });
+
+   export const saveConfig = (config: GlobalConfig) =>
+     ensureDirectories.pipe(
+       Effect.flatMap(() =>
+         Effect.tryPromise({
+           try: () => Bun.write(CONFIG_PATH, JSON.stringify(config, null, 2)),
+           catch: (error) =>
+             new ConfigError({
+               message: "Failed to save config",
+               cause: error,
+             }),
+         })
+       )
+     );
+
+   export const setApiKey = (apiKey: string) =>
+     loadConfig.pipe(
+       Effect.map((config) => ({ ...config, voyageApiKey: apiKey })),
+       Effect.flatMap(saveConfig)
+     );
+
+   export const getApiKey = loadConfig.pipe(
+     Effect.map((config) => config.voyageApiKey)
+   );
+
+   export const getApiKeyOrFail = loadConfig.pipe(
+     Effect.flatMap((config) =>
+       config.voyageApiKey
+         ? Effect.succeed(config.voyageApiKey)
+         : Effect.fail(
+             new ApiKeyMissingError({
+               message:
+                 "No API key configured. Run: semantic config add-api-key <your_key>",
+             })
+           )
+     )
+   );
+
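+   // Per-project state on disk: manifest.json (chunk references) and
+   // git-tree.json (cached git tree snapshot).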
+   export const loadProjectManifest = (projectId: string) =>
+     Effect.tryPromise({
+       try: async () => {
+         const manifestPath = `${getProjectDir(projectId)}/manifest.json`;
+         const file = Bun.file(manifestPath);
+         if (await file.exists()) {
+           return (await file.json()) as ProjectManifest;
+         }
+         return null;
+       },
+       catch: (error) =>
+         new ConfigError({
+           message: `Failed to load manifest for project ${projectId}`,
+           cause: error,
+         }),
+     });
+
+   export const saveProjectManifest = (manifest: ProjectManifest) =>
+     ensureProjectDir(manifest.projectId).pipe(
+       Effect.flatMap(() =>
+         Effect.tryPromise({
+           try: () => {
+             const projectDir = getProjectDir(manifest.projectId);
+             return Bun.write(
+               `${projectDir}/manifest.json`,
+               JSON.stringify(manifest, null, 2)
+             );
+           },
+           catch: (error) =>
+             new ConfigError({
+               message: `Failed to save manifest for project ${manifest.projectId}`,
+               cause: error,
+             }),
+         })
+       )
+     );
+
+   export const loadGitTreeCache = (projectId: string) =>
+     Effect.tryPromise({
+       try: async () => {
+         const cachePath = `${getProjectDir(projectId)}/git-tree.json`;
+         const file = Bun.file(cachePath);
+         if (await file.exists()) {
+           return (await file.json()) as GitTreeCache;
+         }
+         return null;
+       },
+       catch: (error) =>
+         new ConfigError({
+           message: `Failed to load git tree cache for project ${projectId}`,
+           cause: error,
+         }),
+     });
+
+   export const saveGitTreeCache = (projectId: string, cache: GitTreeCache) =>
+     ensureProjectDir(projectId).pipe(
+       Effect.flatMap(() =>
+         Effect.tryPromise({
+           try: () => {
+             const projectDir = getProjectDir(projectId);
+             return Bun.write(
+               `${projectDir}/git-tree.json`,
+               JSON.stringify(cache, null, 2)
+             );
+           },
+           catch: (error) =>
+             new ConfigError({
+               message: `Failed to save git tree cache for project ${projectId}`,
+               cause: error,
+             }),
+         })
+       )
+     );
+ }