@gmickel/gno 1.1.0 → 1.2.1

package/src/cli/run.ts CHANGED
@@ -169,7 +169,10 @@ Run '${CLI_NAME} --help' for full command list.
  * No process.exit() - caller sets process.exitCode.
  */
 export async function runCli(argv: string[]): Promise<number> {
-  // Reset global state for clean invocation (important for testing)
+  // Reset global state for clean invocation (important for testing).
+  // The detach paths (runServeDetach / runDaemonDetach) read argv from
+  // Commander's per-invocation `Command.rawArgs` via `resolveCliArgv()`,
+  // so no separate process-global capture is needed here.
   resetGlobals();
 
   const isJson = argvWantsJson(argv);
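The comment above is the diff's only note on how detached re-spawns recover their argv. A purely hypothetical sketch of a resolveCliArgv-style helper, assuming (as the comment states) that Commander records the invocation's argv on `Command.rawArgs`; the helper name and the fallback are illustrative, not the package's verified code:

import type { Command } from "commander";

// Illustrative only: prefer Commander's per-invocation rawArgs (populated
// during parse) and fall back to process.argv when no Command is in scope.
function resolveCliArgvSketch(command?: Command): string[] {
  const rawArgs = (command as { rawArgs?: string[] } | undefined)?.rawArgs;
  return rawArgs && rawArgs.length > 0 ? rawArgs : process.argv.slice(2);
}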
@@ -204,8 +207,13 @@ export async function runCli(argv: string[]): Promise<number> {
   } catch (err) {
     // Handle CliError with proper JSON formatting
     if (err instanceof CliError) {
-      const output = formatErrorForOutput(err, { json: isJson });
-      process.stderr.write(`${output}\n`);
+      // `silent` is reserved for codes whose exit value carries the meaning
+      // (e.g. NOT_RUNNING from `--stop` per spec/cli.md). Skip stderr but
+      // still propagate the code.
+      if (!err.silent) {
+        const output = formatErrorForOutput(err, { json: isJson });
+        process.stderr.write(`${output}\n`);
+      }
       return exitCodeFor(err);
     }
 
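For context, a hypothetical sketch of the producer side of this contract. The CliError constructor shape and option name are assumptions; only the `silent` property and the exit-code behaviour come from the diff:

// Assumed shape, for illustration only - the real CliError lives in the package.
declare class CliError extends Error {
  constructor(code: string, message: string, options?: { silent?: boolean });
  readonly code: string;
  readonly silent?: boolean;
}

// A `--stop` handler could report "nothing to stop" via the exit code alone:
// runCli() skips stderr because err.silent is true, yet still returns
// exitCodeFor(err), so scripts can branch on the exit status.
function stopDaemonSketch(isRunning: boolean): void {
  if (!isRunning) {
    throw new CliError("NOT_RUNNING", "daemon is not running", { silent: true });
  }
}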
package/src/llm/cache.ts CHANGED
@@ -41,6 +41,27 @@ import { getLockPath, getManifestLockPath, withLock } from "./lockfile";
 const HF_QUANT_PATTERN = /^([^/]+)\/([^/:]+):(\w+)$/;
 const HF_PATH_PATTERN = /^([^/]+)\/([^/]+)\/(.+\.gguf)$/;
 
+async function computeSha256(path: string): Promise<string> {
+  const hasher = new Bun.CryptoHasher("sha256");
+  const reader = Bun.file(path).stream().getReader();
+
+  try {
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) {
+        break;
+      }
+      if (value) {
+        hasher.update(value);
+      }
+    }
+  } finally {
+    reader.releaseLock();
+  }
+
+  return hasher.digest("hex");
+}
+
 export type ParsedModelUri =
   | {
       scheme: "hf";
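The new helper streams the file through Bun.CryptoHasher instead of reading it into memory, which matters for multi-gigabyte GGUF files. A minimal sketch of reusing the same pattern to verify a cached file against a recorded digest; verifyModelFile and expectedSha256 are illustrative names, not part of the package:

// Sketch: stream-hash a file and compare it to an expected hex digest.
async function verifyModelFile(path: string, expectedSha256: string): Promise<boolean> {
  const hasher = new Bun.CryptoHasher("sha256");
  const reader = Bun.file(path).stream().getReader();
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      if (value) hasher.update(value);
    }
  } finally {
    reader.releaseLock();
  }
  // Digests are hex strings; compare case-insensitively.
  return hasher.digest("hex").toLowerCase() === expectedSha256.toLowerCase();
}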
@@ -588,6 +609,7 @@ export class ModelCache {
   ): Promise<void> {
     // Get file size outside lock (IO-bound, doesn't need protection)
     let size = 0;
+    let checksum = "";
     try {
       const stats = await stat(modelPath);
       size = stats.size;
@@ -595,6 +617,12 @@
       // Ignore
     }
 
+    try {
+      checksum = await computeSha256(modelPath);
+    } catch {
+      // Best-effort metadata only
+    }
+
     await this.updateManifest((manifest) => {
       // Remove existing entry if present
       manifest.models = manifest.models.filter((m) => m.uri !== uri);
@@ -605,7 +633,7 @@
         type,
         path: modelPath,
         size,
-        checksum: "", // TODO: compute SHA-256 for large files
+        checksum,
         cachedAt: new Date().toISOString(),
       });
     });
package/src/llm/lockfile.ts CHANGED
@@ -5,7 +5,7 @@
  * @module src/llm/lockfile
  */
 
-import { open, rename, rm, stat } from "node:fs/promises";
+import { open, readFile, rename, rm, stat } from "node:fs/promises";
 // node:os: hostname and user for lock ownership
 import { hostname, userInfo } from "node:os";
 // node:path: join for manifest lock path
@@ -68,6 +68,48 @@ function sleep(ms: number): Promise<void> {
   return new Promise((resolve) => setTimeout(resolve, ms));
 }
 
+async function readLockMeta(lockPath: string): Promise<LockMeta | null> {
+  try {
+    const parsed = JSON.parse(
+      await readFile(lockPath, "utf-8")
+    ) as Partial<LockMeta>;
+    if (
+      typeof parsed.pid !== "number" ||
+      typeof parsed.hostname !== "string" ||
+      typeof parsed.user !== "string" ||
+      typeof parsed.createdAt !== "string"
+    ) {
+      return null;
+    }
+    return {
+      pid: parsed.pid,
+      hostname: parsed.hostname,
+      user: parsed.user,
+      createdAt: parsed.createdAt,
+    };
+  } catch {
+    return null;
+  }
+}
+
+function isProcessAlive(pid: number): boolean {
+  if (!Number.isInteger(pid) || pid <= 0) {
+    return false;
+  }
+
+  try {
+    process.kill(pid, 0);
+    return true;
+  } catch (error) {
+    return (
+      error !== null &&
+      typeof error === "object" &&
+      "code" in error &&
+      error.code === "EPERM"
+    );
+  }
+}
+
 /**
  * Check if a lockfile is stale (older than TTL or owner process dead).
  */
@@ -81,9 +123,12 @@ async function isLockStale(lockPath: string, ttlMs: number): Promise<boolean> {
       return true;
     }
 
-    // TODO: Could also check if PID is alive on same hostname
-    // For now, just use TTL-based staleness
-    return false;
+    const meta = await readLockMeta(lockPath);
+    if (!meta || meta.hostname !== hostname()) {
+      return false;
+    }
+
+    return !isProcessAlive(meta.pid);
   } catch {
     // Lock doesn't exist or can't be read
     return true;
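A lock is now stale when its TTL has expired or, for locks created on the same host, when the owning process is gone. The liveness probe relies on standard kill-with-signal-0 semantics: ESRCH means the PID no longer exists, EPERM means it exists but belongs to another user, so EPERM still counts as alive. A small sketch of that distinction (probePid is an illustrative name, not package code):

// Signal 0 delivers nothing; it only performs an existence/permission check.
function probePid(pid: number): "alive" | "alive-other-user" | "dead" {
  try {
    process.kill(pid, 0);
    return "alive";
  } catch (error) {
    const code = (error as NodeJS.ErrnoException).code;
    return code === "EPERM" ? "alive-other-user" : "dead"; // ESRCH and the rest: dead
  }
}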
@@ -15,6 +15,7 @@ import type {
   SearchResults,
 } from "./types";
 
+import { getContentBatch } from "../store/content-batch";
 import { err, ok } from "../store/types";
 import { createChunkLookup } from "./chunk-lookup";
 import { matchesExcludedChunks, matchesExcludedText } from "./exclude";
@@ -277,14 +278,21 @@ export async function searchBm25(
   const sortedEntries = [...bestByDocid.values()].sort(
     (a, b) => a.score - b.score
   );
+  const fullContentResult = await getContentBatch(
+    store,
+    sortedEntries
+      .map(({ fts }) => fts.mirrorHash)
+      .filter((mirrorHash): mirrorHash is string => Boolean(mirrorHash))
+  );
+  if (!fullContentResult.ok) {
+    return err("QUERY_FAILED", fullContentResult.error.message);
+  }
+  const fullContentByHash = fullContentResult.value;
+
   for (const { fts, chunk } of sortedEntries) {
-    let fullContent: string | undefined;
-    if (fts.mirrorHash) {
-      const contentResult = await store.getContent(fts.mirrorHash);
-      if (contentResult.ok && contentResult.value) {
-        fullContent = contentResult.value;
-      }
-    }
+    const fullContent = fts.mirrorHash
+      ? fullContentByHash.get(fts.mirrorHash)
+      : undefined;
     const collectionPath = fts.collection
       ? collectionPaths.get(fts.collection)
       : undefined;
@@ -11,6 +11,7 @@ import type { StorePort } from "../store/types";
 import type { VectorIndexPort } from "../store/vector/types";
 import type { SearchOptions, SearchResult, SearchResults } from "./types";
 
+import { getContentBatch } from "../store/content-batch";
 import { err, ok } from "../store/types";
 import { createChunkLookup } from "./chunk-lookup";
 import { formatQueryForEmbedding } from "./contextual";
@@ -249,14 +250,21 @@ export async function searchVectorWithEmbedding(
 
   // For --full, fetch full content and build results
   if (options.full) {
+    const fullContentResult = await getContentBatch(
+      store,
+      [...bestByDocid.values()]
+        .map(({ doc }) => doc.mirrorHash)
+        .filter((mirrorHash): mirrorHash is string => Boolean(mirrorHash))
+    );
+    if (!fullContentResult.ok) {
+      return err("QUERY_FAILED", fullContentResult.error.message);
+    }
+    const fullContentByHash = fullContentResult.value;
+
     for (const { doc, chunk, score } of bestByDocid.values()) {
-      let fullContent: string | undefined;
-      if (doc.mirrorHash) {
-        const contentResult = await store.getContent(doc.mirrorHash);
-        if (contentResult.ok && contentResult.value) {
-          fullContent = contentResult.value;
-        }
-      }
+      const fullContent = doc.mirrorHash
+        ? fullContentByHash.get(doc.mirrorHash)
+        : undefined;
 
       const collectionPath = collectionPaths.get(doc.collection);
 
@@ -9,6 +9,7 @@ import type { DocumentRow, StorePort, TagRow } from "../store/types";
 
 import { parseRef } from "../cli/commands/ref-parser";
 import { parseFrontmatter } from "../ingestion/frontmatter";
+import { getContentBatch } from "../store/content-batch";
 import {
   buildEncryptedPublishArtifact,
   buildPublishArtifact,
@@ -155,16 +156,40 @@ async function exportCollectionArtifact(
     throw new Error(`Collection "${collection.name}" has no active documents`);
   }
 
+  const contentResult = await getContentBatch(
+    store,
+    activeDocs
+      .map((doc) => doc.mirrorHash)
+      .filter((mirrorHash): mirrorHash is string => Boolean(mirrorHash))
+  );
+  if (!contentResult.ok) {
+    throw new Error(contentResult.error.message);
+  }
+
+  const tagsResult = await store.getTagsBatch(activeDocs.map((doc) => doc.id));
+  if (!tagsResult.ok) {
+    throw new Error(tagsResult.error.message);
+  }
+
+  const contentByHash = contentResult.value;
+  const tagsByDocId = tagsResult.value;
+
   const notes: PublishArtifactNote[] = [];
   for (const doc of activeDocs) {
-    const rawMarkdown = await loadDocumentMarkdown(store, doc);
+    if (!doc.mirrorHash) {
+      throw new Error(`Document has no converted content: ${doc.uri}`);
+    }
+    const rawMarkdown = contentByHash.get(doc.mirrorHash);
+    if (rawMarkdown === undefined) {
+      throw new Error(`Unable to load content for ${doc.uri}`);
+    }
     if (isPublishDisabledByFrontmatter(rawMarkdown)) {
       continue;
     }
     const sanitized = sanitizeObsidianMarkdown(rawMarkdown);
     warnings.push(...sanitized.warnings);
     const markdown = sanitized.markdown;
-    const tags = await loadDocumentTags(store, doc);
+    const tags = tagsByDocId.get(doc.id) ?? [];
     const frontmatter = parseFrontmatter(markdown).metadata;
     const title = deriveExportedTitle(doc);
     notes.push({
package/src/store/content-batch.ts ADDED
@@ -0,0 +1,38 @@
+import type { StorePort, StoreResult } from "./types";
+
+import { err, ok } from "./types";
+
+/**
+ * Load content in batch when the store supports it.
+ * Falls back to sequential reads for lightweight test doubles.
+ */
+export async function getContentBatch(
+  store: StorePort,
+  mirrorHashes: string[]
+): Promise<StoreResult<Map<string, string>>> {
+  const uniqueHashes = [...new Set(mirrorHashes)];
+  if (uniqueHashes.length === 0) {
+    return ok(new Map());
+  }
+
+  if (store.getContentBatch) {
+    return store.getContentBatch(uniqueHashes);
+  }
+
+  const contentByHash = new Map<string, string>();
+  for (const mirrorHash of uniqueHashes) {
+    const contentResult = await store.getContent(mirrorHash);
+    if (!contentResult.ok) {
+      return err(
+        "QUERY_FAILED",
+        contentResult.error.message,
+        contentResult.error.cause
+      );
+    }
+    if (contentResult.value !== null) {
+      contentByHash.set(mirrorHash, contentResult.value);
+    }
+  }
+
+  return ok(contentByHash);
+}
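A minimal usage sketch of the fallback path mentioned in the doc comment: a store double that only implements getContent still works, while adapters that provide getContentBatch are used directly. The double and hashes below are illustrative, not fixtures from the package:

import type { StorePort } from "./types";
import { ok } from "./types";
import { getContentBatch } from "./content-batch";

// Illustrative double: only getContent exists, so the helper falls back to
// sequential reads; duplicate hashes are de-duplicated and misses are omitted.
const docs = new Map([["hash-a", "# Note A"], ["hash-b", "# Note B"]]);
const fakeStore = {
  getContent: async (mirrorHash: string) => ok(docs.get(mirrorHash) ?? null),
} as unknown as StorePort;

const result = await getContentBatch(fakeStore, ["hash-a", "hash-a", "missing"]);
if (result.ok) {
  console.log(result.value.get("hash-a")); // "# Note A"
  console.log(result.value.has("missing")); // false
}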
@@ -1048,6 +1048,41 @@ export class SqliteAdapter implements StorePort, SqliteDbProvider {
     }
   }
 
+  async getContentBatch(
+    mirrorHashes: string[]
+  ): Promise<StoreResult<Map<string, string>>> {
+    try {
+      const db = this.ensureOpen();
+
+      if (mirrorHashes.length === 0) {
+        return ok(new Map());
+      }
+
+      interface DbContentRow {
+        mirror_hash: string;
+        markdown: string;
+      }
+
+      const placeholders = mirrorHashes.map(() => "?").join(", ");
+      const rows = db
+        .query<DbContentRow, string[]>(
+          `SELECT mirror_hash, markdown FROM content
+           WHERE mirror_hash IN (${placeholders})`
+        )
+        .all(...mirrorHashes);
+
+      return ok(
+        new Map(rows.map((row) => [row.mirror_hash, row.markdown] as const))
+      );
+    } catch (cause) {
+      return err(
+        "QUERY_FAILED",
+        cause instanceof Error ? cause.message : "Failed to get content batch",
+        cause
+      );
+    }
+  }
+
   // ─────────────────────────────────────────────────────────────────────────
   // Chunks
   // ─────────────────────────────────────────────────────────────────────────
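One caveat with interpolating placeholders into an IN (...) clause: SQLite caps bound parameters per statement (999 in older builds, 32766 since SQLite 3.32), so a very large hash list could exceed the limit. A hedged sketch of chunking the lookup if that ever matters; the adapter does not currently do this, and queryChunk merely stands in for the single-statement logic above:

// Sketch only: split the lookup into chunks under SQLite's parameter limit
// and merge the partial maps back together.
async function getContentBatchChunked(
  mirrorHashes: string[],
  queryChunk: (chunk: string[]) => Promise<Map<string, string>>,
  chunkSize = 500
): Promise<Map<string, string>> {
  const merged = new Map<string, string>();
  for (let i = 0; i < mirrorHashes.length; i += chunkSize) {
    for (const [hash, markdown] of await queryChunk(mirrorHashes.slice(i, i + chunkSize))) {
      merged.set(hash, markdown);
    }
  }
  return merged;
}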
package/src/store/types.ts CHANGED
@@ -694,6 +694,14 @@ export interface StorePort {
    */
   getContent(mirrorHash: string): Promise<StoreResult<string | null>>;
 
+  /**
+   * Batch fetch markdown content for multiple mirror hashes.
+   * Returns a map of mirrorHash -> markdown for hashes that exist.
+   */
+  getContentBatch?(
+    mirrorHashes: string[]
+  ): Promise<StoreResult<Map<string, string>>>;
+
   // ─────────────────────────────────────────────────────────────────────────
   // Chunks
   // ─────────────────────────────────────────────────────────────────────────