@offworld/sdk 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs ADDED
@@ -0,0 +1,3670 @@
1
+ import { chmodSync, existsSync, lstatSync, mkdirSync, readFileSync, readdirSync, rmSync, statSync, symlinkSync, unlinkSync, writeFileSync } from "node:fs";
2
+ import { basename, dirname, join, resolve } from "node:path";
3
+ import { ConfigSchema, GlobalMapSchema, ProjectMapSchema } from "@offworld/types";
4
+ import { xdgConfig, xdgData, xdgState } from "xdg-basedir";
5
+ import { homedir } from "node:os";
6
+ import { createHash } from "node:crypto";
7
+ import { GlobalMapSchema as GlobalMapSchema$1, ProjectMapSchema as ProjectMapSchema$1 } from "@offworld/types/schemas";
8
+ import { execFileSync, execSync, spawn } from "node:child_process";
9
+ import { z } from "zod";
10
+ import { ConvexHttpClient } from "convex/browser";
11
+ import { api } from "@offworld/backend-api/api";
12
+
13
+ //#region src/constants.ts
14
+ /**
15
+ * SDK Constants
16
+ */
17
+ /** SDK version - must match package.json */
18
+ const VERSION = "0.1.0";
19
+ /**
20
+ * Default patterns to ignore when scanning repositories.
21
+ * Includes directories, binary files, IDE configs, and build outputs.
22
+ */
23
+ const DEFAULT_IGNORE_PATTERNS = [
24
+ ".git",
25
+ ".git/**",
26
+ ".svn",
27
+ ".hg",
28
+ "node_modules",
29
+ "node_modules/**",
30
+ "vendor",
31
+ "vendor/**",
32
+ ".pnpm",
33
+ ".yarn",
34
+ "dist",
35
+ "dist/**",
36
+ "build",
37
+ "build/**",
38
+ "out",
39
+ "out/**",
40
+ ".next",
41
+ ".nuxt",
42
+ ".output",
43
+ "target",
44
+ "__pycache__",
45
+ "*.pyc",
46
+ ".vscode",
47
+ ".vscode/**",
48
+ ".idea",
49
+ ".idea/**",
50
+ "*.swp",
51
+ "*.swo",
52
+ ".DS_Store",
53
+ "*.jpg",
54
+ "*.jpeg",
55
+ "*.png",
56
+ "*.gif",
57
+ "*.ico",
58
+ "*.webp",
59
+ "*.svg",
60
+ "*.bmp",
61
+ "*.tiff",
62
+ "*.mp4",
63
+ "*.webm",
64
+ "*.mov",
65
+ "*.avi",
66
+ "*.mkv",
67
+ "*.mp3",
68
+ "*.wav",
69
+ "*.flac",
70
+ "*.ogg",
71
+ "*.pdf",
72
+ "*.zip",
73
+ "*.tar",
74
+ "*.gz",
75
+ "*.rar",
76
+ "*.7z",
77
+ "*.exe",
78
+ "*.dll",
79
+ "*.so",
80
+ "*.dylib",
81
+ "*.bin",
82
+ "*.wasm",
83
+ "*.woff",
84
+ "*.woff2",
85
+ "*.ttf",
86
+ "*.eot",
87
+ "*.otf",
88
+ "package-lock.json",
89
+ "yarn.lock",
90
+ "pnpm-lock.yaml",
91
+ "bun.lockb",
92
+ "Cargo.lock",
93
+ "Gemfile.lock",
94
+ "poetry.lock",
95
+ "composer.lock",
96
+ "go.sum",
97
+ "coverage",
98
+ "coverage/**",
99
+ ".nyc_output",
100
+ ".coverage",
101
+ "htmlcov",
102
+ "*.log",
103
+ "logs",
104
+ "tmp",
105
+ "temp",
106
+ ".tmp",
107
+ ".temp",
108
+ ".cache",
109
+ ".env",
110
+ ".env.*",
111
+ "*.pem",
112
+ "*.key"
113
+ ];
114
+
115
+ //#endregion
116
+ //#region src/paths.ts
117
+ /**
118
+ * XDG-based directory paths for offworld CLI
119
+ * Uses xdg-basedir package for cross-platform compatibility (Linux/macOS)
120
+ */
121
+ const APP_NAME = "offworld";
122
+ /**
123
+ * Main namespace for all XDG-compliant paths
124
+ */
125
+ const Paths = {
126
+ get config() {
127
+ return join(xdgConfig ?? join(homedir(), ".config"), APP_NAME);
128
+ },
129
+ get data() {
130
+ return join(xdgData ?? join(homedir(), ".local", "share"), APP_NAME);
131
+ },
132
+ get state() {
133
+ return join(xdgState ?? join(homedir(), ".local", "state"), APP_NAME);
134
+ },
135
+ get configFile() {
136
+ return join(this.config, "offworld.json");
137
+ },
138
+ get authFile() {
139
+ return join(this.data, "auth.json");
140
+ },
141
+ get metaDir() {
142
+ return join(this.data, "meta");
143
+ },
144
+ get defaultRepoRoot() {
145
+ return join(homedir(), "ow");
146
+ },
147
+ get offworldSkillDir() {
148
+ return join(this.data, "skill", "offworld");
149
+ },
150
+ get offworldReferencesDir() {
151
+ return join(this.offworldSkillDir, "references");
152
+ },
153
+ get offworldAssetsDir() {
154
+ return join(this.offworldSkillDir, "assets");
155
+ },
156
+ get offworldGlobalMapPath() {
157
+ return join(this.offworldAssetsDir, "map.json");
158
+ }
159
+ };
160
+ /**
161
+ * Expands ~ to user's home directory (for backward compatibility)
162
+ */
163
+ function expandTilde(path) {
164
+ if (path.startsWith("~/")) return join(homedir(), path.slice(2));
165
+ return path;
166
+ }
167
+
168
+ //#endregion
169
+ //#region src/config.ts
170
+ /**
171
+ * Config utilities for path management and configuration loading
172
+ */
173
+ /**
174
+ * Returns the repository root directory.
175
+ * Uses configured repoRoot or defaults to ~/ow
176
+ */
177
+ function getMetaRoot() {
178
+ return Paths.data;
179
+ }
180
+ function getRepoRoot(config) {
181
+ return expandTilde(config?.repoRoot ?? Paths.defaultRepoRoot);
182
+ }
183
+ /**
184
+ * Returns the path for a specific repository.
185
+ * Format: {repoRoot}/{provider}/{owner}/{repo}
186
+ *
187
+ * @param fullName - The repo identifier in "owner/repo" format
188
+ * @param provider - Git provider (defaults to "github")
189
+ * @param config - Optional config for custom repoRoot
190
+ */
191
+ function getRepoPath(fullName, provider = "github", config) {
192
+ const root = getRepoRoot(config);
193
+ const [owner, repo] = fullName.split("/");
194
+ if (!owner || !repo) throw new Error(`Invalid fullName format: ${fullName}. Expected "owner/repo"`);
195
+ return join(root, provider, owner, repo);
196
+ }
197
+ /**
198
+ * Convert owner/repo format to meta directory name.
199
+ * Collapses owner==repo (e.g., better-auth/better-auth -> better-auth)
200
+ */
201
+ function toMetaDirName(repoName) {
202
+ if (repoName.includes("/")) {
203
+ const [owner, repo] = repoName.split("/");
204
+ if (owner === repo) return repo;
205
+ return `${owner}-${repo}`;
206
+ }
207
+ return repoName;
208
+ }
209
+ /**
210
+ * Convert owner/repo format to reference filename.
211
+ * Collapses redundant owner/repo pairs by checking if repo name is contained in owner:
212
+ * - honojs/hono -> hono.md (hono is in honojs)
213
+ * - get-convex/convex-backend -> convex-backend.md (convex is in get-convex)
214
+ * - tanstack/query -> tanstack-query.md (query is not in tanstack)
215
+ */
216
+ function toReferenceFileName(repoName) {
217
+ if (repoName.includes("/")) {
218
+ const [owner, repo] = repoName.split("/");
219
+ const ownerLower = owner.toLowerCase();
220
+ const repoLower = repo.toLowerCase();
221
+ if (repoLower.split("-").find((part) => part.length >= 3 && ownerLower.includes(part)) || ownerLower === repoLower) return `${repoLower}.md`;
222
+ return `${ownerLower}-${repoLower}.md`;
223
+ }
224
+ return `${repoName.toLowerCase()}.md`;
225
+ }
226
+ function toReferenceName(repoName) {
227
+ return toReferenceFileName(repoName).replace(/\.md$/, "");
228
+ }
229
+ function getReferencePath(fullName) {
230
+ return join(Paths.offworldReferencesDir, toReferenceFileName(fullName));
231
+ }
232
+ function getMetaPath(fullName) {
233
+ return join(Paths.data, "meta", toMetaDirName(fullName));
234
+ }
235
+ /**
236
+ * Returns the path to the configuration file
237
+ * Uses XDG Base Directory specification
238
+ */
239
+ function getConfigPath() {
240
+ return Paths.configFile;
241
+ }
242
+ /**
243
+ * Loads configuration from ~/.config/offworld/offworld.json
244
+ * Returns defaults if file doesn't exist
245
+ */
246
+ function loadConfig() {
247
+ const configPath = getConfigPath();
248
+ if (!existsSync(configPath)) return ConfigSchema.parse({});
249
+ try {
250
+ const content = readFileSync(configPath, "utf-8");
251
+ const data = JSON.parse(content);
252
+ return ConfigSchema.parse(data);
253
+ } catch {
254
+ return ConfigSchema.parse({});
255
+ }
256
+ }
257
+ /**
258
+ * Saves configuration to ~/.config/offworld/offworld.json
259
+ * Creates directory if it doesn't exist
260
+ * Merges with existing config
261
+ */
262
+ function saveConfig(updates) {
263
+ const configPath = getConfigPath();
264
+ const configDir = dirname(configPath);
265
+ if (!existsSync(configDir)) mkdirSync(configDir, { recursive: true });
266
+ const merged = {
267
+ ...loadConfig(),
268
+ ...updates
269
+ };
270
+ const validated = ConfigSchema.parse(merged);
271
+ writeFileSync(configPath, JSON.stringify(validated, null, 2), "utf-8");
272
+ return validated;
273
+ }
274
+
275
+ //#endregion
276
+ //#region src/repo-source.ts
277
+ /**
278
+ * Repository source parsing utilities
279
+ */
280
+ var RepoSourceError = class extends Error {
281
+ constructor(message) {
282
+ super(message);
283
+ this.name = "RepoSourceError";
284
+ }
285
+ };
286
+ var PathNotFoundError = class extends RepoSourceError {
287
+ constructor(path) {
288
+ super(`Path does not exist: ${path}`);
289
+ this.name = "PathNotFoundError";
290
+ }
291
+ };
292
+ var NotGitRepoError = class extends RepoSourceError {
293
+ constructor(path) {
294
+ super(`Directory is not a git repository: ${path}`);
295
+ this.name = "NotGitRepoError";
296
+ }
297
+ };
298
+ const PROVIDER_HOSTS = {
299
+ "github.com": "github",
300
+ "gitlab.com": "gitlab",
301
+ "bitbucket.org": "bitbucket"
302
+ };
303
+ const HTTPS_URL_REGEX = /^https?:\/\/(github\.com|gitlab\.com|bitbucket\.org)\/([^/]+)\/([^/]+?)(?:\.git)?$/;
304
+ const SSH_URL_REGEX = /^git@(github\.com|gitlab\.com|bitbucket\.org):([^/]+)\/([^/]+?)(?:\.git)?$/;
305
+ const SHORT_FORMAT_REGEX = /^([^/:@]+)\/([^/:@]+)$/;
306
+ /**
307
+ * Generates a short hash of a path for local repo identification
308
+ */
309
+ function hashPath(path) {
310
+ return createHash("sha256").update(path).digest("hex").slice(0, 12);
311
+ }
312
+ /**
313
+ * Builds a clone URL for a remote repository
314
+ */
315
+ function buildCloneUrl(provider, owner, repo) {
316
+ return `https://${{
317
+ github: "github.com",
318
+ gitlab: "gitlab.com",
319
+ bitbucket: "bitbucket.org"
320
+ }[provider]}/${owner}/${repo}.git`;
321
+ }
322
+ /**
323
+ * Parses a remote repository from HTTPS URL format
324
+ */
325
+ function parseHttpsUrl(input) {
326
+ const match = input.match(HTTPS_URL_REGEX);
327
+ if (!match) return null;
328
+ const [, host, owner, repo] = match;
329
+ if (!host || !owner || !repo) return null;
330
+ const provider = PROVIDER_HOSTS[host];
331
+ if (!provider) return null;
332
+ const ownerLower = owner.toLowerCase();
333
+ const repoLower = repo.toLowerCase();
334
+ return {
335
+ type: "remote",
336
+ provider,
337
+ owner: ownerLower,
338
+ repo: repoLower,
339
+ fullName: `${ownerLower}/${repoLower}`,
340
+ qualifiedName: `${host}:${ownerLower}/${repoLower}`,
341
+ cloneUrl: buildCloneUrl(provider, ownerLower, repoLower)
342
+ };
343
+ }
344
+ /**
345
+ * Parses a remote repository from SSH URL format
346
+ */
347
+ function parseSshUrl(input) {
348
+ const match = input.match(SSH_URL_REGEX);
349
+ if (!match) return null;
350
+ const [, host, owner, repo] = match;
351
+ if (!host || !owner || !repo) return null;
352
+ const provider = PROVIDER_HOSTS[host];
353
+ if (!provider) return null;
354
+ const ownerLower = owner.toLowerCase();
355
+ const repoLower = repo.toLowerCase();
356
+ return {
357
+ type: "remote",
358
+ provider,
359
+ owner: ownerLower,
360
+ repo: repoLower,
361
+ fullName: `${ownerLower}/${repoLower}`,
362
+ qualifiedName: `${host}:${ownerLower}/${repoLower}`,
363
+ cloneUrl: buildCloneUrl(provider, ownerLower, repoLower)
364
+ };
365
+ }
366
+ /**
367
+ * Parses a remote repository from short format (owner/repo)
368
+ * Defaults to GitHub as provider
369
+ */
370
+ function parseShortFormat(input) {
371
+ const match = input.match(SHORT_FORMAT_REGEX);
372
+ if (!match) return null;
373
+ const [, owner, repo] = match;
374
+ if (!owner || !repo) return null;
375
+ const provider = "github";
376
+ const host = "github.com";
377
+ const ownerLower = owner.toLowerCase();
378
+ const repoLower = repo.toLowerCase();
379
+ return {
380
+ type: "remote",
381
+ provider,
382
+ owner: ownerLower,
383
+ repo: repoLower,
384
+ fullName: `${ownerLower}/${repoLower}`,
385
+ qualifiedName: `${host}:${ownerLower}/${repoLower}`,
386
+ cloneUrl: buildCloneUrl(provider, ownerLower, repoLower)
387
+ };
388
+ }
389
+ /**
390
+ * Parses a local repository path
391
+ * Validates that the path exists and contains a .git directory
392
+ */
393
+ function parseLocalPath(input) {
394
+ const absolutePath = resolve(expandTilde(input));
395
+ if (!existsSync(absolutePath)) throw new PathNotFoundError(absolutePath);
396
+ if (!statSync(absolutePath).isDirectory()) throw new RepoSourceError(`Path is not a directory: ${absolutePath}`);
397
+ if (!existsSync(resolve(absolutePath, ".git"))) throw new NotGitRepoError(absolutePath);
398
+ return {
399
+ type: "local",
400
+ path: absolutePath,
401
+ name: basename(absolutePath),
402
+ qualifiedName: `local:${hashPath(absolutePath)}`
403
+ };
404
+ }
405
+ /**
406
+ * Determines if input looks like a local path
407
+ */
408
+ function isLocalPath(input) {
409
+ return input.startsWith(".") || input.startsWith("/") || input.startsWith("~");
410
+ }
411
+ /**
412
+ * Parses a repository input and returns a structured RepoSource
413
+ *
414
+ * Supported formats:
415
+ * - owner/repo (short format, defaults to GitHub)
416
+ * - https://github.com/owner/repo
417
+ * - https://gitlab.com/owner/repo
418
+ * - https://bitbucket.org/owner/repo
419
+ * - git@github.com:owner/repo.git (SSH format)
420
+ * - . (current directory as local repo)
421
+ * - /absolute/path (local repo)
422
+ *
423
+ * @throws PathNotFoundError if local path doesn't exist
424
+ * @throws NotGitRepoError if local path is not a git repository
425
+ * @throws RepoSourceError for other parsing failures
426
+ */
427
+ function parseRepoInput(input) {
428
+ const trimmed = input.trim();
429
+ const httpsResult = parseHttpsUrl(trimmed);
430
+ if (httpsResult) return httpsResult;
431
+ const sshResult = parseSshUrl(trimmed);
432
+ if (sshResult) return sshResult;
433
+ if (isLocalPath(trimmed)) return parseLocalPath(trimmed);
434
+ const shortResult = parseShortFormat(trimmed);
435
+ if (shortResult) return shortResult;
436
+ throw new RepoSourceError(`Unable to parse repository input: ${input}. Expected formats: owner/repo, https://github.com/owner/repo, git@github.com:owner/repo.git, or a local path`);
437
+ }
438
+ function getReferenceFileNameForSource(source) {
439
+ if (source.type === "remote") return toReferenceFileName(source.fullName);
440
+ return toReferenceFileName(source.name);
441
+ }
442
+
443
+ //#endregion
444
+ //#region src/index-manager.ts
445
+ /**
446
+ * Map manager for global and project maps
447
+ *
448
+ * Manages:
449
+ * - Global map: ~/.local/share/offworld/skill/offworld/assets/map.json
450
+ * - Project map: ./.offworld/map.json
451
+ */
452
+ /**
453
+ * Reads the global map from ~/.local/share/offworld/skill/offworld/assets/map.json
454
+ * Returns empty map if file doesn't exist or is invalid
455
+ */
456
+ function readGlobalMap() {
457
+ const mapPath = Paths.offworldGlobalMapPath;
458
+ if (!existsSync(mapPath)) return { repos: {} };
459
+ try {
460
+ const content = readFileSync(mapPath, "utf-8");
461
+ const data = JSON.parse(content);
462
+ return GlobalMapSchema.parse(data);
463
+ } catch {
464
+ return { repos: {} };
465
+ }
466
+ }
467
+ /**
468
+ * Writes the global map to ~/.local/share/offworld/skill/offworld/assets/map.json
469
+ * Creates directory if it doesn't exist
470
+ */
471
+ function writeGlobalMap(map) {
472
+ const mapPath = Paths.offworldGlobalMapPath;
473
+ const mapDir = dirname(mapPath);
474
+ if (!existsSync(mapDir)) mkdirSync(mapDir, { recursive: true });
475
+ const validated = GlobalMapSchema.parse(map);
476
+ writeFileSync(mapPath, JSON.stringify(validated, null, 2), "utf-8");
477
+ }
478
+ /**
479
+ * Adds or updates a repo entry in the global map
480
+ *
481
+ * @param qualifiedName - The qualified repo name (owner/repo)
482
+ * @param entry - The map entry to add/update
483
+ */
484
+ function upsertGlobalMapEntry(qualifiedName, entry) {
485
+ const map = readGlobalMap();
486
+ map.repos[qualifiedName] = entry;
487
+ writeGlobalMap(map);
488
+ }
489
+ /**
490
+ * Removes a repo entry from the global map
491
+ *
492
+ * @param qualifiedName - The qualified repo name (owner/repo)
493
+ * @returns true if repo was removed, false if not found
494
+ */
495
+ function removeGlobalMapEntry(qualifiedName) {
496
+ const map = readGlobalMap();
497
+ if (!(qualifiedName in map.repos)) return false;
498
+ delete map.repos[qualifiedName];
499
+ writeGlobalMap(map);
500
+ return true;
501
+ }
502
+ /**
503
+ * Writes a project map to ./.offworld/map.json
504
+ *
505
+ * @param projectRoot - Absolute path to project root
506
+ * @param entries - Map of qualified repo names to project map entries
507
+ */
508
+ function writeProjectMap(projectRoot, entries) {
509
+ const mapPath = join(projectRoot, ".offworld", "map.json");
510
+ const mapDir = dirname(mapPath);
511
+ if (!existsSync(mapDir)) mkdirSync(mapDir, { recursive: true });
512
+ const projectMap = {
513
+ version: 1,
514
+ scope: "project",
515
+ globalMapPath: Paths.offworldGlobalMapPath,
516
+ repos: entries
517
+ };
518
+ const validated = ProjectMapSchema.parse(projectMap);
519
+ writeFileSync(mapPath, JSON.stringify(validated, null, 2), "utf-8");
520
+ }
521
+
522
+ //#endregion
523
+ //#region src/map.ts
524
+ /**
525
+ * Map query helpers for fast routing without reading full map.json
526
+ */
527
+ function readGlobalMapSafe() {
528
+ const mapPath = Paths.offworldGlobalMapPath;
529
+ if (!existsSync(mapPath)) return null;
530
+ try {
531
+ const content = readFileSync(mapPath, "utf-8");
532
+ return GlobalMapSchema$1.parse(JSON.parse(content));
533
+ } catch {
534
+ return null;
535
+ }
536
+ }
537
+ function readProjectMapSafe(cwd) {
538
+ const mapPath = resolve(cwd, ".offworld/map.json");
539
+ if (!existsSync(mapPath)) return null;
540
+ try {
541
+ const content = readFileSync(mapPath, "utf-8");
542
+ return ProjectMapSchema$1.parse(JSON.parse(content));
543
+ } catch {
544
+ return null;
545
+ }
546
+ }
547
+ /**
548
+ * Normalize input to match against repo keys.
549
+ * Accepts: github.com:owner/repo, owner/repo, repo
550
+ */
551
+ function normalizeInput(input) {
552
+ const trimmed = input.trim().toLowerCase();
553
+ if (trimmed.includes(":")) {
554
+ const parts = trimmed.split(":", 2);
555
+ const provider = parts[0];
556
+ const fullName = parts[1] ?? "";
557
+ return {
558
+ provider,
559
+ fullName,
560
+ repoName: fullName.split("/").pop() ?? fullName
561
+ };
562
+ }
563
+ if (trimmed.includes("/")) return {
564
+ fullName: trimmed,
565
+ repoName: trimmed.split("/").pop() ?? trimmed
566
+ };
567
+ return {
568
+ fullName: trimmed,
569
+ repoName: trimmed
570
+ };
571
+ }
572
+ /**
573
+ * Tokenize a string for search matching.
574
+ * Lowercase, strip @, split on /_- and whitespace.
575
+ */
576
+ function tokenize(str) {
577
+ return str.toLowerCase().replace(/@/g, "").split(/[/_\-\s]+/).filter(Boolean);
578
+ }
579
+ /**
580
+ * Resolve an input string to a qualified repo key in a map.
581
+ *
582
+ * @param input - Accepts github.com:owner/repo, owner/repo, or repo name
583
+ * @param map - A global or project map
584
+ * @returns The matching qualified name or null
585
+ */
586
+ function resolveRepoKey(input, map) {
587
+ const { provider, fullName, repoName } = normalizeInput(input);
588
+ const keys = Object.keys(map.repos);
589
+ if (provider) {
590
+ const qualifiedKey = `${provider}:${fullName}`;
591
+ if (keys.includes(qualifiedKey)) return qualifiedKey;
592
+ }
593
+ for (const key of keys) if ((key.includes(":") ? key.split(":")[1] : key)?.toLowerCase() === fullName) return key;
594
+ for (const key of keys) if (key.split("/").pop()?.toLowerCase() === repoName) return key;
595
+ return null;
596
+ }
597
+ /**
598
+ * Get a map entry for a repo, preferring project map if available.
599
+ *
600
+ * @param input - Repo identifier (github.com:owner/repo, owner/repo, or repo)
601
+ * @param options - Options for lookup
602
+ * @returns Entry with scope and qualified name, or null if not found
603
+ */
604
+ function getMapEntry(input, options = {}) {
605
+ const { preferProject = true, cwd = process.cwd() } = options;
606
+ const projectMap = preferProject ? readProjectMapSafe(cwd) : null;
607
+ const globalMap = readGlobalMapSafe();
608
+ if (projectMap) {
609
+ const key = resolveRepoKey(input, projectMap);
610
+ if (key && projectMap.repos[key]) return {
611
+ scope: "project",
612
+ qualifiedName: key,
613
+ entry: projectMap.repos[key]
614
+ };
615
+ }
616
+ if (globalMap) {
617
+ const key = resolveRepoKey(input, globalMap);
618
+ if (key && globalMap.repos[key]) return {
619
+ scope: "global",
620
+ qualifiedName: key,
621
+ entry: globalMap.repos[key]
622
+ };
623
+ }
624
+ return null;
625
+ }
626
+ /**
627
+ * Search the map for repos matching a term.
628
+ *
629
+ * Scoring:
630
+ * - Exact fullName match: 100
631
+ * - Keyword hit: 50 per keyword
632
+ * - Partial contains in fullName: 25
633
+ * - Partial contains in keywords: 10
634
+ *
635
+ * @param term - Search term
636
+ * @param options - Search options
637
+ * @returns Sorted list of matches
638
+ */
639
+ function searchMap(term, options = {}) {
640
+ const { limit = 10 } = options;
641
+ const globalMap = readGlobalMapSafe();
642
+ if (!globalMap) return [];
643
+ const termTokens = tokenize(term);
644
+ const termLower = term.toLowerCase();
645
+ const results = [];
646
+ for (const qualifiedName of Object.keys(globalMap.repos)) {
647
+ const entry = globalMap.repos[qualifiedName];
648
+ if (!entry) continue;
649
+ const fullName = qualifiedName.includes(":") ? qualifiedName.split(":")[1] ?? qualifiedName : qualifiedName;
650
+ const fullNameLower = fullName.toLowerCase();
651
+ const keywords = entry.keywords ?? [];
652
+ const keywordsLower = keywords.map((k) => k.toLowerCase());
653
+ let score = 0;
654
+ if (fullNameLower === termLower) score += 100;
655
+ for (const token of termTokens) if (keywordsLower.includes(token)) score += 50;
656
+ if (fullNameLower.includes(termLower) && score < 100) score += 25;
657
+ for (const kw of keywordsLower) if (kw.includes(termLower)) score += 10;
658
+ const fullNameTokens = tokenize(fullName);
659
+ for (const token of termTokens) if (fullNameTokens.includes(token)) score += 30;
660
+ if (score > 0) results.push({
661
+ qualifiedName,
662
+ fullName,
663
+ localPath: entry.localPath,
664
+ primary: entry.primary,
665
+ keywords,
666
+ score
667
+ });
668
+ }
669
+ results.sort((a, b) => {
670
+ if (b.score !== a.score) return b.score - a.score;
671
+ return a.fullName.localeCompare(b.fullName);
672
+ });
673
+ return results.slice(0, limit);
674
+ }
675
+ /**
676
+ * Get the project map path if it exists in cwd.
677
+ */
678
+ function getProjectMapPath(cwd = process.cwd()) {
679
+ const mapPath = resolve(cwd, ".offworld/map.json");
680
+ return existsSync(mapPath) ? mapPath : null;
681
+ }
682
+
683
+ //#endregion
684
+ //#region src/clone.ts
685
+ /**
686
+ * Git clone and repository management utilities
687
+ */
688
+ var CloneError = class extends Error {
689
+ constructor(message) {
690
+ super(message);
691
+ this.name = "CloneError";
692
+ }
693
+ };
694
+ var RepoExistsError = class extends CloneError {
695
+ constructor(path) {
696
+ super(`Repository already exists at: ${path}`);
697
+ this.name = "RepoExistsError";
698
+ }
699
+ };
700
+ var RepoNotFoundError = class extends CloneError {
701
+ constructor(qualifiedName) {
702
+ super(`Repository not found in index: ${qualifiedName}`);
703
+ this.name = "RepoNotFoundError";
704
+ }
705
+ };
706
+ var GitError = class extends CloneError {
707
+ constructor(message, command, exitCode) {
708
+ super(`Git command failed: ${message}`);
709
+ this.command = command;
710
+ this.exitCode = exitCode;
711
+ this.name = "GitError";
712
+ }
713
+ };
714
+ function execGit(args, cwd) {
715
+ try {
716
+ return execFileSync("git", args, {
717
+ cwd,
718
+ encoding: "utf-8",
719
+ stdio: [
720
+ "pipe",
721
+ "pipe",
722
+ "pipe"
723
+ ]
724
+ }).trim();
725
+ } catch (error) {
726
+ const err = error;
727
+ throw new GitError((err.stderr ? typeof err.stderr === "string" ? err.stderr : err.stderr.toString() : err.message || "Unknown error").trim(), `git ${args.join(" ")}`, err.status ?? null);
728
+ }
729
+ }
730
+ function execGitAsync(args, cwd) {
731
+ return new Promise((resolve, reject) => {
732
+ const proc = spawn("git", args, {
733
+ cwd,
734
+ stdio: [
735
+ "ignore",
736
+ "pipe",
737
+ "pipe"
738
+ ],
739
+ env: {
740
+ ...process.env,
741
+ GIT_TERMINAL_PROMPT: "0"
742
+ }
743
+ });
744
+ let stdout = "";
745
+ let stderr = "";
746
+ proc.stdout.on("data", (data) => {
747
+ stdout += data.toString();
748
+ });
749
+ proc.stderr.on("data", (data) => {
750
+ stderr += data.toString();
751
+ });
752
+ proc.on("close", (code) => {
753
+ if (code === 0) resolve(stdout.trim());
754
+ else reject(new GitError(stderr.trim() || "Unknown error", `git ${args.join(" ")}`, code));
755
+ });
756
+ proc.on("error", (err) => {
757
+ reject(new GitError(err.message, `git ${args.join(" ")}`, null));
758
+ });
759
+ });
760
+ }
761
+ /**
762
+ * Get the current commit SHA for a repository
763
+ */
764
+ function getCommitSha(repoPath) {
765
+ return execGit(["rev-parse", "HEAD"], repoPath);
766
+ }
767
+ /**
768
+ * Get the number of commits between two SHAs.
769
+ * Returns the number of commits from `olderSha` to `newerSha`.
770
+ * Returns null if the distance cannot be determined (e.g., shallow clone without the commit).
771
+ *
772
+ * @param repoPath - Path to the git repository
773
+ * @param olderSha - The older commit SHA (e.g., remote skill's commit)
774
+ * @param newerSha - The newer commit SHA (e.g., current HEAD), defaults to HEAD
775
+ */
776
+ function getCommitDistance(repoPath, olderSha, newerSha = "HEAD") {
777
+ try {
778
+ try {
779
+ execGit([
780
+ "cat-file",
781
+ "-e",
782
+ olderSha
783
+ ], repoPath);
784
+ } catch {
785
+ return null;
786
+ }
787
+ const count = execGit([
788
+ "rev-list",
789
+ "--count",
790
+ `${olderSha}..${newerSha}`
791
+ ], repoPath);
792
+ return Number.parseInt(count, 10);
793
+ } catch {
794
+ return null;
795
+ }
796
+ }
797
+ const SPARSE_CHECKOUT_DIRS = [
798
+ "src",
799
+ "lib",
800
+ "packages",
801
+ "docs",
802
+ "README.md",
803
+ "package.json"
804
+ ];
805
+ /**
806
+ * Clone a remote repository to the local repo root.
807
+ *
808
+ * @param source - Remote repo source from parseRepoInput()
809
+ * @param options - Clone options (shallow, branch, config)
810
+ * @returns The local path where the repo was cloned
811
+ * @throws RepoExistsError if repo already exists (unless force is true)
812
+ * @throws GitError if clone fails
813
+ */
814
+ async function cloneRepo(source, options = {}) {
815
+ const config = options.config ?? loadConfig();
816
+ const repoPath = getRepoPath(source.fullName, source.provider, config);
817
+ if (existsSync(repoPath)) if (options.force) rmSync(repoPath, {
818
+ recursive: true,
819
+ force: true
820
+ });
821
+ else throw new RepoExistsError(repoPath);
822
+ try {
823
+ if (options.sparse) await cloneSparse(source.cloneUrl, repoPath, options);
824
+ else await cloneStandard(source.cloneUrl, repoPath, options);
825
+ } catch (err) {
826
+ cleanupEmptyParentDirs(repoPath);
827
+ throw err;
828
+ }
829
+ const referenceFileName = toReferenceFileName(source.fullName);
830
+ const hasReference = existsSync(join(Paths.offworldReferencesDir, referenceFileName));
831
+ upsertGlobalMapEntry(source.qualifiedName, {
832
+ localPath: repoPath,
833
+ references: hasReference ? [referenceFileName] : [],
834
+ primary: hasReference ? referenceFileName : "",
835
+ keywords: [],
836
+ updatedAt: (/* @__PURE__ */ new Date()).toISOString()
837
+ });
838
+ return repoPath;
839
+ }
840
+ function cleanupEmptyParentDirs(repoPath) {
841
+ const ownerDir = dirname(repoPath);
842
+ if (existsSync(ownerDir) && readdirSync(ownerDir).length === 0) rmSync(ownerDir, {
843
+ recursive: true,
844
+ force: true
845
+ });
846
+ }
847
+ async function cloneStandard(cloneUrl, repoPath, options) {
848
+ const args = ["clone"];
849
+ if (options.shallow) args.push("--depth", "1");
850
+ if (options.branch) args.push("--branch", options.branch);
851
+ args.push(cloneUrl, repoPath);
852
+ await execGitAsync(args);
853
+ }
854
+ async function cloneSparse(cloneUrl, repoPath, options) {
855
+ const args = [
856
+ "clone",
857
+ "--filter=blob:none",
858
+ "--no-checkout",
859
+ "--sparse"
860
+ ];
861
+ if (options.shallow) args.push("--depth", "1");
862
+ if (options.branch) args.push("--branch", options.branch);
863
+ args.push(cloneUrl, repoPath);
864
+ await execGitAsync(args);
865
+ await execGitAsync([
866
+ "sparse-checkout",
867
+ "set",
868
+ ...SPARSE_CHECKOUT_DIRS
869
+ ], repoPath);
870
+ await execGitAsync(["checkout"], repoPath);
871
+ }
872
+ /**
873
+ * Check if a repository is a shallow clone.
874
+ */
875
+ function isShallowClone(repoPath) {
876
+ try {
877
+ return execGit(["rev-parse", "--is-shallow-repository"], repoPath) === "true";
878
+ } catch {
879
+ return false;
880
+ }
881
+ }
882
+ /**
883
+ * Convert a shallow clone to a full clone by fetching all history.
884
+ * Returns true if the repo was shallow and is now unshallowed.
885
+ * Returns false if the repo was already a full clone.
886
+ */
887
+ async function unshallowRepo(repoPath) {
888
+ if (!isShallowClone(repoPath)) return false;
889
+ await execGitAsync(["fetch", "--unshallow"], repoPath);
890
+ return true;
891
+ }
892
+ /**
893
+ * Update a cloned repository by running git fetch and pull.
894
+ *
895
+ * @param qualifiedName - The qualified name of the repo (e.g., "github.com:owner/repo")
896
+ * @param options - Update options
897
+ * @returns Update result with commit SHAs
898
+ * @throws RepoNotFoundError if repo not in index
899
+ * @throws GitError if fetch/pull fails
900
+ */
901
+ async function updateRepo(qualifiedName, options = {}) {
902
+ const entry = readGlobalMap().repos[qualifiedName];
903
+ if (!entry) throw new RepoNotFoundError(qualifiedName);
904
+ const repoPath = entry.localPath;
905
+ if (!existsSync(repoPath)) throw new RepoNotFoundError(qualifiedName);
906
+ const previousSha = getCommitSha(repoPath);
907
+ let unshallowed = false;
908
+ if (options.unshallow) unshallowed = await unshallowRepo(repoPath);
909
+ await execGitAsync(["fetch"], repoPath);
910
+ await execGitAsync(["pull", "--ff-only"], repoPath);
911
+ const currentSha = getCommitSha(repoPath);
912
+ upsertGlobalMapEntry(qualifiedName, {
913
+ ...entry,
914
+ updatedAt: (/* @__PURE__ */ new Date()).toISOString()
915
+ });
916
+ return {
917
+ updated: previousSha !== currentSha,
918
+ previousSha,
919
+ currentSha,
920
+ unshallowed
921
+ };
922
+ }
923
/**
 * Remove a cloned repository and/or its reference data.
 *
 * @param qualifiedName - The qualified name of the repo
 * @param options - `referenceOnly` keeps the clone; `repoOnly` keeps references
 * @returns true if the repo was known, false if not found in the map
 */
async function removeRepo(qualifiedName, options = {}) {
  const entry = readGlobalMap().repos[qualifiedName];
  if (!entry) return false;
  const deleteClone = !options.referenceOnly;
  const deleteReferences = !options.repoOnly;
  // Delete the on-disk clone and prune now-empty parent directories.
  if (deleteClone && existsSync(entry.localPath)) {
    rmSync(entry.localPath, { recursive: true, force: true });
    cleanupEmptyParentDirs(entry.localPath);
  }
  if (deleteReferences) {
    // Remove every reference file this repo owns.
    for (const fileName of entry.references) {
      const refPath = join(Paths.offworldReferencesDir, fileName);
      if (existsSync(refPath)) rmSync(refPath, { force: true });
    }
    // The primary reference also has a metadata directory (same stem).
    if (entry.primary) {
      const metaPath = join(Paths.metaDir, entry.primary.replace(/\.md$/, ""));
      if (existsSync(metaPath)) rmSync(metaPath, { recursive: true, force: true });
    }
  }
  if (deleteClone) {
    // Clone gone: drop the whole map entry.
    removeGlobalMapEntry(qualifiedName);
  } else if (deleteReferences) {
    // Clone kept: keep the entry but clear its reference bookkeeping.
    upsertGlobalMapEntry(qualifiedName, {
      ...entry,
      references: [],
      primary: ""
    });
  }
  return true;
}
965
/**
 * List all cloned repositories recorded in the global map.
 *
 * @returns Array of qualified names
 */
function listRepos() {
  return Object.keys(readGlobalMap().repos);
}
974
/**
 * Check whether a repository is both recorded in the map and present on disk.
 *
 * @param qualifiedName - The qualified name of the repo
 * @returns true if the repo exists in the map and its clone directory exists
 */
function isRepoCloned(qualifiedName) {
  const entry = readGlobalMap().repos[qualifiedName];
  return entry ? existsSync(entry.localPath) : false;
}
985
/**
 * Resolve the local filesystem path of a cloned repository.
 *
 * @param qualifiedName - The qualified name of the repo
 * @returns The local path, or undefined when the repo is unknown or the
 *          clone directory is missing on disk
 */
function getClonedRepoPath(qualifiedName) {
  const entry = readGlobalMap().repos[qualifiedName];
  if (entry && existsSync(entry.localPath)) return entry.localPath;
  return void 0;
}
997
+
998
+ //#endregion
999
+ //#region src/ai/errors.ts
1000
/**
 * Base class for OpenCode reference errors. Carries an optional `details`
 * payload and a `_tag` discriminant used for error classification.
 */
class OpenCodeReferenceError extends Error {
  _tag = "OpenCodeReferenceError";
  constructor(message, details) {
    super(message);
    Object.assign(this, { details, name: "OpenCodeReferenceError" });
  }
}
/** Raised when the @opencode-ai/sdk package cannot be imported. */
class OpenCodeSDKError extends OpenCodeReferenceError {
  _tag = "OpenCodeSDKError";
  constructor() {
    super("Failed to import @opencode-ai/sdk. Install it with: bun add @opencode-ai/sdk");
    this.name = "OpenCodeSDKError";
  }
}
/** Raised when the requested provider ID is not known to OpenCode. */
class InvalidProviderError extends OpenCodeReferenceError {
  _tag = "InvalidProviderError";
  hint;
  constructor(providerID, availableProviders) {
    const hint = availableProviders.length === 0
      ? "No providers available. Check your OpenCode configuration."
      : `Available providers: ${availableProviders.join(", ")}`;
    super(`Provider "${providerID}" not found. ${hint}`);
    Object.assign(this, {
      providerID,
      availableProviders,
      hint,
      name: "InvalidProviderError"
    });
  }
}
/** Raised when the provider exists but is not connected/authenticated. */
class ProviderNotConnectedError extends OpenCodeReferenceError {
  _tag = "ProviderNotConnectedError";
  hint;
  constructor(providerID, connectedProviders) {
    const hint = connectedProviders.length === 0
      ? `No providers connected. Run 'opencode auth ${providerID}' to authenticate.`
      : `Connected providers: ${connectedProviders.join(", ")}. Run 'opencode auth ${providerID}' to connect.`;
    super(`Provider "${providerID}" is not connected. ${hint}`);
    Object.assign(this, {
      providerID,
      connectedProviders,
      hint,
      name: "ProviderNotConnectedError"
    });
  }
}
/** Raised when the requested model is not offered by the provider. */
class InvalidModelError extends OpenCodeReferenceError {
  _tag = "InvalidModelError";
  hint;
  constructor(modelID, providerID, availableModels) {
    // Show at most 10 model names, then summarize the remainder.
    const hint = availableModels.length === 0
      ? `No models available for provider "${providerID}".`
      : `Available models for ${providerID}: ${availableModels.slice(0, 10).join(", ")}${availableModels.length > 10 ? ` (and ${availableModels.length - 10} more)` : ""}`;
    super(`Model "${modelID}" not found for provider "${providerID}". ${hint}`);
    Object.assign(this, {
      modelID,
      providerID,
      availableModels,
      hint,
      name: "InvalidModelError"
    });
  }
}
/** Raised when the embedded OpenCode server fails to start. */
class ServerStartError extends OpenCodeReferenceError {
  _tag = "ServerStartError";
  hint;
  constructor(message, port, details) {
    const hint = port
      ? `Failed to start server on port ${port}. Ensure no other process is using this port.`
      : "Failed to start OpenCode server. Check your OpenCode installation and configuration.";
    super(`${message}. ${hint}`, details);
    Object.assign(this, { port, hint, name: "ServerStartError" });
  }
}
/** Raised when a session operation fails. */
class SessionError extends OpenCodeReferenceError {
  _tag = "SessionError";
  hint;
  constructor(message, sessionId, sessionState, details) {
    const hint = `Session operation failed${sessionId ? ` (session: ${sessionId})` : ""}.${sessionState ? ` State: ${sessionState}.` : ""} Try creating a new session.`;
    super(`${message}. ${hint}`, details);
    Object.assign(this, {
      sessionId,
      sessionState,
      hint,
      name: "SessionError"
    });
  }
}
/** Raised when an operation exceeds its configured timeout. */
class TimeoutError extends OpenCodeReferenceError {
  _tag = "TimeoutError";
  hint;
  constructor(timeoutMs, operation = "operation") {
    const hint = `The ${operation} did not complete within ${timeoutMs}ms. Consider increasing the timeout or checking if the model is responding.`;
    super(`Timeout: ${operation} did not complete within ${timeoutMs}ms. ${hint}`);
    Object.assign(this, {
      timeoutMs,
      operation,
      hint,
      name: "TimeoutError"
    });
  }
}
1111
+
1112
+ //#endregion
1113
+ //#region src/ai/stream/types.ts
1114
/**
 * Zod schemas for OpenCode stream events.
 * Replaces inline `as` casts with runtime-validated types.
 */
// Fields shared by every message part; all optional because partial
// updates arrive during streaming.
const PartBaseSchema = z.object({
  id: z.string().optional(),
  sessionID: z.string().optional(),
  messageID: z.string().optional()
});
// Incremental text chunk of an assistant message.
const TextPartSchema = PartBaseSchema.extend({
  type: z.literal("text"),
  text: z.string().optional()
});
// Tool lifecycle states, discriminated on `status`.
const ToolStatePendingSchema = z.object({ status: z.literal("pending") });
const ToolStateRunningSchema = z.object({
  status: z.literal("running"),
  title: z.string().optional(),
  input: z.unknown().optional(),
  metadata: z.record(z.string(), z.unknown()).optional(),
  time: z.object({ start: z.number() }).optional()
});
const ToolStateCompletedSchema = z.object({
  status: z.literal("completed"),
  title: z.string().optional(),
  input: z.record(z.string(), z.unknown()).optional(),
  output: z.string().optional(),
  metadata: z.record(z.string(), z.unknown()).optional(),
  time: z.object({
    start: z.number(),
    end: z.number()
  }).optional()
});
const ToolStateErrorSchema = z.object({
  status: z.literal("error"),
  error: z.string().optional(),
  input: z.record(z.string(), z.unknown()).optional(),
  time: z.object({
    start: z.number(),
    end: z.number()
  }).optional()
});
const ToolStateSchema = z.discriminatedUnion("status", [
  ToolStatePendingSchema,
  ToolStateRunningSchema,
  ToolStateCompletedSchema,
  ToolStateErrorSchema
]);
// A tool invocation part (call ID, tool name, current state).
const ToolPartSchema = PartBaseSchema.extend({
  type: z.literal("tool"),
  callID: z.string().optional(),
  tool: z.string().optional(),
  state: ToolStateSchema.optional()
});
// Step boundary markers within a message.
const StepStartPartSchema = PartBaseSchema.extend({ type: z.literal("step-start") });
const StepFinishPartSchema = PartBaseSchema.extend({
  type: z.literal("step-finish"),
  reason: z.string().optional()
});
// Alternate tool-call part shapes (Anthropic-style naming).
const ToolUsePartSchema = PartBaseSchema.extend({
  type: z.literal("tool-use"),
  toolUseId: z.string().optional(),
  name: z.string().optional()
});
const ToolResultPartSchema = PartBaseSchema.extend({
  type: z.literal("tool-result"),
  toolUseId: z.string().optional()
});
// Any message part, discriminated on `type`.
const MessagePartSchema = z.discriminatedUnion("type", [
  TextPartSchema,
  ToolPartSchema,
  StepStartPartSchema,
  StepFinishPartSchema,
  ToolUsePartSchema,
  ToolResultPartSchema
]);
/**
 * Session error payload
 */
const SessionErrorSchema = z.object({
  name: z.string().optional(),
  message: z.string().optional(),
  code: z.string().optional()
});
// Properties for message.part.updated event.
const MessagePartUpdatedPropsSchema = z.object({ part: MessagePartSchema });
/**
 * Properties for session.idle event
 */
const SessionIdlePropsSchema = z.object({ sessionID: z.string() });
/**
 * Properties for session.error event
 */
const SessionErrorPropsSchema = z.object({
  sessionID: z.string(),
  error: SessionErrorSchema.optional()
});
/**
 * Properties for session.updated event
 */
const SessionUpdatedPropsSchema = z.object({
  sessionID: z.string(),
  status: z.string().optional()
});
/**
 * message.part.updated event
 */
const MessagePartUpdatedEventSchema = z.object({
  type: z.literal("message.part.updated"),
  properties: MessagePartUpdatedPropsSchema
});
/**
 * session.idle event
 */
const SessionIdleEventSchema = z.object({
  type: z.literal("session.idle"),
  properties: SessionIdlePropsSchema
});
/**
 * session.error event
 */
const SessionErrorEventSchema = z.object({
  type: z.literal("session.error"),
  properties: SessionErrorPropsSchema
});
/**
 * session.updated event
 */
const SessionUpdatedEventSchema = z.object({
  type: z.literal("session.updated"),
  properties: SessionUpdatedPropsSchema
});
/**
 * Generic event for unknown types (passthrough)
 */
const GenericEventSchema = z.object({
  type: z.string(),
  properties: z.record(z.string(), z.unknown())
});
1251
+
1252
+ //#endregion
1253
+ //#region src/ai/stream/accumulator.ts
1254
/**
 * Accumulates streamed text parts keyed by part ID and yields only the
 * newly-appended suffix ("delta") on each update.
 */
class TextAccumulator {
  // Latest full snapshot of each text part, keyed by part ID.
  parts = new Map();
  _firstTextReceived = false;
  /** Whether any non-empty text part has been accumulated so far. */
  get hasReceivedText() {
    return this._firstTextReceived;
  }
  /**
   * Record the latest snapshot of a text part and return the appended
   * suffix, or null when the part is not a usable text part (wrong type,
   * no ID, no text) or nothing new was appended.
   */
  accumulatePart(part) {
    if (part.type !== "text" || !part.text || !part.id) return null;
    const previous = this.parts.get(part.id) ?? "";
    this.parts.set(part.id, part.text);
    this._firstTextReceived = true;
    // Parts arrive as growing snapshots; the delta is the new tail.
    return part.text.length > previous.length
      ? part.text.slice(previous.length)
      : null;
  }
  /** Concatenation of the latest snapshot of every part, in insertion order. */
  getFullText() {
    let full = "";
    for (const text of this.parts.values()) full += text;
    return full;
  }
  /** Reset all accumulated state. */
  clear() {
    this.parts.clear();
    this._firstTextReceived = false;
  }
}
1294
+
1295
+ //#endregion
1296
+ //#region src/ai/stream/transformer.ts
1297
/**
 * Parse a raw stream event into a typed result.
 * Uses safe parsing — unrecognized event types and payloads that fail
 * schema validation are mapped to `{ type: "unknown", rawType }`.
 */
function parseStreamEvent(event) {
  const unknown = {
    type: "unknown",
    rawType: event.type
  };
  if (event.type === "message.part.updated") {
    const parsed = MessagePartUpdatedPropsSchema.safeParse(event.properties);
    if (!parsed.success) return unknown;
    const { part } = parsed.data;
    // Pre-narrow the part so callers don't repeat the type checks.
    return {
      type: "message.part.updated",
      props: parsed.data,
      textPart: part.type === "text" ? part : null,
      toolPart: part.type === "tool" ? part : null
    };
  }
  if (event.type === "session.idle") {
    const parsed = SessionIdlePropsSchema.safeParse(event.properties);
    return parsed.success ? {
      type: "session.idle",
      props: parsed.data
    } : unknown;
  }
  if (event.type === "session.error") {
    const parsed = SessionErrorPropsSchema.safeParse(event.properties);
    if (!parsed.success) return unknown;
    // The error payload is validated separately; invalid shapes become null.
    let error = null;
    if (parsed.data.error) {
      const errorParsed = SessionErrorSchema.safeParse(parsed.data.error);
      if (errorParsed.success) error = errorParsed.data;
    }
    return {
      type: "session.error",
      props: parsed.data,
      error
    };
  }
  return unknown;
}
1356
/**
 * Decide whether a stream event belongs to the given session.
 * Checks a top-level `sessionID`, then a nested `part.sessionID`;
 * events carrying no session identifier at all are treated as relevant.
 */
function isEventForSession(event, sessionId) {
  const props = event.properties;
  if (typeof props.sessionID === "string") {
    return props.sessionID === sessionId;
  }
  const part = props.part;
  if (part !== null && typeof part === "object" && typeof part.sessionID === "string") {
    return part.sessionID === sessionId;
  }
  return true;
}
1362
+
1363
+ //#endregion
1364
+ //#region src/ai/opencode.ts
1365
const DEFAULT_AI_PROVIDER = "opencode";
const DEFAULT_AI_MODEL = "claude-opus-4-5";
// Factory functions from @opencode-ai/sdk, cached after the first
// successful dynamic import.
let cachedCreateOpencode = null;
let cachedCreateOpencodeClient = null;
/**
 * Lazily import @opencode-ai/sdk and cache its factory functions.
 *
 * @returns The `createOpencode` and `createOpencodeClient` factories
 * @throws OpenCodeSDKError when the package cannot be imported
 */
async function getOpenCodeSDK() {
  if (cachedCreateOpencode && cachedCreateOpencodeClient) {
    return {
      createOpencode: cachedCreateOpencode,
      createOpencodeClient: cachedCreateOpencodeClient
    };
  }
  let sdk;
  try {
    sdk = await import("@opencode-ai/sdk");
  } catch {
    throw new OpenCodeSDKError();
  }
  cachedCreateOpencode = sdk.createOpencode;
  cachedCreateOpencodeClient = sdk.createOpencodeClient;
  return {
    createOpencode: cachedCreateOpencode,
    createOpencodeClient: cachedCreateOpencodeClient
  };
}
1386
/**
 * Produce a short human-readable progress message for a running tool.
 * Prefers the state's own title; otherwise derives a message from the
 * tool name and its input, or returns null when there is no tool name.
 */
function formatToolMessage(tool, state) {
  if (state.title) return state.title;
  if (!tool) return null;
  const input = state.input;
  if (!input) return `Running ${tool}...`;
  if (tool === "read") {
    const path = input.filePath ?? input.path;
    // Show only the file's basename to keep messages short.
    return typeof path === "string" ? `Reading ${path.split("/").pop()}...` : "Reading file...";
  }
  if (tool === "glob") {
    const pattern = input.pattern;
    return typeof pattern === "string" ? `Globbing ${pattern}...` : "Searching files...";
  }
  if (tool === "grep") {
    const pattern = input.pattern;
    if (typeof pattern !== "string") return "Searching content...";
    // Truncate long patterns to 30 characters.
    const shown = pattern.length > 30 ? `${pattern.slice(0, 30)}...` : pattern;
    return `Searching for "${shown}"...`;
  }
  if (tool === "list") {
    const path = input.path;
    return typeof path === "string" ? `Listing ${path}...` : "Listing directory...";
  }
  return `Running ${tool}...`;
}
1415
/**
 * Run a single analysis prompt against an embedded OpenCode server.
 *
 * Starts a local server on a random port, creates a session using a
 * locked-down read-only "analyze" agent, streams the model's response via
 * the event subscription, and returns the accumulated text.
 *
 * @param options.prompt - User prompt to send
 * @param options.cwd - Directory the agent analyzes (tools are scoped here)
 * @param options.systemPrompt - Optional text prepended to the prompt
 * @param options.provider - Provider ID (defaults to DEFAULT_AI_PROVIDER)
 * @param options.model - Model ID (defaults to DEFAULT_AI_MODEL)
 * @param options.timeoutMs - Overall response timeout; 0/undefined disables it
 * @param options.onDebug - Progress-message callback
 * @param options.onStream - Incremental response-text callback
 * @returns `{ text, sessionId, durationMs }`
 */
async function streamPrompt(options) {
  const { prompt, cwd, systemPrompt, provider: optProvider, model: optModel, timeoutMs, onDebug, onStream } = options;
  // Callbacks default to no-ops so the call sites below stay unconditional.
  const debug = onDebug ?? (() => {});
  const stream = onStream ?? (() => {});
  const startTime = Date.now();
  debug("Loading OpenCode SDK...");
  const { createOpencode, createOpencodeClient } = await getOpenCodeSDK();
  const maxAttempts = 10;
  let server = null;
  let client = null;
  let port = 0;
  // Server config: all built-in agents are disabled and a single read-only
  // "analyze" agent is defined, with write/exec tools off and edit/bash/
  // webfetch/external-directory permissions denied.
  const config = {
    plugin: [],
    mcp: {},
    instructions: [],
    agent: {
      build: { disable: true },
      general: { disable: true },
      plan: { disable: true },
      explore: { disable: true },
      analyze: {
        prompt: [
          "You are an expert at analyzing open source codebases and producing documentation.",
          "",
          "Your job is to read the codebase and produce structured output based on the user's request.",
          "Use glob to discover files, grep to search for patterns, and read to examine file contents.",
          "",
          "Guidelines:",
          "- Explore the codebase thoroughly before producing output",
          "- Focus on understanding architecture, key abstractions, and usage patterns",
          "- When asked for JSON output, respond with ONLY valid JSON - no markdown, no code blocks",
          "- When asked for prose, write clear and concise documentation",
          "- Always base your analysis on actual code you've read, never speculate"
        ].join("\n"),
        mode: "primary",
        description: "Analyze open source codebases and produce summaries and reference files",
        tools: {
          read: true,
          grep: true,
          glob: true,
          list: true,
          write: false,
          bash: false,
          delete: false,
          edit: false,
          patch: false,
          path: false,
          todowrite: false,
          todoread: false,
          websearch: false,
          webfetch: false,
          codesearch: false,
          skill: false,
          task: false,
          mcp: false,
          question: false,
          plan_enter: false,
          plan_exit: false
        },
        permission: {
          edit: "deny",
          bash: "deny",
          webfetch: "deny",
          external_directory: "deny"
        }
      }
    }
  };
  debug("Starting embedded OpenCode server...");
  // Retry with a fresh random port in [3000, 5999] when the port is taken;
  // any other startup failure is fatal immediately.
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    port = Math.floor(Math.random() * 3e3) + 3e3;
    try {
      server = (await createOpencode({
        port,
        cwd,
        config
      })).server;
      client = createOpencodeClient({
        baseUrl: `http://localhost:${port}`,
        directory: cwd
      });
      debug(`Server started on port ${port}`);
      break;
    } catch (err) {
      // Heuristic: treat errors mentioning "port" as port collisions.
      if (err instanceof Error && err.message?.includes("port")) continue;
      throw new ServerStartError("Failed to start OpenCode server", port, err);
    }
  }
  if (!server || !client) throw new ServerStartError("Failed to start OpenCode server after all attempts");
  const providerID = optProvider ?? DEFAULT_AI_PROVIDER;
  const modelID = optModel ?? DEFAULT_AI_MODEL;
  try {
    debug("Creating session...");
    const sessionResult = await client.session.create();
    if (sessionResult.error) throw new SessionError("Failed to create session", void 0, void 0, sessionResult.error);
    const sessionId = sessionResult.data.id;
    debug(`Session created: ${sessionId}`);
    // Validate provider + model up front so failures are typed and early.
    debug("Validating provider and model...");
    const providerResult = await client.provider.list();
    if (providerResult.error) throw new OpenCodeReferenceError("Failed to fetch provider list", providerResult.error);
    const { all: allProviders, connected: connectedProviders } = providerResult.data;
    const allProviderIds = allProviders.map((p) => p.id);
    const provider = allProviders.find((p) => p.id === providerID);
    if (!provider) throw new InvalidProviderError(providerID, allProviderIds);
    if (!connectedProviders.includes(providerID)) throw new ProviderNotConnectedError(providerID, connectedProviders);
    const availableModelIds = Object.keys(provider.models);
    if (!provider.models[modelID]) throw new InvalidModelError(modelID, providerID, availableModelIds);
    debug(`Provider "${providerID}" and model "${modelID}" validated`);
    // Subscribe to the event stream BEFORE sending the prompt so no
    // events are missed.
    debug("Subscribing to events...");
    const { stream: eventStream } = await client.event.subscribe();
    const fullPrompt = systemPrompt ? `${systemPrompt}\n\nAnalyzing codebase at: ${cwd}\n\n${prompt}` : `Analyzing codebase at: ${cwd}\n\n${prompt}`;
    debug("Sending prompt...");
    // Not awaited yet: the response is consumed from the event stream.
    const promptPromise = client.session.prompt({
      path: { id: sessionId },
      body: {
        agent: "analyze",
        parts: [{
          type: "text",
          text: fullPrompt
        }],
        model: {
          providerID,
          modelID
        }
      }
    });
    const textAccumulator = new TextAccumulator();
    debug("Waiting for response...");
    let timeoutId = null;
    // Drain events for this session until it goes idle (or errors),
    // accumulating text deltas and reporting tool progress.
    const processEvents = async () => {
      for await (const event of eventStream) {
        if (!isEventForSession(event, sessionId)) continue;
        const parsed = parseStreamEvent(event);
        switch (parsed.type) {
          case "message.part.updated":
            if (parsed.toolPart?.state) {
              const { state, tool } = parsed.toolPart;
              if (state.status === "running") {
                const message = formatToolMessage(tool, state);
                if (message) debug(message);
              }
            }
            if (parsed.textPart) {
              const delta = textAccumulator.accumulatePart(parsed.textPart);
              // NOTE(review): accumulatePart sets hasReceivedText before
              // this check runs, so for the first non-empty text part this
              // debug never fires — the check likely belongs BEFORE the
              // accumulate call. Confirm intent.
              if (!textAccumulator.hasReceivedText) debug("Writing reference...");
              if (delta) stream(delta);
            }
            break;
          case "session.idle":
            if (parsed.props.sessionID === sessionId) {
              debug("Response complete");
              return textAccumulator.getFullText();
            }
            break;
          case "session.error":
            if (parsed.props.sessionID === sessionId) {
              const errorName = parsed.error?.name ?? "Unknown session error";
              debug(`Session error: ${JSON.stringify(parsed.props.error)}`);
              throw new SessionError(errorName, sessionId, "error", parsed.props.error);
            }
            break;
          case "unknown": break;
        }
      }
      // Stream ended without an idle event: return whatever accumulated.
      return textAccumulator.getFullText();
    };
    let responseText;
    if (timeoutMs && timeoutMs > 0) {
      // NOTE(review): if the timeout wins the race, promptPromise is never
      // awaited and may surface as an unhandled rejection — confirm.
      const timeoutPromise = new Promise((_, reject) => {
        timeoutId = setTimeout(() => {
          reject(new TimeoutError(timeoutMs, "session response"));
        }, timeoutMs);
      });
      responseText = await Promise.race([processEvents(), timeoutPromise]);
      if (timeoutId) clearTimeout(timeoutId);
    } else responseText = await processEvents();
    await promptPromise;
    if (!responseText) throw new OpenCodeReferenceError("No response received from OpenCode");
    debug(`Response received (${responseText.length} chars)`);
    const durationMs = Date.now() - startTime;
    debug(`Complete in ${durationMs}ms`);
    return {
      text: responseText,
      sessionId,
      durationMs
    };
  } finally {
    // Always shut the embedded server down, even on error/timeout.
    debug("Closing server...");
    server.close();
  }
}
1606
+
1607
+ //#endregion
1608
+ //#region src/sync.ts
1609
+ /**
1610
+ * Sync utilities for CLI-Convex communication
1611
+ * Uses ConvexHttpClient for direct type-safe API calls
1612
+ */
1613
/**
 * Convex deployment URL for sync calls, read from the CONVEX_URL
 * environment variable (empty string when unset).
 */
function getConvexUrl() {
  const url = process.env.CONVEX_URL;
  return url ? url : "";
}
1617
/** Base class for all CLI-Convex sync errors. */
class SyncError extends Error {
  constructor(message) {
    super(message);
    this.name = "SyncError";
  }
}
/** Network/transport failure talking to the server; may carry an HTTP status. */
class NetworkError extends SyncError {
  statusCode;
  constructor(message, statusCode) {
    super(message);
    this.statusCode = statusCode;
    this.name = "NetworkError";
  }
}
/** The operation requires a logged-in user. */
class AuthenticationError extends SyncError {
  constructor(message = "Authentication required. Please run 'ow auth login' first.") {
    super(message);
    this.name = "AuthenticationError";
  }
}
/** Server-side push rate limit was hit. */
class RateLimitError extends SyncError {
  constructor(message = "Rate limit exceeded. You can push up to 3 times per repo per day.") {
    super(message);
    this.name = "RateLimitError";
  }
}
/** The server already holds a newer reference; carries its commit SHA. */
class ConflictError extends SyncError {
  remoteCommitSha;
  constructor(message = "A newer reference already exists on the server.", remoteCommitSha) {
    super(message);
    this.remoteCommitSha = remoteCommitSha;
    this.name = "ConflictError";
  }
}
/** A reference for this exact commit SHA already exists. */
class CommitExistsError extends SyncError {
  constructor(message = "A reference already exists for this commit SHA.") {
    super(message);
    this.name = "CommitExistsError";
  }
}
/** The push payload failed server-side input validation. */
class InvalidInputError extends SyncError {
  constructor(message) {
    super(message);
    this.name = "InvalidInputError";
  }
}
/** The reference content itself was rejected by the server. */
class InvalidReferenceError extends SyncError {
  constructor(message) {
    super(message);
    this.name = "InvalidReferenceError";
  }
}
// Bundler-renamed ($1) to avoid clashing with the repos module's
// RepoNotFoundError; the runtime `name` is still "RepoNotFoundError".
class RepoNotFoundError$1 extends SyncError {
  constructor(message = "Repository not found on GitHub.") {
    super(message);
    this.name = "RepoNotFoundError";
  }
}
/** The repository does not meet the minimum star requirement. */
class LowStarsError extends SyncError {
  constructor(message = "Repository has less than 5 stars.") {
    super(message);
    this.name = "LowStarsError";
  }
}
/** Private repositories cannot be pushed. */
class PrivateRepoError extends SyncError {
  constructor(message = "Private repositories are not supported.") {
    super(message);
    this.name = "PrivateRepoError";
  }
}
/** The referenced commit is not present in the repository. */
class CommitNotFoundError extends SyncError {
  constructor(message = "Commit not found in repository.") {
    super(message);
    this.name = "CommitNotFoundError";
  }
}
/** GitHub returned an unexpected error. */
class GitHubError extends SyncError {
  constructor(message = "GitHub API error.") {
    super(message);
    this.name = "GitHubError";
  }
}
/** Push was refused for a policy reason, recorded in `reason`. */
class PushNotAllowedError extends SyncError {
  reason;
  constructor(message, reason) {
    super(message);
    this.reason = reason;
    this.name = "PushNotAllowedError";
  }
}
1704
/**
 * Build a ConvexHttpClient for the configured deployment, optionally
 * authenticated with the given token.
 * @throws SyncError when CONVEX_URL is not configured
 */
function createClient(token) {
  const url = getConvexUrl();
  if (!url) {
    throw new SyncError("CONVEX_URL not configured. For local development, ensure apps/cli/.env contains CONVEX_URL=your_convex_url");
  }
  const client = new ConvexHttpClient(url);
  if (token) client.setAuth(token);
  return client;
}
1711
/**
 * Fetch the best-matching reference for a repo from the remote server.
 * Tries the name derived from the repo first, then falls back to the
 * repo's default reference. Records the pull (fire-and-forget) on success.
 *
 * @param fullName - Repository full name (owner/repo)
 * @returns Reference data or null if not found
 * @throws NetworkError when the query fails
 */
async function pullReference(fullName) {
  const client = createClient();
  try {
    let result = await client.query(api.references.pull, {
      fullName,
      referenceName: toReferenceName(fullName)
    });
    // Fall back to the repo's default reference when the derived name
    // has no match.
    result ||= await client.query(api.references.pull, { fullName });
    if (!result) return null;
    // Best-effort pull analytics; failures are intentionally ignored.
    client.mutation(api.references.recordPull, {
      fullName,
      referenceName: result.referenceName
    }).catch(() => {});
    const { fullName: name, referenceName, referenceDescription, referenceContent, commitSha, generatedAt } = result;
    return {
      fullName: name,
      referenceName,
      referenceDescription,
      referenceContent,
      commitSha,
      generatedAt
    };
  } catch (error) {
    throw new NetworkError(`Failed to pull reference: ${error instanceof Error ? error.message : error}`);
  }
}
1741
/**
 * Fetch a specific reference by name from the remote server and record
 * the pull (fire-and-forget) on success.
 *
 * @param fullName - Repository full name (owner/repo)
 * @param referenceName - Specific reference name to pull
 * @returns Reference data or null if not found
 * @throws NetworkError when the query fails
 */
async function pullReferenceByName(fullName, referenceName) {
  const client = createClient();
  try {
    const result = await client.query(api.references.pull, {
      fullName,
      referenceName
    });
    if (!result) return null;
    // Best-effort pull analytics; failures are intentionally ignored.
    client.mutation(api.references.recordPull, {
      fullName,
      referenceName
    }).catch(() => {});
    const { fullName: name, referenceName: refName, referenceDescription, referenceContent, commitSha, generatedAt } = result;
    return {
      fullName: name,
      referenceName: refName,
      referenceDescription,
      referenceContent,
      commitSha,
      generatedAt
    };
  } catch (error) {
    throw new NetworkError(`Failed to pull reference: ${error instanceof Error ? error.message : error}`);
  }
}
1771
/**
 * Push a reference to the remote server. All validation happens
 * server-side; server error codes are mapped to typed SyncError
 * subclasses here.
 *
 * @param reference - Reference data to push
 * @param token - Authentication token
 * @returns `{ success: true }` on success
 * @throws One of the SyncError subclasses on a server-reported failure,
 *         or NetworkError on transport failure
 */
async function pushReference(reference, token) {
  const client = createClient(token);
  try {
    const { fullName, referenceName, referenceDescription, referenceContent, commitSha, generatedAt } = reference;
    const result = await client.action(api.references.push, {
      fullName,
      referenceName,
      referenceDescription,
      referenceContent,
      commitSha,
      generatedAt
    });
    if (!result.success) {
      // Translate the server's error code into a typed exception.
      switch (result.error) {
        case "auth_required": throw new AuthenticationError();
        case "rate_limit": throw new RateLimitError("Rate limit exceeded. You can push up to 20 times per day.");
        case "commit_already_exists": throw new CommitExistsError(result.message);
        case "invalid_input": throw new InvalidInputError(result.message ?? "Invalid input");
        case "invalid_reference": throw new InvalidReferenceError(result.message ?? "Invalid reference content");
        case "repo_not_found": throw new RepoNotFoundError$1(result.message);
        case "low_stars": throw new LowStarsError(result.message);
        case "private_repo": throw new PrivateRepoError(result.message);
        case "commit_not_found": throw new CommitNotFoundError(result.message);
        case "github_error": throw new GitHubError(result.message);
        default: throw new SyncError(result.message ?? "Unknown error");
      }
    }
    return { success: true };
  } catch (error) {
    // Typed sync errors pass through; everything else is a transport issue.
    if (error instanceof SyncError) throw error;
    throw new NetworkError(`Failed to push reference: ${error instanceof Error ? error.message : error}`);
  }
}
1808
/**
 * Lightweight existence check for a repo's reference on the remote
 * server. Tries the derived reference name first, then the repo default.
 *
 * @param fullName - Repository full name (owner/repo)
 * @returns `{ exists: false }` or `{ exists: true, commitSha, generatedAt }`
 * @throws NetworkError when the query fails
 */
async function checkRemote(fullName) {
  const client = createClient();
  try {
    let result = await client.query(api.references.check, {
      fullName,
      referenceName: toReferenceName(fullName)
    });
    // Fall back to the repo's default reference.
    if (!result.exists) {
      result = await client.query(api.references.check, { fullName });
    }
    if (!result.exists) return { exists: false };
    return {
      exists: true,
      commitSha: result.commitSha,
      generatedAt: result.generatedAt
    };
  } catch (error) {
    throw new NetworkError(`Failed to check remote: ${error instanceof Error ? error.message : error}`);
  }
}
1831
/**
 * Checks if a specific reference exists on the remote server
 * @param fullName - Repository full name (owner/repo)
 * @param referenceName - Specific reference name to check
 * @returns Check result ({exists} plus commitSha/generatedAt when found)
 * @throws NetworkError on any transport/query failure
 */
async function checkRemoteByName(fullName, referenceName) {
  const client = createClient();
  try {
    const result = await client.query(api.references.check, { fullName, referenceName });
    return result.exists
      ? { exists: true, commitSha: result.commitSha, generatedAt: result.generatedAt }
      : { exists: false };
  } catch (error) {
    throw new NetworkError(`Failed to check remote: ${error instanceof Error ? error.message : error}`);
  }
}
1854
/**
 * Compares local vs remote commit SHA to check staleness
 *
 * A repo with no remote reference (or one missing a SHA) is reported as
 * not stale, since there is nothing newer to compare against.
 *
 * @param fullName - Repository full name (owner/repo)
 * @param localCommitSha - Local commit SHA
 * @returns Staleness result with both SHAs
 */
async function checkStaleness(fullName, localCommitSha) {
  const remote = await checkRemote(fullName);
  const remoteCommitSha = remote.exists ? remote.commitSha : void 0;
  if (!remoteCommitSha) {
    return { isStale: false, localCommitSha, remoteCommitSha: void 0 };
  }
  return {
    isStale: localCommitSha !== remoteCommitSha,
    localCommitSha,
    remoteCommitSha
  };
}
1873
/**
 * Fetches GitHub repository metadata
 *
 * Best-effort: any HTTP failure or thrown error yields null rather than
 * propagating, so callers can treat metadata as optional.
 *
 * @param owner - Repository owner
 * @param repo - Repository name
 * @returns Repository metadata or null on error
 */
async function fetchGitHubMetadata(owner, repo) {
  const url = `${GITHUB_API_BASE}/repos/${owner}/${repo}`;
  const headers = {
    Accept: "application/vnd.github.v3+json",
    "User-Agent": "offworld-cli"
  };
  try {
    const response = await fetch(url, { headers });
    if (!response.ok) return null;
    const data = await response.json();
    return {
      stars: data.stargazers_count ?? 0,
      description: data.description ?? void 0,
      language: data.language ?? void 0,
      defaultBranch: data.default_branch ?? "main"
    };
  } catch {
    return null;
  }
}
1897
/**
 * Fetches GitHub repository stars
 * @param owner - Repository owner
 * @param repo - Repository name
 * @returns Number of stars, or 0 on error
 */
async function fetchRepoStars(owner, repo) {
  const metadata = await fetchGitHubMetadata(owner, repo);
  return metadata?.stars ?? 0;
}
1906
/**
 * Checks if a repository can be pushed to offworld.sh (client-side quick checks)
 * Note: Star count and other validations happen server-side
 *
 * @param source - Repository source ({type, provider, ...})
 * @returns {{allowed: true} | {allowed: false, reason: string}}
 */
function canPushToWeb(source) {
  // Local clones have no public URL, so there is nothing to push.
  if (source.type === "local") {
    return {
      allowed: false,
      reason: "Local repositories cannot be pushed to offworld.sh. Only remote repositories with a public URL are supported."
    };
  }
  // Only GitHub-hosted repos are supported server-side today.
  if (source.provider !== "github") {
    return {
      allowed: false,
      reason: `${source.provider} repositories are not yet supported. GitHub support only for now - GitLab and Bitbucket coming soon!`
    };
  }
  return { allowed: true };
}
1924
/**
 * Validates that a source can be pushed and throws appropriate error if not
 * Note: This only does quick client-side checks. Full validation happens server-side.
 * @param source - Repository source
 * @throws PushNotAllowedError if push is not allowed ("local" or "not-github")
 */
function validatePushAllowed(source) {
  const verdict = canPushToWeb(source);
  if (verdict.allowed) return;
  const reasonCode = source.type === "local" ? "local" : "not-github";
  throw new PushNotAllowedError(verdict.reason, reasonCode);
}
1937
+
1938
+ //#endregion
1939
+ //#region src/auth.ts
1940
+ /**
1941
+ * Authentication utilities for offworld CLI
1942
+ */
1943
/** Base class for all authentication failures raised by this module. */
var AuthError = class AuthError extends Error {
  constructor(message) {
    super(message);
    this.name = "AuthError";
  }
};
/** Raised when no auth data is stored on disk. */
var NotLoggedInError = class NotLoggedInError extends AuthError {
  constructor(message = "Not logged in. Please run 'ow auth login' first.") {
    super(message);
    this.name = "NotLoggedInError";
  }
};
/** Raised when the stored token is expired and cannot be refreshed. */
var TokenExpiredError = class TokenExpiredError extends AuthError {
  constructor(message = "Session expired. Please run 'ow auth login' again.") {
    super(message);
    this.name = "TokenExpiredError";
  }
};
1961
/**
 * Extracts expiration timestamp from JWT access token
 *
 * Decodes the middle (payload) segment as base64 JSON and reads the `exp`
 * claim (seconds since epoch). Any malformed input yields undefined.
 *
 * @param token - JWT access token
 * @returns ISO string of expiration date, or undefined if extraction fails
 */
function extractJwtExpiration(token) {
  try {
    const segments = token.split(".");
    if (segments.length !== 3) return void 0;
    const payload = segments[1];
    if (!payload) return void 0;
    const claims = JSON.parse(Buffer.from(payload, "base64").toString("utf-8"));
    // `exp` is in seconds; Date wants milliseconds.
    return typeof claims.exp === "number" ? new Date(claims.exp * 1e3).toISOString() : void 0;
  } catch {
    return void 0;
  }
}
1979
/**
 * Returns the auth file path using XDG Base Directory spec
 * Location: ~/.local/share/offworld/auth.json
 *
 * Delegates to the shared Paths object so every auth helper agrees on the
 * same location.
 */
function getAuthPath() {
  return Paths.authFile;
}
1986
/**
 * Saves authentication data to ~/.local/share/offworld/auth.json
 * Creates directory if it doesn't exist
 *
 * The file is chmod'd to 0o600 (owner read/write only) because it holds
 * access and refresh tokens.
 */
function saveAuthData(data) {
  const authPath = getAuthPath();
  // recursive mkdir is a no-op when the directory already exists
  mkdirSync(dirname(authPath), { recursive: true });
  writeFileSync(authPath, JSON.stringify(data, null, 2), "utf-8");
  chmodSync(authPath, 0o600);
}
1997
/**
 * Loads authentication data from ~/.local/share/offworld/auth.json
 * Returns null if file doesn't exist or is invalid
 *
 * "Invalid" means unreadable, unparsable JSON, or a payload whose `token`
 * field is missing or not a non-empty string.
 */
function loadAuthData() {
  const authPath = getAuthPath();
  if (!existsSync(authPath)) return null;
  try {
    const parsed = JSON.parse(readFileSync(authPath, "utf-8"));
    const hasToken = typeof parsed.token === "string" && parsed.token;
    return hasToken ? parsed : null;
  } catch {
    return null;
  }
}
2013
/**
 * Clears stored authentication data
 * @returns true if auth file was deleted, false if it didn't exist
 * (or could not be deleted)
 */
function clearAuthData() {
  const authPath = getAuthPath();
  if (existsSync(authPath)) {
    try {
      unlinkSync(authPath);
      return true;
    } catch {
      // fall through: deletion failed (e.g. permissions)
    }
  }
  return false;
}
2027
/**
 * Gets the current authentication token
 * Auto-refreshes if token expires within 1 minute
 * @throws NotLoggedInError if not logged in
 * @throws TokenExpiredError if token is expired and refresh fails
 */
async function getToken() {
  const data = loadAuthData();
  if (!data) throw new NotLoggedInError();
  // Backfill expiresAt from the JWT itself for auth files written before
  // expiry tracking existed, and persist the result.
  let expiresAtStr = data.expiresAt;
  if (!expiresAtStr) {
    expiresAtStr = extractJwtExpiration(data.token);
    if (expiresAtStr) {
      data.expiresAt = expiresAtStr;
      saveAuthData(data);
    }
  }
  if (expiresAtStr) {
    const ONE_MINUTE_MS = 60 * 1e3;
    // NaN (unparsable date) fails both comparisons, so we fall through to
    // returning the stored token - same as the original behavior.
    const msRemaining = new Date(expiresAtStr).getTime() - Date.now();
    if (msRemaining <= 0) {
      // Already expired: refresh or give up.
      if (data.refreshToken) {
        try {
          return (await refreshAccessToken()).token;
        } catch {
          throw new TokenExpiredError();
        }
      }
      throw new TokenExpiredError();
    }
    if (msRemaining < ONE_MINUTE_MS && data.refreshToken) {
      // About to expire: refresh opportunistically, but tolerate failure
      // since the current token is still valid for a few more seconds.
      try {
        return (await refreshAccessToken()).token;
      } catch {
        return data.token;
      }
    }
  }
  return data.token;
}
2066
/**
 * Gets the current authentication token, or null if not logged in
 * Does not throw errors
 */
async function getTokenOrNull() {
  return getToken().catch(() => null);
}
2077
/**
 * Checks if user is logged in with valid token
 */
async function isLoggedIn() {
  const token = await getTokenOrNull();
  return token !== null;
}
2083
/**
 * Reports the current login status without throwing.
 *
 * An entry with no recorded expiry is reported as logged in (legacy auth
 * files); an expired entry is refreshed when a refresh token exists,
 * otherwise reported as logged out.
 */
async function getAuthStatus() {
  const data = loadAuthData();
  if (!data) return { isLoggedIn: false };
  const isExpired = data.expiresAt ? new Date(data.expiresAt) <= new Date() : false;
  if (isExpired) {
    if (!data.refreshToken) return { isLoggedIn: false };
    try {
      const refreshed = await refreshAccessToken();
      return {
        isLoggedIn: true,
        email: refreshed.email,
        workosId: refreshed.workosId,
        expiresAt: refreshed.expiresAt
      };
    } catch {
      return { isLoggedIn: false };
    }
  }
  return {
    isLoggedIn: true,
    email: data.email,
    workosId: data.workosId,
    expiresAt: data.expiresAt
  };
}
2109
/** Base URL for the WorkOS REST API. */
const WORKOS_API = "https://api.workos.com";
/**
 * Reads the WorkOS client id from the environment.
 * @returns WORKOS_CLIENT_ID, or "" when unset/empty
 */
function getWorkosClientId() {
  const clientId = process.env.WORKOS_CLIENT_ID;
  return clientId || "";
}
2113
/**
 * Refreshes the access token using the stored refresh token
 * @returns New auth data with refreshed token
 * @throws AuthError if refresh fails (missing refresh token, missing client
 *         id, HTTP failure, or unexpected response shape)
 */
async function refreshAccessToken() {
  const data = loadAuthData();
  if (!data?.refreshToken) throw new AuthError("No refresh token available. Please log in again.");
  const clientId = getWorkosClientId();
  if (!clientId) throw new AuthError("WORKOS_CLIENT_ID not configured");
  try {
    const body = new URLSearchParams({
      grant_type: "refresh_token",
      refresh_token: data.refreshToken,
      client_id: clientId
    });
    const response = await fetch(`${WORKOS_API}/user_management/authenticate`, {
      method: "POST",
      headers: { "Content-Type": "application/x-www-form-urlencoded" },
      body
    });
    if (!response.ok) throw new AuthError(`Token refresh failed: ${await response.text()}`);
    const tokenData = await response.json();
    // Prefer the explicit expires_at (seconds); otherwise decode it from the JWT.
    const expiresAt = tokenData.expires_at
      ? new Date(tokenData.expires_at * 1e3).toISOString()
      : extractJwtExpiration(tokenData.access_token);
    const newAuthData = {
      token: tokenData.access_token,
      email: tokenData.user.email,
      workosId: tokenData.user.id,
      refreshToken: tokenData.refresh_token,
      expiresAt
    };
    saveAuthData(newAuthData);
    return newAuthData;
  } catch (error) {
    if (error instanceof AuthError) throw error;
    throw new AuthError(`Failed to refresh token: ${error instanceof Error ? error.message : "Unknown error"}`);
  }
}
2149
+
2150
+ //#endregion
2151
+ //#region src/agents.ts
2152
+ /**
2153
+ * Agent Registry & Auto-Detection
2154
+ *
2155
+ * Centralized registry of supported AI coding agents with their
2156
+ * skill directory locations and detection functions.
2157
+ */
2158
/**
 * Registry of supported AI coding agents, keyed by agent identifier.
 *
 * Each entry records where the agent looks for skills (project-relative
 * `skillsDir` and tilde-prefixed `globalSkillsDir`) plus a `detectInstalled`
 * probe that checks for the agent's config directory on disk.
 *
 * NOTE: key order matters - detectInstalledAgents() reports agents in this
 * order. NOTE(review): amp's skillsDir/globalSkillsDir use the shared
 * ".agents"/"agents" locations while detection probes "~/.config/amp" -
 * presumably intentional (Amp reads the shared agents dir); confirm.
 */
const agents = {
  opencode: {
    name: "opencode",
    displayName: "OpenCode",
    skillsDir: ".opencode/skills",
    globalSkillsDir: "~/.config/opencode/skills",
    detectInstalled: () => existsSync(expandTilde("~/.config/opencode"))
  },
  "claude-code": {
    name: "claude-code",
    displayName: "Claude Code",
    skillsDir: ".claude/skills",
    globalSkillsDir: "~/.claude/skills",
    detectInstalled: () => existsSync(expandTilde("~/.claude"))
  },
  codex: {
    name: "codex",
    displayName: "Codex (OpenAI)",
    skillsDir: ".codex/skills",
    globalSkillsDir: "~/.codex/skills",
    detectInstalled: () => existsSync(expandTilde("~/.codex"))
  },
  amp: {
    name: "amp",
    displayName: "Amp",
    skillsDir: ".agents/skills",
    globalSkillsDir: "~/.config/agents/skills",
    detectInstalled: () => existsSync(expandTilde("~/.config/amp"))
  },
  antigravity: {
    name: "antigravity",
    displayName: "Antigravity",
    skillsDir: ".agent/skills",
    globalSkillsDir: "~/.gemini/antigravity/skills",
    detectInstalled: () => existsSync(expandTilde("~/.gemini/antigravity"))
  },
  cursor: {
    name: "cursor",
    displayName: "Cursor",
    skillsDir: ".cursor/skills",
    globalSkillsDir: "~/.cursor/skills",
    detectInstalled: () => existsSync(expandTilde("~/.cursor"))
  }
};
2202
/**
 * Detect which agents are installed on the system.
 * Checks for the existence of each agent's config directory.
 *
 * @returns Array of installed agent identifiers, in registry order
 */
function detectInstalledAgents() {
  return Object.values(agents)
    .filter((config) => config.detectInstalled())
    .map((config) => config.name);
}
2213
/**
 * Get the configuration for a specific agent.
 *
 * @param type - Agent identifier (a key of the `agents` registry)
 * @returns AgentConfig for the specified agent (undefined for unknown keys)
 */
function getAgentConfig(type) {
  return agents[type];
}
2222
/**
 * Get all agent configurations as an array.
 *
 * @returns Array of all agent configurations, in registry order
 */
function getAllAgentConfigs() {
  return Object.values(agents);
}
2230
+
2231
+ //#endregion
2232
+ //#region src/generate.ts
2233
+ /**
2234
+ * Simplified AI-only reference generation
2235
+ *
2236
+ * This module provides a streamlined approach to generating reference files
2237
+ * by delegating all codebase exploration to the AI agent via OpenCode.
2238
+ */
2239
/**
 * Expands one raw keyword into a deduplicated list of lowercase search tokens.
 *
 * Variants, in insertion order: the value itself, slash→dash, slash removed,
 * each segment split on whitespace/slash/underscore/dash, and (for scoped
 * names) the value without its leading "@". Tokens shorter than 2 chars are
 * dropped.
 *
 * @param value - Raw keyword (e.g. "@tanstack/query")
 * @returns Ordered array of unique normalized tokens
 */
function normalizeKeyword(value) {
  const base = value.trim().toLowerCase();
  if (!base) return [];
  const candidates = [
    base,
    base.replaceAll("/", "-"),
    base.replaceAll("/", ""),
    ...base.split(/[\s/_-]+/)
  ];
  if (base.startsWith("@")) candidates.push(base.slice(1));
  const seen = new Set();
  for (const candidate of candidates) {
    const cleaned = candidate.trim().toLowerCase();
    if (cleaned.length >= 2) seen.add(cleaned);
  }
  return [...seen];
}
2256
/**
 * Derives search keywords for a repo from its name, the generated reference's
 * top-level heading, and (when present) the package.json name and keywords.
 *
 * @param fullName - Repo full name (e.g. "owner/repo")
 * @param localPath - Local clone directory
 * @param referenceContent - Generated reference markdown
 * @returns Deduplicated keyword list in discovery order
 */
function deriveKeywords(fullName, localPath, referenceContent) {
  const collected = new Set();
  const absorb = (value) => {
    for (const token of normalizeKeyword(value)) collected.add(token);
  };
  absorb(fullName);
  // First "# Heading" in the reference, if any.
  const heading = referenceContent.match(/^#\s+(.+)$/m)?.[1];
  if (heading) absorb(heading);
  const packageJsonPath = join(localPath, "package.json");
  if (existsSync(packageJsonPath)) {
    try {
      const pkg = JSON.parse(readFileSync(packageJsonPath, "utf-8"));
      if (pkg.name) absorb(pkg.name);
      if (Array.isArray(pkg.keywords)) {
        for (const keyword of pkg.keywords) {
          if (typeof keyword === "string") absorb(keyword);
        }
      }
    } catch {
      // Unreadable or invalid package.json: keep name/heading keywords only.
    }
  }
  return Array.from(collected);
}
2275
/**
 * Builds the full prompt sent to the AI agent for reference generation.
 *
 * The template is runtime data consumed by the model - its wording and the
 * <reference_output> tag contract are load-bearing (extractReferenceContent
 * parses those tags), so do not edit it casually.
 *
 * @param referenceName - Internal tracking name injected into the prompt
 * @returns The complete prompt string
 */
function createReferenceGenerationPrompt(referenceName) {
  return `You are an expert at analyzing open source libraries and producing reference documentation for AI coding agents.

## PRIMARY GOAL

Generate a reference markdown file that helps developers USE this library effectively. This is NOT a contribution guide - it's a usage reference for developers consuming this library in their own projects.

## CRITICAL RULES

1. **USER PERSPECTIVE ONLY**: Write for developers who will npm/pip/cargo install this library and use it in THEIR code.
- DO NOT include: how to contribute, internal test commands, repo-specific policies
- DO NOT include: "never mock in tests" or similar internal dev guidelines
- DO NOT include: commands like "npx hereby", "just ready", "bun test" that run the library's own tests
- DO include: how to install, import, configure, and use the public API

2. **NO FRONTMATTER**: Output pure markdown with NO YAML frontmatter. Start directly with the library name heading.

3. **QUICK REFERENCES**: Include a "Quick References" section with paths to key entry points in the repo:
- Paths must be relative from repo root (e.g., \`src/index.ts\`, \`docs/api.md\`)
- Include: main entry point, type definitions, README, key docs
- DO NOT include absolute paths or user-specific paths
- Keep to 3-5 most important files that help users understand the library

4. **PUBLIC API FOCUS**: Document what users import and call, not internal implementation details.
- Entry points: what to import from the package
- Configuration: how to set up/initialize
- Core methods/functions: the main API surface
- Types: key TypeScript interfaces users need

5. **MONOREPO AWARENESS**: Many libraries are monorepos with multiple packages:
- Check for \`packages/\`, \`apps/\`, \`crates/\`, or \`libs/\` directories
- Check root package.json for \`workspaces\` field
- If monorepo: document the package structure and key packages users would install
- Use full paths from repo root (e.g., \`packages/core/src/index.ts\`)
- Identify which packages are publishable vs internal

## EXPLORATION STEPS

Use Read, Grep, Glob tools to explore:
1. Root package.json / Cargo.toml - check for workspaces/monorepo config
2. Check for \`packages/\`, \`apps/\`, \`crates/\` directories
3. README.md - official usage documentation
4. For monorepos: explore each publishable package's entry point
5. docs/ or website/ - find documentation
6. examples/ - real usage patterns
7. TypeScript definitions (.d.ts) - public API surface

## OUTPUT FORMAT

IMPORTANT: Reference name is "${referenceName}" (for internal tracking only - do NOT include in output).

\`\`\`markdown
# {Library Name}

{2-3 sentence overview of what this library does and its key value proposition}

## Quick References

| File | Purpose |
|------|---------|
| \`packages/{pkg}/src/index.ts\` | Main entry point (monorepo example) |
| \`src/index.ts\` | Main entry point (single-package example) |
| \`README.md\` | Documentation |

(For monorepos, include paths to key publishable packages)

## Packages (for monorepos only)

| Package | npm name | Description |
|---------|----------|-------------|
| \`packages/core\` | \`@scope/core\` | Core functionality |
| \`packages/react\` | \`@scope/react\` | React bindings |

(OMIT this section for single-package repos)

## When to Use

- {Practical scenario where a developer would reach for this library}
- {Another real-world use case}
- {Problem this library solves}

## Installation

\`\`\`bash
# Single package
npm install {package-name}

# Monorepo (show key packages)
npm install @scope/core @scope/react
\`\`\`

## Best Practices

1. {Actionable best practice for USERS of this library}
2. {Common mistake to avoid when using this library}
3. {Performance or correctness tip}

## Common Patterns

**{Pattern Name}:**
\`\`\`{language}
{Minimal working code example}
\`\`\`

**{Another Pattern}:**
\`\`\`{language}
{Another code example}
\`\`\`

## API Quick Reference

| Export | Type | Description |
|--------|------|-------------|
| \`{main export}\` | {type} | {what it does} |
| \`{another export}\` | {type} | {what it does} |

{Add more sections as appropriate for the library: Configuration, Types, CLI Commands (if user-facing), etc.}
\`\`\`

## QUALITY CHECKLIST

Before outputting, verify:
- [ ] NO YAML frontmatter - start directly with # heading
- [ ] Every code example is something a USER would write, not a contributor
- [ ] No internal test commands or contribution workflows
- [ ] Quick References paths are relative from repo root (no absolute/user paths)
- [ ] Best practices are for using the library, not developing it
- [ ] If monorepo: Packages section lists publishable packages with npm names
- [ ] If monorepo: paths include package directory (e.g., \`packages/core/src/index.ts\`)

Now explore the codebase and generate the reference content.

CRITICAL: Wrap your final reference output in XML tags exactly like this:
<reference_output>
# {Library Name}
(the complete markdown content with NO frontmatter)
</reference_output>

Output ONLY the reference content inside the tags. No explanations before or after the tags.`;
}
2415
/**
 * Extract the actual reference markdown content from AI response.
 * The response may include echoed prompt/system context before the actual reference.
 * We look for the LAST occurrence of XML tags: <reference_output>...</reference_output>
 * (Using last occurrence avoids extracting example tags from echoed prompt)
 *
 * Also strips an optional ```markdown code fence wrapped around the payload.
 *
 * @param rawResponse - Full text returned by the AI agent
 * @returns Validated reference markdown
 * @throws Error when no tag pair is found or the content fails validation
 */
function extractReferenceContent(rawResponse) {
  const openTag = "<reference_output>";
  const closeTag = "</reference_output>";
  const closeIndex = rawResponse.lastIndexOf(closeTag);
  if (closeIndex !== -1) {
    const openIndex = rawResponse.lastIndexOf(openTag, closeIndex);
    if (openIndex !== -1) {
      // Was a hard-coded offset of 18; derive it from the tag so the two
      // cannot drift apart if the tag ever changes.
      let content = rawResponse.slice(openIndex + openTag.length, closeIndex).trim();
      if (content.startsWith("```")) {
        content = content.replace(/^```(?:markdown)?\s*\n?/, "");
        content = content.replace(/\n?```\s*$/, "");
      }
      content = content.trim();
      validateReferenceContent(content);
      return content;
    }
  }
  throw new Error("Failed to extract reference content: no <reference_output> tags found in AI response. The AI may have failed to follow the output format instructions.");
}
2439
/**
 * Validate extracted reference content has minimum required structure.
 * Throws if content is invalid (too short, or not starting with a heading).
 */
function validateReferenceContent(content) {
  const MIN_LENGTH = 500;
  if (content.length < MIN_LENGTH) {
    throw new Error(`Invalid reference content: too short (${content.length} chars, minimum 500). The AI may have produced placeholder or incomplete content.`);
  }
  if (!content.startsWith("#")) {
    throw new Error("Invalid reference content: must start with markdown heading. Content must begin with '# Library Name' (no YAML frontmatter).");
  }
}
2447
/**
 * Generate a reference markdown file for a repository using AI.
 *
 * Opens an OpenCode session and instructs the AI agent to explore the codebase
 * using Read, Grep, and Glob tools, then produce a comprehensive reference.
 *
 * @param repoPath - Path to the repository to analyze
 * @param repoName - Qualified name of the repo (e.g., "tanstack/query" or "my-local-repo")
 * @param options - Generation options (provider, model, callbacks)
 * @returns The generated reference content and commit SHA
 */
async function generateReferenceWithAI(repoPath, repoName, options = {}) {
  const { provider, model, onDebug, onStream } = options;
  const debug = (message) => onDebug?.(message);
  // Explicit options win; otherwise fall back to config's "provider/model".
  const [configProvider, configModel] = loadConfig().defaultModel?.split("/") ?? [];
  const aiProvider = provider ?? configProvider;
  const aiModel = model ?? configModel;
  debug(`Starting AI reference generation for ${repoName}`);
  debug(`Repo path: ${repoPath}`);
  debug(`Provider: ${aiProvider ?? "default"}, Model: ${aiModel ?? "default"}`);
  const commitSha = getCommitSha(repoPath);
  debug(`Commit SHA: ${commitSha}`);
  const referenceName = toReferenceName(repoName);
  debug(`Reference name: ${referenceName}`);
  const result = await streamPrompt({
    prompt: createReferenceGenerationPrompt(referenceName),
    cwd: repoPath,
    provider: aiProvider,
    model: aiModel,
    onDebug,
    onStream
  });
  debug(`Generation complete (${result.durationMs}ms, ${result.text.length} chars)`);
  const referenceContent = extractReferenceContent(result.text);
  debug(`Extracted reference content (${referenceContent.length} chars)`);
  return { referenceContent, commitSha };
}
2486
/**
 * Ensure a symlink exists, removing any existing file/directory at the path.
 *
 * Uses lstat (not stat) so an existing symlink is unlinked rather than
 * followed; only a real directory gets recursive removal.
 *
 * @param target - Directory the link should point at
 * @param linkPath - Location of the symlink to (re)create
 */
function ensureSymlink(target, linkPath) {
  try {
    const existing = lstatSync(linkPath);
    if (existing.isDirectory() && !existing.isSymbolicLink()) {
      rmSync(linkPath, { recursive: true });
    } else {
      // symlink or regular file
      unlinkSync(linkPath);
    }
  } catch {
    // linkPath does not exist - nothing to clean up
  }
  mkdirSync(join(linkPath, ".."), { recursive: true });
  symlinkSync(target, linkPath, "dir");
}
2499
/**
 * Static template for the global SKILL.md file.
 * This is the single routing skill that all agents see.
 *
 * Written verbatim to ~/.local/share/offworld/skill/offworld/SKILL.md by
 * installGlobalSkill() (only when the file does not already exist). The YAML
 * frontmatter and command examples are consumed by agents at runtime - treat
 * the string as data, not prose to reflow.
 */
const GLOBAL_SKILL_TEMPLATE = `---
name: offworld
description: Routes queries to Offworld reference files. Find and read per-repo references for dependency knowledge.
allowed-tools: Bash(ow:*) Read
---

# Offworld Reference Router

Look up dependency/library documentation via CLI, then read the reference file.

## Usage

**Find a reference:**
\`\`\`bash
ow map search <term> # search by name or keyword
ow map show <repo> # get info for specific repo
\`\`\`

**Get paths for tools:**
\`\`\`bash
ow map show <repo> --ref # reference file path (use with Read)
ow map show <repo> --path # clone directory path
\`\`\`

**Example workflow:**
\`\`\`bash
# 1. Find the repo
ow map search zod

# 2. Get reference path
ow map show colinhacks/zod --ref
# Output: /Users/.../.local/share/offworld/skill/offworld/references/colinhacks-zod.md

# 3. Read the reference with the path from step 2
\`\`\`

## If Reference Not Found

\`\`\`bash
ow pull <owner/repo> # clone + generate reference
ow project init # scan project deps, install references
\`\`\`

## All Commands

| Command | Description |
|---------|-------------|
| \`ow map search <term>\` | Find repos by name/keyword |
| \`ow map show <repo>\` | Show repo info |
| \`ow map show <repo> --ref\` | Print reference file path |
| \`ow map show <repo> --path\` | Print clone directory path |
| \`ow list\` | List all installed repos |
| \`ow pull <repo>\` | Clone + generate reference |

## Notes

- Project map (\`.offworld/map.json\`) takes precedence over global map when present
- Reference files are markdown with API docs, patterns, best practices
- Clone paths useful for exploring source code after reading reference
`;
2563
/**
 * Ensures the global SKILL.md exists and symlinks the offworld/ directory to all agent skill directories.
 *
 * Creates:
 * - ~/.local/share/offworld/skill/offworld/SKILL.md (static routing template)
 * - ~/.local/share/offworld/skill/offworld/assets/ (for map.json)
 * - ~/.local/share/offworld/skill/offworld/references/ (for reference files)
 * - Symlinks entire offworld/ directory to each agent's skill directory
 *
 * SKILL.md is only written when missing, so user edits are preserved.
 */
function installGlobalSkill() {
  const config = loadConfig();
  const requiredDirs = [
    Paths.offworldSkillDir,
    Paths.offworldAssetsDir,
    Paths.offworldReferencesDir
  ];
  for (const dir of requiredDirs) mkdirSync(dir, { recursive: true });
  const skillPath = join(Paths.offworldSkillDir, "SKILL.md");
  if (!existsSync(skillPath)) writeFileSync(skillPath, GLOBAL_SKILL_TEMPLATE, "utf-8");
  // Link the skill dir into every agent the user has configured.
  for (const agentName of config.agents ?? []) {
    const agentConfig = agents[agentName];
    if (!agentConfig) continue;
    const agentSkillDir = expandTilde(join(agentConfig.globalSkillsDir, "offworld"));
    ensureSymlink(Paths.offworldSkillDir, agentSkillDir);
  }
}
2588
/**
 * Install a reference file for a specific repository.
 *
 * Creates:
 * - ~/.local/share/offworld/skill/offworld/references/{owner-repo}.md
 * - ~/.local/share/offworld/meta/{owner-repo}/meta.json
 * - Updates global map with reference info
 *
 * Also migrates legacy map entries keyed as "{provider}:{fullName}" (e.g.
 * "github:owner/repo") into the current "{host}:{fullName}" key, merging
 * their references/keywords and deleting the legacy key.
 *
 * @param qualifiedName - Qualified key for map storage (e.g., "github.com:owner/repo" or "local:name")
 * @param fullName - Full repo name for file naming (e.g., "owner/repo")
 * @param localPath - Absolute path to the cloned repository
 * @param referenceContent - The generated reference markdown content
 * @param meta - Metadata about the generation (referenceUpdatedAt, commitSha, version)
 * @param keywords - Optional array of keywords for search/routing; derived from
 *                   name/heading/package.json when omitted
 */
function installReference(qualifiedName, fullName, localPath, referenceContent, meta, keywords) {
  // Make sure the skill scaffolding and agent symlinks exist first.
  installGlobalSkill();
  const referenceFileName = toReferenceFileName(fullName);
  const metaDirName = toMetaDirName(fullName);
  const referencePath = join(Paths.offworldReferencesDir, referenceFileName);
  mkdirSync(Paths.offworldReferencesDir, { recursive: true });
  writeFileSync(referencePath, referenceContent, "utf-8");
  // Persist generation metadata alongside (but separate from) the reference.
  const metaDir = join(Paths.metaDir, metaDirName);
  mkdirSync(metaDir, { recursive: true });
  const metaJson = JSON.stringify(meta, null, 2);
  writeFileSync(join(metaDir, "meta.json"), metaJson, "utf-8");
  const map = readGlobalMap();
  const existingEntry = map.repos[qualifiedName];
  // Older versions keyed entries by short provider name instead of host.
  const legacyProviderMap = {
    "github.com": "github",
    "gitlab.com": "gitlab",
    "bitbucket.org": "bitbucket"
  };
  const [host] = qualifiedName.split(":");
  const legacyProvider = host ? legacyProviderMap[host] : void 0;
  const legacyQualifiedName = legacyProvider ? `${legacyProvider}:${fullName}` : void 0;
  const legacyEntry = legacyQualifiedName ? map.repos[legacyQualifiedName] : void 0;
  // Merge reference lists from both current and legacy entries, keeping order.
  const references = [...existingEntry?.references ?? [], ...legacyEntry?.references ?? []];
  if (!references.includes(referenceFileName)) references.push(referenceFileName);
  const derivedKeywords = keywords ?? deriveKeywords(fullName, localPath, referenceContent);
  // Union of old (current + legacy) and newly derived keywords.
  const keywordsSet = new Set([
    ...existingEntry?.keywords ?? [],
    ...legacyEntry?.keywords ?? [],
    ...derivedKeywords
  ]);
  map.repos[qualifiedName] = {
    localPath,
    references,
    primary: referenceFileName,
    keywords: Array.from(keywordsSet),
    updatedAt: (/* @__PURE__ */ new Date()).toISOString()
  };
  // Drop the migrated legacy key so future reads see a single entry.
  if (legacyQualifiedName && legacyQualifiedName in map.repos) delete map.repos[legacyQualifiedName];
  writeGlobalMap(map);
}
2643
+
2644
+ //#endregion
2645
+ //#region src/manifest.ts
2646
+ /**
2647
+ * Dependency manifest parsing for multiple package ecosystems
2648
+ */
2649
+ /**
2650
+ * Detects the manifest type in a directory
2651
+ */
2652
/**
 * Detect which package ecosystem a directory belongs to by probing for
 * well-known manifest files. Probe order is significant: package.json wins
 * over pyproject.toml, and pyproject.toml wins over requirements.txt.
 *
 * @param dir - Directory to inspect
 * @returns "npm" | "python" | "rust" | "go" | "unknown"
 */
function detectManifestType(dir) {
	const probes = [
		["package.json", "npm"],
		["pyproject.toml", "python"],
		["Cargo.toml", "rust"],
		["go.mod", "go"],
		["requirements.txt", "python"]
	];
	for (const [manifestFile, ecosystem] of probes) {
		if (existsSync(join(dir, manifestFile))) return ecosystem;
	}
	return "unknown";
}
2660
+ /**
2661
+ * Parses dependencies from manifest files
2662
+ */
2663
/**
 * Parse dependencies from whatever manifest the directory contains.
 * Python prefers pyproject.toml and falls back to requirements.txt.
 *
 * @param dir - Directory containing a manifest file
 * @returns Array of { name, version, dev } entries; [] for unknown ecosystems
 */
function parseDependencies(dir) {
	const manifestType = detectManifestType(dir);
	if (manifestType === "npm") return parsePackageJson(join(dir, "package.json"));
	if (manifestType === "python") {
		const pyprojectPath = join(dir, "pyproject.toml");
		if (existsSync(pyprojectPath)) return parsePyprojectToml(pyprojectPath);
		return parseRequirementsTxt(join(dir, "requirements.txt"));
	}
	if (manifestType === "rust") return parseCargoToml(join(dir, "Cargo.toml"));
	if (manifestType === "go") return parseGoMod(join(dir, "go.mod"));
	return [];
}
2672
+ /**
2673
+ * Parse package.json dependencies
2674
+ */
2675
/**
 * Parse package.json runtime and dev dependencies.
 * Runtime dependencies are listed first, then devDependencies (dev: true).
 *
 * @param path - Path to package.json
 * @returns Array of { name, version, dev }; [] on read/parse failure
 */
function parsePackageJson(path) {
	try {
		const pkg = JSON.parse(readFileSync(path, "utf-8"));
		const sections = [
			[pkg.dependencies, false],
			[pkg.devDependencies, true]
		];
		const result = [];
		for (const [section, dev] of sections) {
			// Guard against malformed manifests where the field is not an object.
			if (!section || typeof section !== "object") continue;
			for (const [name, version] of Object.entries(section)) {
				result.push({ name, version, dev });
			}
		}
		return result;
	} catch {
		return [];
	}
}
2695
+ /**
2696
+ * Parse pyproject.toml dependencies
2697
+ */
2698
/**
 * Parse pyproject.toml dependencies.
 *
 * Reads the PEP 621 `dependencies` array from the `[project]` table:
 *
 *   [project]
 *   dependencies = ["requests>=2.0", "click"]
 *
 * Fix: the previous implementation searched for a `[project.dependencies]`
 * table header, which is not valid PEP 621 (dependencies is an array inside
 * [project]), so it never matched real-world files and always returned [].
 *
 * Limitation (line-oriented, not a full TOML parser): the array scan stops
 * at the first `]`, so requirement strings with extras brackets
 * (e.g. "uvicorn[standard]") end the scan early.
 *
 * @param path - Path to pyproject.toml
 * @returns Array of { name, version, dev: false }; [] on any parse failure
 */
function parsePyprojectToml(path) {
	try {
		const content = readFileSync(path, "utf-8");
		const deps = [];
		// PEP 621: `dependencies = [ ... ]` assignment (inside [project]).
		const arrayMatch = content.match(/^dependencies\s*=\s*\[([\s\S]*?)\]/m);
		if (!arrayMatch?.[1]) return [];
		// Each entry is a quoted requirement like "name>=1.0" or "name";
		// matchAll handles both one-per-line and single-line arrays.
		const entryPattern = /["']([a-zA-Z0-9_-]+)(?:[>=<~!]+([^"']+))?["']/g;
		for (const entry of arrayMatch[1].matchAll(entryPattern)) {
			deps.push({
				name: entry[1],
				version: entry[2]?.trim(),
				dev: false
			});
		}
		return deps;
	} catch {
		return [];
	}
}
2718
+ /**
2719
+ * Parse Cargo.toml dependencies
2720
+ */
2721
/**
 * Parse Cargo.toml dependencies from the `[dependencies]` table.
 *
 * Handles both entry forms:
 *   serde = "1.0"
 *   tokio = { version = "1.0", features = ["full"] }
 *
 * Fix: the previous single-regex approach delimited the section with
 * `(?=\[|$)`, which stopped at the FIRST `[` character anywhere — so an
 * inline array value such as `features = ["derive"]` silently truncated
 * the section and dropped every dependency after it. This version scans
 * line-by-line and tracks the current TOML table header instead.
 *
 * Only the plain `[dependencies]` table is scanned (not dev-dependencies
 * or target-specific tables), matching the original behavior.
 *
 * @param path - Path to Cargo.toml
 * @returns Array of { name, version, dev: false }; [] on any parse failure
 */
function parseCargoToml(path) {
	try {
		const content = readFileSync(path, "utf-8");
		const deps = [];
		let inDependencies = false;
		for (const line of content.split("\n")) {
			// A table header line ends the previous section and starts a new one.
			const header = line.match(/^\s*\[([^\]]+)\]\s*$/);
			if (header) {
				inDependencies = header[1] === "dependencies";
				continue;
			}
			if (!inDependencies) continue;
			const simpleMatch = line.match(/^([a-zA-Z0-9_-]+)\s*=\s*"([^"]+)"/);
			const tableMatch = line.match(/^([a-zA-Z0-9_-]+)\s*=\s*{.*version\s*=\s*"([^"]+)"/);
			if (simpleMatch?.[1] && simpleMatch[2]) deps.push({
				name: simpleMatch[1],
				version: simpleMatch[2],
				dev: false
			});
			else if (tableMatch?.[1] && tableMatch[2]) deps.push({
				name: tableMatch[1],
				version: tableMatch[2],
				dev: false
			});
		}
		return deps;
	} catch {
		return [];
	}
}
2747
+ /**
2748
+ * Parse go.mod dependencies
2749
+ */
2750
/**
 * Parse go.mod dependencies.
 * Supports the `require ( ... )` block form (one "module version" pair per
 * line) and the single-line `require module version` form.
 *
 * @param path - Path to go.mod
 * @returns Array of { name, version, dev: false }; [] on any parse failure
 */
function parseGoMod(path) {
	try {
		const content = readFileSync(path, "utf-8");
		const deps = [];
		const blockForm = content.match(/require\s*\(([\s\S]*?)\)/);
		if (blockForm?.[1]) {
			for (const line of blockForm[1].split("\n")) {
				const pair = line.match(/^\s*([^\s]+)\s+([^\s]+)/);
				if (pair?.[1] && pair[2]) deps.push({
					name: pair[1],
					version: pair[2],
					dev: false
				});
			}
			return deps;
		}
		// No parenthesized block — look for a single-line require.
		const single = content.match(/require\s+([^\s]+)\s+([^\s]+)/);
		if (single?.[1] && single[2]) deps.push({
			name: single[1],
			version: single[2],
			dev: false
		});
		return deps;
	} catch {
		return [];
	}
}
2778
+ /**
2779
+ * Parse requirements.txt dependencies
2780
+ */
2781
/**
 * Parse requirements.txt dependencies.
 * Blank lines and `#` comment lines are skipped; each remaining line is
 * parsed as `name` optionally followed by a version specifier
 * (>=, <=, ==, ~=, !=, etc.).
 *
 * @param path - Path to requirements.txt
 * @returns Array of { name, version, dev: false }; [] on read failure
 */
function parseRequirementsTxt(path) {
	try {
		const entries = [];
		for (const rawLine of readFileSync(path, "utf-8").split("\n")) {
			const requirement = rawLine.trim();
			if (requirement === "" || requirement.startsWith("#")) continue;
			const parsed = requirement.match(/^([a-zA-Z0-9_-]+)(?:[>=<~!]+(.+))?/);
			if (parsed?.[1]) entries.push({
				name: parsed[1],
				version: parsed[2]?.trim(),
				dev: false
			});
		}
		return entries;
	} catch {
		return [];
	}
}
2801
+
2802
+ //#endregion
2803
+ //#region src/dep-mappings.ts
2804
+ /**
2805
+ * Hardcoded mappings for popular packages.
2806
+ * Covers top ecosystems: React, Vue, Svelte, backend, database, validation, styling.
2807
+ */
2808
const KNOWN_MAPPINGS = {
	// UI frameworks & meta-frameworks
	react: "facebook/react",
	"react-dom": "facebook/react",
	next: "vercel/next.js",
	remix: "remix-run/remix",
	"@remix-run/react": "remix-run/remix",
	"@remix-run/node": "remix-run/remix",
	vue: "vuejs/core",
	nuxt: "nuxt/nuxt",
	"@nuxt/kit": "nuxt/nuxt",
	svelte: "sveltejs/svelte",
	sveltekit: "sveltejs/kit",
	"@sveltejs/kit": "sveltejs/kit",
	// TanStack data fetching & routing
	"@tanstack/query": "tanstack/query",
	"@tanstack/react-query": "tanstack/query",
	"@tanstack/vue-query": "tanstack/query",
	"@tanstack/router": "tanstack/router",
	"@tanstack/react-router": "tanstack/router",
	"@tanstack/start": "tanstack/router",
	// State management
	zustand: "pmndrs/zustand",
	jotai: "pmndrs/jotai",
	valtio: "pmndrs/valtio",
	redux: "reduxjs/redux",
	"@reduxjs/toolkit": "reduxjs/redux-toolkit",
	// Backend / server frameworks
	express: "expressjs/express",
	hono: "honojs/hono",
	fastify: "fastify/fastify",
	koa: "koajs/koa",
	nestjs: "nestjs/nest",
	"@nestjs/core": "nestjs/nest",
	// RPC & GraphQL
	trpc: "trpc/trpc",
	"@trpc/server": "trpc/trpc",
	"@trpc/client": "trpc/trpc",
	"@trpc/react-query": "trpc/trpc",
	graphql: "graphql/graphql-js",
	"apollo-server": "apollographql/apollo-server",
	"@apollo/client": "apollographql/apollo-client",
	// ORMs & database clients
	"drizzle-orm": "drizzle-team/drizzle-orm",
	prisma: "prisma/prisma",
	"@prisma/client": "prisma/prisma",
	typeorm: "typeorm/typeorm",
	sequelize: "sequelize/sequelize",
	mongoose: "Automattic/mongoose",
	knex: "knex/knex",
	// Validation
	zod: "colinhacks/zod",
	valibot: "fabian-hiller/valibot",
	yup: "jquense/yup",
	joi: "hapijs/joi",
	// Styling
	tailwindcss: "tailwindlabs/tailwindcss",
	"styled-components": "styled-components/styled-components",
	"@emotion/react": "emotion-js/emotion",
	sass: "sass/sass",
	// Build tools & bundlers
	vite: "vitejs/vite",
	webpack: "webpack/webpack",
	esbuild: "evanw/esbuild",
	rollup: "rollup/rollup",
	"@vitejs/plugin-react": "vitejs/vite-plugin-react",
	// Testing
	vitest: "vitest-dev/vitest",
	jest: "jestjs/jest",
	"@testing-library/react": "testing-library/react-testing-library",
	cypress: "cypress-io/cypress",
	playwright: "microsoft/playwright",
	// Backend platforms & auth
	convex: "get-convex/convex-backend",
	"better-auth": "better-auth/better-auth",
	// Utilities
	lodash: "lodash/lodash",
	"date-fns": "date-fns/date-fns",
	axios: "axios/axios",
	ky: "sindresorhus/ky",
	// Monorepo tooling
	turborepo: "vercel/turborepo",
	nx: "nrwl/nx"
};
2879
+ /**
2880
+ * Parse GitHub repo from various git URL formats.
2881
+ * Handles:
2882
+ * - git+https://github.com/owner/repo.git
2883
+ * - https://github.com/owner/repo
2884
+ * - git://github.com/owner/repo.git
2885
+ * - github:owner/repo
2886
+ */
2887
/**
 * Extract "owner/repo" from a GitHub URL in any common form:
 * git+https://github.com/owner/repo.git, https://github.com/owner/repo,
 * git://github.com/owner/repo.git, or the github:owner/repo shorthand.
 *
 * @param url - Repository URL string
 * @returns "owner/repo" or null when the URL is not a GitHub repo
 */
function parseGitHubUrl(url) {
	const hostForm = /github\.com[/:]([\w-]+)\/([\w.-]+?)(?:\.git)?$/;
	const shorthandForm = /^github:([\w-]+)\/([\w.-]+)$/;
	const hit = url.match(hostForm) ?? url.match(shorthandForm);
	return hit ? `${hit[1]}/${hit[2]}` : null;
}
2894
+ /**
2895
+ * Fallback to npm registry to extract repository.url.
2896
+ * Returns null if package not found, no repo field, or not a GitHub repo.
2897
+ */
2898
/**
 * Fall back to the npm registry to find a package's GitHub repo via its
 * repository.url metadata field.
 *
 * @param packageName - npm package name
 * @returns "owner/repo" or null (package missing, no repo field, not GitHub,
 *          or any network/parse failure)
 */
async function resolveFromNpm(packageName) {
	try {
		const response = await fetch(`https://registry.npmjs.org/${packageName}`);
		if (!response.ok) return null;
		const metadata = await response.json();
		const repoUrl = metadata.repository?.url;
		return repoUrl ? parseGitHubUrl(repoUrl) : null;
	} catch {
		return null;
	}
}
2909
+ /**
2910
+ * Three-tier resolution:
2911
+ * 1. Check KNOWN_MAPPINGS
2912
+ * 2. Query npm registry
2913
+ * 3. Return unknown (caller prompts user)
2914
+ */
2915
/**
 * Resolve a dependency name to its GitHub repo using three tiers:
 * 1. the hardcoded KNOWN_MAPPINGS table,
 * 2. the npm registry's repository metadata,
 * 3. unknown (caller prompts the user).
 *
 * @param dep - Dependency/package name
 * @returns { dep, repo, source } where source is "known" | "npm" | "unknown"
 */
async function resolveDependencyRepo(dep) {
	if (dep in KNOWN_MAPPINGS) {
		return {
			dep,
			repo: KNOWN_MAPPINGS[dep] ?? null,
			source: "known"
		};
	}
	const npmRepo = await resolveFromNpm(dep);
	if (npmRepo) {
		return {
			dep,
			repo: npmRepo,
			source: "npm"
		};
	}
	return {
		dep,
		repo: null,
		source: "unknown"
	};
}
2933
+
2934
+ //#endregion
2935
+ //#region src/reference-matcher.ts
2936
+ /**
2937
+ * Reference matching utilities for dependency resolution
2938
+ *
2939
+ * Maps dependencies to their reference status (installed, available, generate, unknown)
2940
+ */
2941
+ /**
2942
+ * Check if a reference is installed locally.
2943
+ * A reference is considered installed if {owner-repo}.md exists in offworld/references/.
2944
+ *
2945
+ * @param repo - Repo name in owner/repo format
2946
+ * @returns true if reference is installed locally
2947
+ */
2948
/**
 * Check whether a reference for the given repo is installed locally, i.e.
 * whether {owner-repo}.md exists in offworld/references/.
 *
 * @param repo - Repo name in owner/repo format
 * @returns true if the reference markdown file exists
 */
function isReferenceInstalled(repo) {
	const fileName = toReferenceFileName(repo);
	const installedPath = join(Paths.offworldReferencesDir, fileName);
	return existsSync(installedPath);
}
2952
+ /**
2953
+ * Match dependencies to their reference availability status.
2954
+ *
2955
+ * Status logic:
2956
+ * - installed: {owner-repo}.md exists in offworld/references/
2957
+ * - available: Has valid GitHub repo (can be cloned)
2958
+ * - generate: Has valid GitHub repo but will need AI generation
2959
+ * - unknown: No GitHub repo found
2960
+ *
2961
+ * Note: Since we don't have a reference registry/index, "available" and "generate"
2962
+ * are the same. We use "available" for consistency and reserve "generate" for
2963
+ * future use when we can distinguish pre-existing vs needs-generation.
2964
+ *
2965
+ * @param resolvedDeps - Array of resolved dependencies with repo info
2966
+ * @returns Array of reference matches with status
2967
+ */
2968
/**
 * Map resolved dependencies to their reference availability status.
 *
 * Status logic:
 * - "unknown":   no GitHub repo could be resolved
 * - "installed": the reference markdown already exists locally
 * - "available": a repo exists, so a reference can be cloned/generated
 *
 * @param resolvedDeps - Array of { dep, repo, source } from resolution
 * @returns Array of { dep, repo, status, source }
 */
function matchDependenciesToReferences(resolvedDeps) {
	return resolvedDeps.map((resolved) => {
		if (!resolved.repo) {
			return {
				dep: resolved.dep,
				repo: null,
				status: "unknown",
				source: resolved.source
			};
		}
		const status = isReferenceInstalled(resolved.repo) ? "installed" : "available";
		return {
			dep: resolved.dep,
			repo: resolved.repo,
			status,
			source: resolved.source
		};
	});
}
2990
+
2991
+ //#endregion
2992
+ //#region src/agents-md.ts
2993
+ /**
2994
+ * AGENTS.md manipulation utilities
2995
+ *
2996
+ * Manages updating project AGENTS.md and agent-specific files with reference information.
2997
+ */
2998
+ /**
2999
+ * Generate markdown table for project references section.
3000
+ *
3001
+ * @param references - Array of installed references
3002
+ * @returns Markdown string with table
3003
+ */
3004
/**
 * Generate the "## Project References" markdown section: a table of
 * dependency/reference/path rows followed by update instructions.
 *
 * @param references - Array of { dependency, reference, path }
 * @returns Markdown string (ends with a trailing newline)
 */
function generateReferencesTable(references) {
	const lines = [
		"## Project References",
		"",
		"References installed for this project's dependencies:",
		"",
		"| Dependency | Reference | Path |",
		"| --- | --- | --- |"
	];
	for (const reference of references) lines.push(`| ${reference.dependency} | ${reference.reference} | ${reference.path} |`);
	lines.push("");
	lines.push("To update references: `ow pull <dependency>`");
	lines.push("To regenerate all: `ow project init --all --generate`");
	lines.push("");
	return lines.join("\n");
}
/**
 * Update or append the Project References section in a markdown file.
 * If the section exists its entire body is replaced; otherwise the section
 * is appended to the end of the file. The file is created when missing.
 *
 * Fix: the previous section regex used a multiline `$`, which matches at the
 * blank line immediately after the heading — so only the heading line was
 * replaced and the stale table body was left in place, duplicating content
 * on every run. The regex now consumes every line up to the next `## `
 * heading or the end of the file.
 *
 * @param filePath - Path to the markdown file (AGENTS.md / CLAUDE.md)
 * @param references - Array of installed references
 */
function appendReferencesSection(filePath, references) {
	const content = existsSync(filePath) ? readFileSync(filePath, "utf-8") : "";
	const referencesMarkdown = generateReferencesTable(references);
	// Match the heading plus all following lines until the next H2 or EOF.
	const sectionRegex = /^## Project References\n(?:(?!^## ).*(?:\n|$))*/m;
	const match = content.match(sectionRegex);
	let updatedContent;
	if (match) updatedContent = content.replace(sectionRegex, referencesMarkdown);
	else updatedContent = content.trim() + "\n\n" + referencesMarkdown;
	writeFileSync(filePath, updatedContent, "utf-8");
}
3037
+ /**
3038
+ * Update AGENTS.md and agent-specific files with project references.
3039
+ * Creates files if they don't exist.
3040
+ *
3041
+ * @param projectRoot - Project root directory
3042
+ * @param references - Array of installed references to document
3043
+ */
3044
function updateAgentFiles(projectRoot, references) {
	const agentsMdPath = join(projectRoot, "AGENTS.md");
	const claudeMdPath = join(projectRoot, "CLAUDE.md");
	// AGENTS.md is always written (appendReferencesSection starts from "" when the file is absent).
	appendReferencesSection(agentsMdPath, references);
	// CLAUDE.md is only refreshed when the project already has one — it is never created here.
	if (existsSync(claudeMdPath)) appendReferencesSection(claudeMdPath, references);
}
3050
+
3051
+ //#endregion
3052
+ //#region src/repo-manager.ts
3053
/**
 * Recursively compute the total size in bytes of all regular files under a
 * directory. Symlinks and other special entries are ignored; unreadable
 * entries are skipped silently. Returns 0 for a missing directory.
 *
 * @param dirPath - Directory to measure
 * @returns Total size in bytes
 */
function getDirSize(dirPath) {
	if (!existsSync(dirPath)) return 0;
	let total = 0;
	try {
		for (const dirent of readdirSync(dirPath, { withFileTypes: true })) {
			const entryPath = join(dirPath, dirent.name);
			if (dirent.isDirectory()) {
				total += getDirSize(entryPath);
			} else if (dirent.isFile()) {
				try {
					total += statSync(entryPath).size;
				} catch {
					// File vanished or is unreadable between readdir and stat; skip it.
				}
			}
		}
	} catch {
		// Directory unreadable; report whatever was accumulated so far.
	}
	return total;
}
3068
/**
 * Best-effort "last used" timestamp for a cloned repo: the directory's own
 * mtime, or the mtime of .git/FETCH_HEAD when that is newer (FETCH_HEAD is
 * rewritten by git fetch/pull). Returns null when the directory is missing
 * or its stat fails before any timestamp was read.
 *
 * @param dirPath - Repo directory
 * @returns Date of last activity, or null
 */
function getLastAccessTime(dirPath) {
	if (!existsSync(dirPath)) return null;
	let newest = null;
	try {
		newest = statSync(dirPath).mtime;
		const fetchHeadPath = join(dirPath, ".git", "FETCH_HEAD");
		if (existsSync(fetchHeadPath)) {
			const fetchHeadMtime = statSync(fetchHeadPath).mtime;
			if (!newest || fetchHeadMtime > newest) newest = fetchHeadMtime;
		}
	} catch {
		// Stat races are non-fatal: keep whatever timestamp was already read.
	}
	return newest;
}
3081
/**
 * Case-insensitive glob-style match: `*` matches any run of characters and
 * `?` matches exactly one; all other characters are literal. An empty or
 * missing pattern, or a bare "*", matches everything.
 *
 * @param name - String to test (e.g. a qualified repo name)
 * @param pattern - Glob pattern
 * @returns true when the whole name matches the pattern
 */
function matchesPattern(name, pattern) {
	if (!pattern || pattern === "*") return true;
	// Escape regex metacharacters, then translate the glob wildcards.
	const escaped = pattern.replace(/[.+^${}()|[\]\\]/g, "\\$&");
	const regexSource = escaped.replace(/\*/g, ".*").replace(/\?/g, ".");
	return new RegExp(`^${regexSource}$`, "i").test(name);
}
3085
/** Give the event loop a turn so long synchronous scans stay responsive. */
const yieldToEventLoop = () => new Promise((settle) => setImmediate(settle));
3086
/**
 * Summarize the state of every repo in the global map: how many exist, how
 * many clones are missing from disk, how many have at least one generated
 * reference, and the total bytes used on disk.
 *
 * Yields to the event loop between repos so a caller's progress UI can
 * repaint during the (potentially slow) directory-size walks.
 *
 * @param options.onProgress - Called as (current, total, qualifiedName) before each repo is examined
 * @returns { total, withReference, missing, diskBytes }
 */
async function getRepoStatus(options = {}) {
	const { onProgress } = options;
	const map = readGlobalMap();
	const qualifiedNames = Object.keys(map.repos);
	const total = qualifiedNames.length;
	let withReference = 0;
	let missing = 0;
	let diskBytes = 0;
	for (let i = 0; i < qualifiedNames.length; i++) {
		const qualifiedName = qualifiedNames[i];
		const entry = map.repos[qualifiedName];
		onProgress?.(i + 1, total, qualifiedName);
		await yieldToEventLoop();
		// A repo whose clone vanished counts as missing and contributes no disk usage.
		if (!existsSync(entry.localPath)) {
			missing++;
			continue;
		}
		if (entry.references.length > 0) withReference++;
		diskBytes += getDirSize(entry.localPath);
	}
	return {
		total,
		withReference,
		missing,
		diskBytes
	};
}
3113
/**
 * Fetch updates for every cloned repo in the global map.
 *
 * Repos may be filtered with a glob-style pattern on the qualified name
 * (see matchesPattern). Clones missing from disk are skipped. In dry-run
 * mode matching repos are reported as "would update" without running git.
 *
 * @param options.pattern - Optional glob filter on qualified names
 * @param options.dryRun - Report what would be updated without running git
 * @param options.unshallow - Ask updateRepo to convert shallow clones to full clones
 * @param options.onProgress - Called as (qualifiedName, status, detail?) per repo
 * @returns { updated, skipped, unshallowed, errors }
 */
async function updateAllRepos(options = {}) {
	const { pattern, dryRun = false, unshallow = false, onProgress } = options;
	const map = readGlobalMap();
	const qualifiedNames = Object.keys(map.repos);
	const updated = [];
	const skipped = [];
	const unshallowed = [];
	const errors = [];
	for (const qualifiedName of qualifiedNames) {
		const entry = map.repos[qualifiedName];
		if (pattern && !matchesPattern(qualifiedName, pattern)) continue;
		if (!existsSync(entry.localPath)) {
			skipped.push(qualifiedName);
			onProgress?.(qualifiedName, "skipped", "missing on disk");
			continue;
		}
		if (dryRun) {
			updated.push(qualifiedName);
			onProgress?.(qualifiedName, "updated", "would update");
			continue;
		}
		onProgress?.(qualifiedName, "updating");
		try {
			const result = await updateRepo(qualifiedName, { unshallow });
			if (result.unshallowed) {
				unshallowed.push(qualifiedName);
				onProgress?.(qualifiedName, "unshallowed", "converted to full clone");
			}
			if (result.updated) {
				updated.push(qualifiedName);
				onProgress?.(qualifiedName, "updated", `${result.previousSha.slice(0, 7)} → ${result.currentSha.slice(0, 7)}`);
			} else if (!result.unshallowed) {
				// Only report "up to date" when neither an update nor an unshallow happened.
				skipped.push(qualifiedName);
				onProgress?.(qualifiedName, "skipped", "already up to date");
			}
		} catch (err) {
			// Git failures are collected per-repo so one bad remote doesn't abort the run.
			const message = err instanceof GitError ? err.message : String(err);
			errors.push({
				repo: qualifiedName,
				error: message
			});
			onProgress?.(qualifiedName, "error", message);
		}
	}
	return {
		updated,
		skipped,
		unshallowed,
		errors
	};
}
3164
/**
 * Clean up the global map and report orphaned clones, in two passes:
 * 1. Map entries whose localPath no longer exists are removed from the
 *    index (reported only in dry-run mode).
 * 2. The repo root is walked as provider/owner/repo; any git clone found
 *    there that no map entry points to is reported as an orphaned
 *    directory. Orphans are never deleted here — only reported.
 *
 * @param options.dryRun - Report without mutating the global map
 * @param options.onProgress - Called as (name, reason) per affected repo
 * @returns { removedFromIndex, orphanedDirs }
 */
async function pruneRepos(options = {}) {
	const { dryRun = false, onProgress } = options;
	const map = readGlobalMap();
	const qualifiedNames = Object.keys(map.repos);
	const removedFromIndex = [];
	const orphanedDirs = [];
	for (const qualifiedName of qualifiedNames) {
		const entry = map.repos[qualifiedName];
		await yieldToEventLoop();
		if (!existsSync(entry.localPath)) {
			onProgress?.(qualifiedName, "missing on disk");
			removedFromIndex.push(qualifiedName);
			if (!dryRun) removeGlobalMapEntry(qualifiedName);
		}
	}
	const repoRoot = getRepoRoot(loadConfig());
	if (existsSync(repoRoot)) {
		// NOTE: `map` is the pre-prune snapshot, so just-removed entries still
		// appear here — but their paths don't exist on disk, so they can't mask orphans.
		const indexedPaths = new Set(Object.values(map.repos).map((r) => r.localPath));
		try {
			const providers = readdirSync(repoRoot, { withFileTypes: true });
			for (const provider of providers) {
				if (!provider.isDirectory()) continue;
				const providerPath = join(repoRoot, provider.name);
				const owners = readdirSync(providerPath, { withFileTypes: true });
				for (const owner of owners) {
					if (!owner.isDirectory()) continue;
					const ownerPath = join(providerPath, owner.name);
					const repoNames = readdirSync(ownerPath, { withFileTypes: true });
					for (const repoName of repoNames) {
						await yieldToEventLoop();
						if (!repoName.isDirectory()) continue;
						const repoPath = join(ownerPath, repoName.name);
						// Only directories that look like git clones count as repos.
						if (!existsSync(join(repoPath, ".git"))) continue;
						if (!indexedPaths.has(repoPath)) {
							const fullName = `${owner.name}/${repoName.name}`;
							onProgress?.(fullName, "not in map");
							orphanedDirs.push(repoPath);
						}
					}
				}
			}
		} catch {}
	}
	return {
		removedFromIndex,
		orphanedDirs
	};
}
3212
/**
 * Garbage-collect cloned repos by age and/or missing references.
 *
 * A repo is removed when it was last touched before the cutoff
 * (olderThanDays, based on getLastAccessTime) or, with withoutReference,
 * when it has no generated references. Removal deletes the clone, its
 * reference markdown files, its meta directory, and its global-map entry.
 * In dry-run mode candidates are reported (and counted in freedBytes) but
 * nothing is deleted.
 *
 * @param options.olderThanDays - Age cutoff in days
 * @param options.withoutReference - Also remove repos with zero references
 * @param options.dryRun - Report candidates without deleting anything
 * @param options.onProgress - Called as (qualifiedName, reason, sizeBytes)
 * @returns { removed, freedBytes }
 */
async function gcRepos(options = {}) {
	const { olderThanDays, withoutReference = false, dryRun = false, onProgress } = options;
	const map = readGlobalMap();
	const qualifiedNames = Object.keys(map.repos);
	const removed = [];
	let freedBytes = 0;
	const now = /* @__PURE__ */ new Date();
	const cutoffDate = olderThanDays ? /* @__PURE__ */ new Date(now.getTime() - olderThanDays * 24 * 60 * 60 * 1e3) : null;
	for (const qualifiedName of qualifiedNames) {
		const entry = map.repos[qualifiedName];
		await yieldToEventLoop();
		if (!existsSync(entry.localPath)) continue;
		let shouldRemove = false;
		let reason = "";
		if (cutoffDate) {
			const lastAccess = getLastAccessTime(entry.localPath);
			if (lastAccess && lastAccess < cutoffDate) {
				shouldRemove = true;
				reason = `not accessed in ${olderThanDays}+ days`;
			}
		}
		if (withoutReference && entry.references.length === 0) {
			shouldRemove = true;
			reason = reason ? `${reason}, no reference` : "no reference";
		}
		if (!shouldRemove) continue;
		// Measure before deletion so freedBytes is accurate in both modes.
		const sizeBytes = getDirSize(entry.localPath);
		onProgress?.(qualifiedName, reason, sizeBytes);
		if (!dryRun) {
			rmSync(entry.localPath, {
				recursive: true,
				force: true
			});
			// Delete the generated reference markdown files for this repo.
			for (const refFile of entry.references) {
				const refPath = join(Paths.offworldReferencesDir, refFile);
				if (existsSync(refPath)) rmSync(refPath, { force: true });
			}
			// The meta directory is named after the primary reference file (sans .md).
			if (entry.primary) {
				const metaDirName = entry.primary.replace(/\.md$/, "");
				const metaPath = join(Paths.metaDir, metaDirName);
				if (existsSync(metaPath)) rmSync(metaPath, {
					recursive: true,
					force: true
				});
			}
			removeGlobalMapEntry(qualifiedName);
		}
		removed.push({
			repo: qualifiedName,
			reason,
			sizeBytes
		});
		freedBytes += sizeBytes;
	}
	return {
		removed,
		freedBytes
	};
}
3271
/**
 * Scan the repo root for git clones that are not yet in the global map and
 * index them.
 *
 * The on-disk layout is repoRoot/<provider>/<owner>/<repo>. Short provider
 * directory names (github/gitlab/bitbucket) are normalized to host names
 * for the qualified key. Newly discovered repos are registered with empty
 * references/keywords. In dry-run mode they are reported but the map is
 * not written.
 *
 * @param options.repoRoot - Override the configured repo root
 * @param options.dryRun - Report without writing to the global map
 * @param options.onProgress - Called as (fullName, providerHost) per new repo
 * @returns { discovered, alreadyIndexed }
 */
async function discoverRepos(options = {}) {
	const { dryRun = false, onProgress } = options;
	const config = loadConfig();
	const repoRoot = options.repoRoot ?? getRepoRoot(config);
	const discovered = [];
	let alreadyIndexed = 0;
	if (!existsSync(repoRoot)) return {
		discovered,
		alreadyIndexed
	};
	const map = readGlobalMap();
	const indexedPaths = new Set(Object.values(map.repos).map((r) => r.localPath));
	try {
		const providers = readdirSync(repoRoot, { withFileTypes: true });
		for (const provider of providers) {
			if (!provider.isDirectory()) continue;
			const providerPath = join(repoRoot, provider.name);
			// Normalize short provider dir names to host names; unknown providers pass through as-is.
			const providerHost = {
				github: "github.com",
				gitlab: "gitlab.com",
				bitbucket: "bitbucket.org"
			}[provider.name] ?? provider.name;
			const owners = readdirSync(providerPath, { withFileTypes: true });
			for (const owner of owners) {
				if (!owner.isDirectory()) continue;
				const ownerPath = join(providerPath, owner.name);
				const repoNames = readdirSync(ownerPath, { withFileTypes: true });
				for (const repoName of repoNames) {
					// Yield so very large trees don't starve the event loop.
					await yieldToEventLoop();
					if (!repoName.isDirectory()) continue;
					const repoPath = join(ownerPath, repoName.name);
					// Only directories containing .git are treated as clones.
					if (!existsSync(join(repoPath, ".git"))) continue;
					if (indexedPaths.has(repoPath)) {
						alreadyIndexed++;
						continue;
					}
					const fullName = `${owner.name}/${repoName.name}`;
					const qualifiedName = `${providerHost}:${fullName}`;
					onProgress?.(fullName, providerHost);
					if (!dryRun) upsertGlobalMapEntry(qualifiedName, {
						localPath: repoPath,
						references: [],
						primary: "",
						keywords: [],
						updatedAt: (/* @__PURE__ */ new Date()).toISOString()
					});
					discovered.push({
						fullName,
						qualifiedName,
						localPath: repoPath
					});
				}
			}
		}
	} catch {}
	return {
		discovered,
		alreadyIndexed
	};
}
3331
+
3332
+ //#endregion
3333
+ //#region src/models.ts
3334
const MODELS_DEV_URL = "https://models.dev/api.json";
// Module-level cache of the models.dev payload, shared across calls.
let cachedData = null;
let cacheTime = 0;
// Cached payload is reused for 5 minutes (300 s) before re-fetching.
const CACHE_TTL_MS = 300 * 1e3;
3338
+ /**
3339
+ * Fetch raw data from models.dev with caching
3340
+ */
3341
async function fetchModelsDevData() {
	const now = Date.now();
	// Serve from the module-level cache while it is still fresh.
	if (cachedData && now - cacheTime < CACHE_TTL_MS) return cachedData;
	// 10-second timeout so a hung models.dev request can't block the CLI indefinitely.
	const res = await fetch(MODELS_DEV_URL, { signal: AbortSignal.timeout(1e4) });
	if (!res.ok) throw new Error(`Failed to fetch models.dev: ${res.status} ${res.statusText}`);
	cachedData = await res.json();
	cacheTime = now;
	return cachedData;
}
3350
+ /**
3351
+ * List all available providers from models.dev
3352
+ */
3353
/**
 * List all providers known to models.dev as { id, name, env } summaries,
 * sorted alphabetically by display name.
 *
 * @returns Array of provider summaries
 */
async function listProviders() {
	const providers = Object.values(await fetchModelsDevData());
	const summaries = providers.map((provider) => ({
		id: provider.id,
		name: provider.name,
		env: provider.env
	}));
	summaries.sort((a, b) => a.name.localeCompare(b.name));
	return summaries;
}
3361
+ /**
3362
+ * Get a specific provider with all its models
3363
+ */
3364
/**
 * Get a specific provider with its non-deprecated models, each sorted by
 * display name.
 *
 * @param providerId - models.dev provider identifier
 * @returns { id, name, env, models } or null when the provider is unknown
 */
async function getProvider(providerId) {
	const data = await fetchModelsDevData();
	const provider = data[providerId];
	if (!provider) return null;
	const models = Object.values(provider.models)
		.filter((model) => model.status !== "deprecated")
		.map((model) => ({
			id: model.id,
			name: model.name,
			reasoning: model.reasoning,
			experimental: model.experimental,
			status: model.status
		}))
		.sort((a, b) => a.name.localeCompare(b.name));
	return {
		id: provider.id,
		name: provider.name,
		env: provider.env,
		models
	};
}
3380
+ /**
3381
+ * Get all providers with their models
3382
+ */
3383
/**
 * Get all providers together with their non-deprecated models; both the
 * provider list and each model list are sorted by display name.
 *
 * @returns Array of { id, name, env, models }
 */
async function listProvidersWithModels() {
	const data = await fetchModelsDevData();
	const result = [];
	for (const provider of Object.values(data)) {
		const models = Object.values(provider.models)
			.filter((model) => model.status !== "deprecated")
			.map((model) => ({
				id: model.id,
				name: model.name,
				reasoning: model.reasoning,
				experimental: model.experimental,
				status: model.status
			}))
			.sort((a, b) => a.name.localeCompare(b.name));
		result.push({
			id: provider.id,
			name: provider.name,
			env: provider.env,
			models
		});
	}
	return result.sort((a, b) => a.name.localeCompare(b.name));
}
3398
+ /**
3399
+ * Validate that a provider/model combination exists
3400
+ */
3401
/**
 * Validate that a provider/model combination exists on models.dev.
 * Error messages list up to 10 valid alternatives.
 *
 * @param providerId - models.dev provider identifier
 * @param modelId - Model identifier within that provider
 * @returns { valid: true } or { valid: false, error }
 */
async function validateProviderModel(providerId, modelId) {
	const provider = await getProvider(providerId);
	if (!provider) {
		const providers = await listProviders();
		const sample = providers.slice(0, 10).map((p) => p.id).join(", ");
		const suffix = providers.length > 10 ? "..." : "";
		return {
			valid: false,
			error: `Provider "${providerId}" not found. Available: ${sample}${suffix}`
		};
	}
	const modelExists = provider.models.some((m) => m.id === modelId);
	if (!modelExists) {
		const sample = provider.models.slice(0, 10).map((m) => m.id).join(", ");
		const suffix = provider.models.length > 10 ? "..." : "";
		return {
			valid: false,
			error: `Model "${modelId}" not found for provider "${providerId}". Available: ${sample}${suffix}`
		};
	}
	return { valid: true };
}
3416
+
3417
+ //#endregion
3418
+ //#region src/installation.ts
3419
+ /**
3420
+ * Installation utilities for upgrade/uninstall commands
3421
+ */
3422
// GitHub repository used for release lookups (curl/brew upgrade path).
const GITHUB_REPO = "oscabriel/offworld";
// Package name on the npm registry (npm/pnpm/bun install paths).
const NPM_PACKAGE = "offworld";
3424
+ /**
3425
+ * Detect how offworld was installed
3426
+ */
3427
/**
 * Detect how offworld was installed: "curl", "npm", "pnpm", "bun", "brew",
 * or "unknown".
 *
 * Heuristics, in order:
 * 1. A binary under ~/.local/bin implies the curl install script.
 * 2. If process.execPath hints at a specific package manager, confirm the
 *    hint against that manager's global package list.
 * 3. Otherwise probe every known manager and return the first that lists
 *    the package.
 */
function detectInstallMethod() {
	const execPath = process.execPath;
	if (execPath.includes(".local/bin")) return "curl";
	// Each check shells out to the manager's global listing; failures
	// (manager not installed, command error) are treated as "not this one".
	const checks = [
		{
			name: "npm",
			test: () => {
				try {
					return execSync("npm list -g --depth=0 2>/dev/null", { encoding: "utf-8" }).includes(NPM_PACKAGE);
				} catch {
					return false;
				}
			}
		},
		{
			name: "pnpm",
			test: () => {
				try {
					return execSync("pnpm list -g --depth=0 2>/dev/null", { encoding: "utf-8" }).includes(NPM_PACKAGE);
				} catch {
					return false;
				}
			}
		},
		{
			name: "bun",
			test: () => {
				try {
					return execSync("bun pm ls -g 2>/dev/null", { encoding: "utf-8" }).includes(NPM_PACKAGE);
				} catch {
					return false;
				}
			}
		},
		{
			name: "brew",
			test: () => {
				try {
					// brew exits non-zero when the formula isn't installed; success means installed.
					execSync("brew list --formula offworld 2>/dev/null", { encoding: "utf-8" });
					return true;
				} catch {
					return false;
				}
			}
		}
	];
	// Fast path: trust the execPath hint, but verify before returning it.
	if (execPath.includes("npm")) {
		if (checks.find((c) => c.name === "npm")?.test()) return "npm";
	}
	if (execPath.includes("pnpm")) {
		if (checks.find((c) => c.name === "pnpm")?.test()) return "pnpm";
	}
	if (execPath.includes("bun")) {
		if (checks.find((c) => c.name === "bun")?.test()) return "bun";
	}
	if (execPath.includes("Cellar") || execPath.includes("homebrew")) {
		if (checks.find((c) => c.name === "brew")?.test()) return "brew";
	}
	// Slow path: probe every manager in order.
	for (const check of checks) if (check.test()) return check.name;
	return "unknown";
}
3488
+ /**
3489
+ * Get current installed version
3490
+ */
3491
function getCurrentVersion() {
	// Single source of truth is the VERSION constant (kept in sync with package.json).
	return VERSION;
}
3494
+ /**
3495
+ * Fetch latest version from appropriate source
3496
+ */
3497
/**
 * Fetch the latest published version from the source that matches the
 * install method: the npm registry for npm/pnpm/bun installs, otherwise
 * the latest GitHub release (tag with any leading "v" stripped).
 *
 * @param method - Install method; detected automatically when omitted
 * @returns Latest version string, or null on any failure
 */
async function fetchLatestVersion(method) {
	const installMethod = method ?? detectInstallMethod();
	const viaNpmRegistry = installMethod === "npm" || installMethod === "pnpm" || installMethod === "bun";
	try {
		if (viaNpmRegistry) {
			const response = await fetch(`https://registry.npmjs.org/${NPM_PACKAGE}/latest`);
			if (!response.ok) return null;
			const body = await response.json();
			return body.version ?? null;
		}
		const response = await fetch(`https://api.github.com/repos/${GITHUB_REPO}/releases/latest`, { headers: {
			Accept: "application/vnd.github.v3+json",
			"User-Agent": "offworld-cli"
		} });
		if (!response.ok) return null;
		const release = await response.json();
		return release.tag_name?.replace(/^v/, "") ?? null;
	} catch {
		return null;
	}
}
3515
+ /**
3516
+ * Execute upgrade for given method
3517
+ */
3518
/**
 * Run the upgrade command appropriate for the given install method.
 * Resolves when the spawned process exits 0; rejects on a non-zero exit,
 * a spawn failure, or an unknown install method.
 *
 * @param method - Install method from detectInstallMethod()
 * @param version - Target version to install
 * @returns Promise that settles when the upgrade process finishes
 */
function executeUpgrade(method, version) {
	return new Promise((resolve, reject) => {
		let cmd;
		let args;
		switch (method) {
			case "curl":
				// Re-run the hosted install script pinned to the requested version.
				cmd = "bash";
				args = ["-c", `curl -fsSL https://offworld.sh/install | VERSION=${version} bash`];
				break;
			case "npm":
				cmd = "npm";
				args = [
					"install",
					"-g",
					`${NPM_PACKAGE}@${version}`
				];
				break;
			case "pnpm":
				cmd = "pnpm";
				args = [
					"install",
					"-g",
					`${NPM_PACKAGE}@${version}`
				];
				break;
			case "bun":
				cmd = "bun";
				args = [
					"install",
					"-g",
					`${NPM_PACKAGE}@${version}`
				];
				break;
			case "brew":
				// brew upgrades to its own latest formula; the version argument is not used here.
				cmd = "brew";
				args = ["upgrade", "offworld"];
				break;
			default:
				reject(/* @__PURE__ */ new Error(`Cannot upgrade: unknown installation method`));
				return;
		}
		// Inherit stdio so the package manager's own progress output is visible.
		const proc = spawn(cmd, args, { stdio: "inherit" });
		proc.on("close", (code) => {
			if (code === 0) resolve();
			else reject(/* @__PURE__ */ new Error(`Upgrade failed with exit code ${code}`));
		});
		proc.on("error", reject);
	});
}
3567
/**
 * Remove the CLI installed via the given method.
 *
 * - "curl" installs are a single binary at ~/.local/bin/ow; it is deleted
 *   directly (no-op when the file is absent).
 * - Package-manager installs shell out to the matching global-uninstall
 *   command with stdio inherited so the user sees its output.
 *
 * Resolves on success; rejects with an Error on a non-zero exit code,
 * a spawn failure, or an unrecognized install method.
 */
function executeUninstall(method) {
	return new Promise((done, fail) => {
		if (method === "curl") {
			try {
				const binPath = join(homedir(), ".local", "bin", "ow");
				// Delete in-process instead of shelling out to `rm -f`: avoids a
				// subprocess and any shell-quoting issues in the path.
				if (existsSync(binPath)) unlinkSync(binPath);
				done();
			} catch (err) {
				fail(err);
			}
			return;
		}
		let cmd;
		let args;
		switch (method) {
			case "npm":
				cmd = "npm";
				args = ["uninstall", "-g", NPM_PACKAGE];
				break;
			case "pnpm":
				cmd = "pnpm";
				args = ["uninstall", "-g", NPM_PACKAGE];
				break;
			case "bun":
				cmd = "bun";
				args = ["remove", "-g", NPM_PACKAGE];
				break;
			case "brew":
				cmd = "brew";
				args = ["uninstall", "offworld"];
				break;
			default:
				fail(new Error(`Cannot uninstall: unknown installation method`));
				return;
		}
		const proc = spawn(cmd, args, { stdio: "inherit" });
		proc.on("close", (code) => {
			if (code === 0) done();
			else fail(new Error(`Uninstall failed with exit code ${code}`));
		});
		proc.on("error", fail);
	});
}
3624
/**
 * List well-known shell startup files (bash, zsh, fish) that actually
 * exist in the user's home directory, as absolute paths.
 */
function getShellConfigFiles() {
	const candidates = [
		".bashrc",
		".bash_profile",
		".profile",
		".zshrc",
		".zshenv",
		".config/fish/config.fish"
	];
	const home = homedir();
	return candidates.map((name) => join(home, name)).filter((path) => existsSync(path));
}
3643
/**
 * Strip installer-added ~/.local/bin PATH lines from one shell config file.
 *
 * A line is removed only when it (a) mentions ".local/bin", (b) is a PATH
 * export or a fish_add_path call, and (c) is either tagged "# offworld" or
 * is exactly the canonical export the installer writes. Returns true when
 * the file was rewritten, false otherwise (including on any read error).
 */
function cleanShellConfig(filePath) {
	const CANONICAL_EXPORT = "export PATH=\"$HOME/.local/bin:$PATH\"";
	const isInstallerLine = (line) => {
		const trimmed = line.trim();
		if (!trimmed.includes(".local/bin")) return false;
		if (!trimmed.startsWith("export PATH=") && !trimmed.startsWith("fish_add_path")) return false;
		return trimmed.includes("# offworld") || trimmed === CANONICAL_EXPORT;
	};
	try {
		const lines = readFileSync(filePath, "utf-8").split("\n");
		const kept = lines.filter((line) => !isInstallerLine(line));
		if (kept.length === lines.length) return false;
		writeFileSync(filePath, kept.join("\n"), "utf-8");
		return true;
	} catch {
		return false;
	}
}
3667
+
3668
+ //#endregion
3669
+ export { AuthError, AuthenticationError, CloneError, CommitExistsError, CommitNotFoundError, ConflictError, DEFAULT_IGNORE_PATTERNS, GitError, GitHubError, InvalidInputError, InvalidReferenceError, KNOWN_MAPPINGS, LowStarsError, NetworkError, NotGitRepoError, NotLoggedInError, OpenCodeSDKError, PathNotFoundError, Paths, PrivateRepoError, PushNotAllowedError, RateLimitError, RepoExistsError, RepoNotFoundError, RepoSourceError, SyncError, RepoNotFoundError$1 as SyncRepoNotFoundError, TokenExpiredError, VERSION, agents, appendReferencesSection, canPushToWeb, checkRemote, checkRemoteByName, checkStaleness, cleanShellConfig, clearAuthData, cloneRepo, detectInstallMethod, detectInstalledAgents, detectManifestType, discoverRepos, executeUninstall, executeUpgrade, expandTilde, fetchGitHubMetadata, fetchLatestVersion, fetchRepoStars, gcRepos, generateReferenceWithAI, getAgentConfig, getAllAgentConfigs, getAuthPath, getAuthStatus, getClonedRepoPath, getCommitDistance, getCommitSha, getConfigPath, getCurrentVersion, getMapEntry, getMetaPath, getMetaRoot, getProjectMapPath, getProvider, getReferenceFileNameForSource, getReferencePath, getRepoPath, getRepoRoot, getRepoStatus, getShellConfigFiles, getToken, getTokenOrNull, installGlobalSkill, installReference, isLoggedIn, isReferenceInstalled, isRepoCloned, isShallowClone, listProviders, listProvidersWithModels, listRepos, loadAuthData, loadConfig, matchDependenciesToReferences, parseDependencies, parseRepoInput, pruneRepos, pullReference, pullReferenceByName, pushReference, readGlobalMap, refreshAccessToken, removeGlobalMapEntry, removeRepo, resolveDependencyRepo, resolveFromNpm, resolveRepoKey, saveAuthData, saveConfig, searchMap, streamPrompt, toMetaDirName, toReferenceFileName, toReferenceName, unshallowRepo, updateAgentFiles, updateAllRepos, updateRepo, upsertGlobalMapEntry, validateProviderModel, validatePushAllowed, writeGlobalMap, writeProjectMap };
3670
+ //# sourceMappingURL=index.mjs.map