@offworld/sdk 0.2.2 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +35 -20
- package/dist/ai/index.d.mts +134 -0
- package/dist/ai/index.d.mts.map +1 -0
- package/dist/ai/index.mjs +924 -0
- package/dist/ai/index.mjs.map +1 -0
- package/dist/clone-DyLvmbJZ.mjs +364 -0
- package/dist/clone-DyLvmbJZ.mjs.map +1 -0
- package/dist/config-DW8J4gl5.mjs +174 -0
- package/dist/config-DW8J4gl5.mjs.map +1 -0
- package/dist/convex/_generated/api.d.ts +67 -0
- package/dist/convex/_generated/api.js +23 -0
- package/dist/convex/_generated/dataModel.d.ts +60 -0
- package/dist/convex/_generated/server.d.ts +143 -0
- package/dist/convex/_generated/server.js +93 -0
- package/dist/index.d.mts +2 -953
- package/dist/index.mjs +4 -3909
- package/dist/internal.d.mts +69 -0
- package/dist/internal.d.mts.map +1 -0
- package/dist/internal.mjs +326 -0
- package/dist/internal.mjs.map +1 -0
- package/dist/public-DbZeh2Mr.mjs +1823 -0
- package/dist/public-DbZeh2Mr.mjs.map +1 -0
- package/dist/public-MYVLaKUi.d.mts +655 -0
- package/dist/public-MYVLaKUi.d.mts.map +1 -0
- package/dist/sync/index.d.mts +175 -0
- package/dist/sync/index.d.mts.map +1 -0
- package/dist/sync/index.mjs +4 -0
- package/dist/sync-DuLJ5wla.mjs +4 -0
- package/dist/sync-wcy5fJRb.mjs +372 -0
- package/dist/sync-wcy5fJRb.mjs.map +1 -0
- package/package.json +35 -6
- package/dist/index.d.mts.map +0 -1
- package/dist/index.mjs.map +0 -1
|
@@ -0,0 +1,1823 @@
|
|
|
1
|
+
import { f as Paths, l as toMetaDirName, o as getRepoRoot, p as expandTilde, s as loadConfig, u as toReferenceFileName } from "./config-DW8J4gl5.mjs";
|
|
2
|
+
import { _ as upsertGlobalMapEntry, g as removeGlobalMapEntry, h as readGlobalMap, m as updateRepo, n as GitError, v as writeGlobalMap } from "./clone-DyLvmbJZ.mjs";
|
|
3
|
+
import { chmodSync, existsSync, lstatSync, mkdirSync, readFileSync, readdirSync, rmSync, statSync, symlinkSync, unlinkSync, writeFileSync } from "node:fs";
|
|
4
|
+
import { basename, dirname, join, resolve } from "node:path";
|
|
5
|
+
import { ModelsDevDataSchema, NpmPackageResponseSchema, WorkOSTokenResponseSchema } from "@offworld/types";
|
|
6
|
+
import { createHash } from "node:crypto";
|
|
7
|
+
import { GlobalMapSchema as GlobalMapSchema$1, ProjectMapSchema as ProjectMapSchema$1 } from "@offworld/types/schemas";
|
|
8
|
+
import { z } from "zod";
|
|
9
|
+
|
|
10
|
+
//#region src/constants.ts
/**
 * SDK Constants
 */
/** SDK version - must match package.json */
const VERSION = "0.3.0";
/**
 * Default patterns to ignore when scanning repositories.
 * Includes directories, binary files, IDE configs, and build outputs.
 */
const DEFAULT_IGNORE_PATTERNS = [
	// Version-control metadata
	".git", ".git/**", ".svn", ".hg",
	// Dependency directories
	"node_modules", "node_modules/**", "vendor", "vendor/**", ".pnpm", ".yarn",
	// Build outputs and caches
	"dist", "dist/**", "build", "build/**", "out", "out/**", ".next", ".nuxt", ".output", "target", "__pycache__", "*.pyc",
	// Editor / IDE configuration and swap files
	".vscode", ".vscode/**", ".idea", ".idea/**", "*.swp", "*.swo", ".DS_Store",
	// Images
	"*.jpg", "*.jpeg", "*.png", "*.gif", "*.ico", "*.webp", "*.svg", "*.bmp", "*.tiff",
	// Audio / video
	"*.mp4", "*.webm", "*.mov", "*.avi", "*.mkv", "*.mp3", "*.wav", "*.flac", "*.ogg",
	// Archives and compiled binaries
	"*.pdf", "*.zip", "*.tar", "*.gz", "*.rar", "*.7z", "*.exe", "*.dll", "*.so", "*.dylib", "*.bin", "*.wasm",
	// Fonts
	"*.woff", "*.woff2", "*.ttf", "*.eot", "*.otf",
	// Lockfiles (large, generated)
	"package-lock.json", "yarn.lock", "pnpm-lock.yaml", "bun.lockb", "Cargo.lock", "Gemfile.lock", "poetry.lock", "composer.lock", "go.sum",
	// Coverage output, logs, temp dirs
	"coverage", "coverage/**", ".nyc_output", ".coverage", "htmlcov", "*.log", "logs", "tmp", "temp", ".tmp", ".temp", ".cache",
	// Secrets — never scan these
	".env", ".env.*", "*.pem", "*.key"
];
|
|
111
|
+
|
|
112
|
+
//#endregion
|
|
113
|
+
//#region src/repo-source.ts
|
|
114
|
+
/**
|
|
115
|
+
* Repository source parsing utilities
|
|
116
|
+
*/
|
|
117
|
+
/** Base error for repository-source parsing failures. */
var RepoSourceError = class extends Error {
	constructor(message) {
		super(message);
		this.name = "RepoSourceError";
	}
};
|
|
123
|
+
/** Thrown when a local repository path does not exist on disk. */
var PathNotFoundError = class extends RepoSourceError {
	constructor(path) {
		super(`Path does not exist: ${path}`);
		this.name = "PathNotFoundError";
	}
};
|
|
129
|
+
/** Thrown when a local directory exists but has no .git entry. */
var NotGitRepoError = class extends RepoSourceError {
	constructor(path) {
		super(`Directory is not a git repository: ${path}`);
		this.name = "NotGitRepoError";
	}
};
|
|
135
|
+
/** Maps supported git hosts to provider identifiers. */
const PROVIDER_HOSTS = {
	"github.com": "github",
	"gitlab.com": "gitlab",
	"bitbucket.org": "bitbucket"
};
// https://host/owner/repo with optional trailing ".git" (the captured repo group excludes it).
const HTTPS_URL_REGEX = /^https?:\/\/(github\.com|gitlab\.com|bitbucket\.org)\/([^/]+)\/([^/]+?)(?:\.git)?$/;
// git@host:owner/repo with optional trailing ".git".
const SSH_URL_REGEX = /^git@(github\.com|gitlab\.com|bitbucket\.org):([^/]+)\/([^/]+?)(?:\.git)?$/;
// Bare "owner/repo" — neither segment may contain "/", ":" or "@".
const SHORT_FORMAT_REGEX = /^([^/:@]+)\/([^/:@]+)$/;
|
|
143
|
+
/**
|
|
144
|
+
* Generates a short hash of a path for local repo identification
|
|
145
|
+
*/
|
|
146
|
+
/**
 * Generates a short hash of a path for local repo identification.
 * @param {string} path - Absolute path to hash.
 * @returns {string} First 12 hex characters of the SHA-256 digest.
 */
function hashPath(path) {
	const digest = createHash("sha256").update(path).digest("hex");
	return digest.substring(0, 12);
}
|
|
149
|
+
/**
|
|
150
|
+
* Builds a clone URL for a remote repository
|
|
151
|
+
*/
|
|
152
|
+
/**
 * Builds an HTTPS clone URL for a remote repository.
 * @param {"github"|"gitlab"|"bitbucket"} provider - Provider identifier.
 * @param {string} owner - Repository owner (already lowercased by callers).
 * @param {string} repo - Repository name (already lowercased by callers).
 * @returns {string} "https://<host>/<owner>/<repo>.git"
 */
function buildCloneUrl(provider, owner, repo) {
	const hosts = {
		github: "github.com",
		gitlab: "gitlab.com",
		bitbucket: "bitbucket.org"
	};
	const host = hosts[provider];
	return `https://${host}/${owner}/${repo}.git`;
}
|
|
159
|
+
/**
|
|
160
|
+
* Parses a remote repository from HTTPS URL format
|
|
161
|
+
*/
|
|
162
|
+
/**
 * Parses a remote repository from HTTPS URL format.
 * @param {string} input - Candidate URL.
 * @returns {object|null} Remote RepoSource, or null when input is not a
 *   supported github/gitlab/bitbucket HTTPS URL.
 */
function parseHttpsUrl(input) {
	const match = HTTPS_URL_REGEX.exec(input);
	if (match === null) return null;
	const host = match[1];
	const rawOwner = match[2];
	const rawRepo = match[3];
	if (!host || !rawOwner || !rawRepo) return null;
	const provider = PROVIDER_HOSTS[host];
	if (!provider) return null;
	// Owner/repo are case-insensitive on the supported hosts; normalize once.
	const owner = rawOwner.toLowerCase();
	const repo = rawRepo.toLowerCase();
	return {
		type: "remote",
		provider,
		owner,
		repo,
		fullName: `${owner}/${repo}`,
		qualifiedName: `${host}:${owner}/${repo}`,
		cloneUrl: buildCloneUrl(provider, owner, repo)
	};
}
|
|
181
|
+
/**
|
|
182
|
+
* Parses a remote repository from SSH URL format
|
|
183
|
+
*/
|
|
184
|
+
/**
 * Parses a remote repository from SSH URL format (git@host:owner/repo[.git]).
 * @param {string} input - Candidate SSH URL.
 * @returns {object|null} Remote RepoSource, or null when not a supported SSH URL.
 */
function parseSshUrl(input) {
	const match = SSH_URL_REGEX.exec(input);
	if (match === null) return null;
	const host = match[1];
	const rawOwner = match[2];
	const rawRepo = match[3];
	if (!host || !rawOwner || !rawRepo) return null;
	const provider = PROVIDER_HOSTS[host];
	if (!provider) return null;
	const owner = rawOwner.toLowerCase();
	const repo = rawRepo.toLowerCase();
	return {
		type: "remote",
		provider,
		owner,
		repo,
		fullName: `${owner}/${repo}`,
		qualifiedName: `${host}:${owner}/${repo}`,
		cloneUrl: buildCloneUrl(provider, owner, repo)
	};
}
|
|
203
|
+
/**
|
|
204
|
+
* Parses a remote repository from short format (owner/repo)
|
|
205
|
+
* Defaults to GitHub as provider
|
|
206
|
+
*/
|
|
207
|
+
/**
 * Parses a remote repository from short format (owner/repo).
 * Defaults to GitHub as provider.
 *
 * Fix: the HTTPS and SSH parsers strip an optional trailing ".git" from the
 * repo segment, but short format did not — "owner/repo.git" previously built
 * the clone URL ".../repo.git.git". Strip it here for consistency.
 *
 * @param {string} input - Candidate "owner/repo" string.
 * @returns {object|null} Remote RepoSource, or null when not short format.
 */
function parseShortFormat(input) {
	const match = input.match(SHORT_FORMAT_REGEX);
	if (!match) return null;
	const [, owner, repo] = match;
	if (!owner || !repo) return null;
	const provider = "github";
	const host = "github.com";
	const ownerLower = owner.toLowerCase();
	// Drop a trailing ".git" so "owner/repo.git" resolves like "owner/repo".
	const repoLower = repo.toLowerCase().replace(/\.git$/, "");
	if (!repoLower) return null;
	return {
		type: "remote",
		provider,
		owner: ownerLower,
		repo: repoLower,
		fullName: `${ownerLower}/${repoLower}`,
		qualifiedName: `${host}:${ownerLower}/${repoLower}`,
		cloneUrl: buildCloneUrl(provider, ownerLower, repoLower)
	};
}
|
|
226
|
+
/**
|
|
227
|
+
* Parses a local repository path
|
|
228
|
+
* Validates that the path exists and contains a .git directory
|
|
229
|
+
*/
|
|
230
|
+
/**
 * Parses a local repository path.
 * Validates that the path exists and contains a .git directory.
 * @param {string} input - Path; a leading "~" is expanded via expandTilde.
 * @returns {object} Local RepoSource: { type, path, name, qualifiedName }.
 * @throws {PathNotFoundError} if the resolved path does not exist.
 * @throws {RepoSourceError} if the path exists but is not a directory.
 * @throws {NotGitRepoError} if the directory has no .git entry.
 */
function parseLocalPath(input) {
	const absolutePath = resolve(expandTilde(input));
	if (!existsSync(absolutePath)) throw new PathNotFoundError(absolutePath);
	if (!statSync(absolutePath).isDirectory()) throw new RepoSourceError(`Path is not a directory: ${absolutePath}`);
	if (!existsSync(resolve(absolutePath, ".git"))) throw new NotGitRepoError(absolutePath);
	return {
		type: "local",
		path: absolutePath,
		name: basename(absolutePath),
		// Hash the absolute path so distinct local clones get distinct keys.
		qualifiedName: `local:${hashPath(absolutePath)}`
	};
}
|
|
242
|
+
/**
|
|
243
|
+
* Determines if input looks like a local path
|
|
244
|
+
*/
|
|
245
|
+
/**
 * Determines if input looks like a local path (relative, absolute, or tilde).
 * @param {string} input - Raw repo input.
 * @returns {boolean} true when the input starts with ".", "/" or "~".
 */
function isLocalPath(input) {
	const localPrefixes = [".", "/", "~"];
	return localPrefixes.some((prefix) => input.startsWith(prefix));
}
|
|
248
|
+
/**
|
|
249
|
+
* Parses a repository input and returns a structured RepoSource
|
|
250
|
+
*
|
|
251
|
+
* Supported formats:
|
|
252
|
+
* - owner/repo (short format, defaults to GitHub)
|
|
253
|
+
* - https://github.com/owner/repo
|
|
254
|
+
* - https://gitlab.com/owner/repo
|
|
255
|
+
* - https://bitbucket.org/owner/repo
|
|
256
|
+
* - git@github.com:owner/repo.git (SSH format)
|
|
257
|
+
* - . (current directory as local repo)
|
|
258
|
+
* - /absolute/path (local repo)
|
|
259
|
+
*
|
|
260
|
+
* @throws PathNotFoundError if local path doesn't exist
|
|
261
|
+
* @throws NotGitRepoError if local path is not a git repository
|
|
262
|
+
* @throws RepoSourceError for other parsing failures
|
|
263
|
+
*/
|
|
264
|
+
/**
 * Parses a repository input and returns a structured RepoSource.
 *
 * Supported formats:
 * - owner/repo (short format, defaults to GitHub)
 * - https://github.com/owner/repo (also gitlab.com / bitbucket.org)
 * - git@github.com:owner/repo.git (SSH format)
 * - . / ~path / /absolute/path (local repo)
 *
 * @throws {PathNotFoundError} if a local path doesn't exist
 * @throws {NotGitRepoError} if a local path is not a git repository
 * @throws {RepoSourceError} for other parsing failures
 */
function parseRepoInput(input) {
	const trimmed = input.trim();
	// URL forms are tried first; local-path detection before short format so
	// "./x" never parses as owner/repo.
	const remote = parseHttpsUrl(trimmed) ?? parseSshUrl(trimmed);
	if (remote) return remote;
	if (isLocalPath(trimmed)) return parseLocalPath(trimmed);
	const short = parseShortFormat(trimmed);
	if (short) return short;
	throw new RepoSourceError(`Unable to parse repository input: ${input}. Expected formats: owner/repo, https://github.com/owner/repo, git@github.com:owner/repo.git, or a local path`);
}
|
|
275
|
+
/**
 * Maps a RepoSource to its reference file name.
 * Remote sources key on "owner/repo"; local sources key on the directory name.
 */
function getReferenceFileNameForSource(source) {
	if (source.type === "remote") return toReferenceFileName(source.fullName);
	return toReferenceFileName(source.name);
}
|
|
279
|
+
|
|
280
|
+
//#endregion
|
|
281
|
+
//#region src/map.ts
|
|
282
|
+
/**
|
|
283
|
+
* Map query helpers for fast routing without reading full map.json
|
|
284
|
+
*/
|
|
285
|
+
/**
 * Reads and validates the global map file.
 * @returns {object|null} Parsed global map, or null when the file is missing,
 *   unreadable, invalid JSON, or fails schema validation (never throws).
 */
function readGlobalMapSafe() {
	const mapPath = Paths.offworldGlobalMapPath;
	if (!existsSync(mapPath)) return null;
	try {
		const content = readFileSync(mapPath, "utf-8");
		return GlobalMapSchema$1.parse(JSON.parse(content));
	} catch {
		// Corrupt or schema-invalid map files are treated as absent.
		return null;
	}
}
|
|
295
|
+
/**
 * Reads and validates the project-local map file (.offworld/map.json).
 * @param {string} cwd - Directory to resolve the map path against.
 * @returns {object|null} Parsed project map, or null on any failure (never throws).
 */
function readProjectMapSafe(cwd) {
	const mapPath = resolve(cwd, ".offworld/map.json");
	if (!existsSync(mapPath)) return null;
	try {
		const raw = JSON.parse(readFileSync(mapPath, "utf-8"));
		return ProjectMapSchema$1.parse(raw);
	} catch {
		return null;
	}
}
|
|
305
|
+
/**
|
|
306
|
+
* Normalize input to match against repo keys.
|
|
307
|
+
* Accepts: github.com:owner/repo, owner/repo, repo
|
|
308
|
+
*/
|
|
309
|
+
/**
 * Normalize input to match against repo keys.
 * Accepts: github.com:owner/repo, owner/repo, repo
 * @param {string} input - Raw identifier.
 * @returns {{provider?: string, fullName: string, repoName: string}}
 */
function normalizeInput(input) {
	const trimmed = input.trim().toLowerCase();
	if (trimmed.includes(":")) {
		// Only the segment between the first and second colon is the full name
		// (matches the original split(":", 2) semantics).
		const segments = trimmed.split(":");
		const fullName = segments[1] ?? "";
		return {
			provider: segments[0],
			fullName,
			repoName: fullName.split("/").pop() ?? fullName
		};
	}
	if (trimmed.includes("/")) {
		return {
			fullName: trimmed,
			repoName: trimmed.split("/").pop() ?? trimmed
		};
	}
	return { fullName: trimmed, repoName: trimmed };
}
|
|
330
|
+
/**
|
|
331
|
+
* Tokenize a string for search matching.
|
|
332
|
+
* Lowercase, strip @, split on /_- and whitespace.
|
|
333
|
+
*/
|
|
334
|
+
/**
 * Tokenize a string for search matching.
 * Lowercase, strip "@", split on "/", "_", "-" and whitespace.
 * @param {string} str - Input string.
 * @returns {string[]} Non-empty lowercase tokens.
 */
function tokenize(str) {
	const cleaned = str.toLowerCase().replaceAll("@", "");
	return cleaned.split(/[/_\-\s]+/).filter((token) => token.length > 0);
}
|
|
337
|
+
/**
|
|
338
|
+
* Resolve an input string to a qualified repo key in a map.
|
|
339
|
+
*
|
|
340
|
+
* @param input - Accepts github.com:owner/repo, owner/repo, or repo name
|
|
341
|
+
* @param map - A global or project map
|
|
342
|
+
* @returns The matching qualified name or null
|
|
343
|
+
*/
|
|
344
|
+
/**
 * Resolve an input string to a qualified repo key in a map.
 * Tries, in order: exact qualified key, full-name match (ignoring the
 * provider prefix), then bare repo-name match.
 *
 * @param {string} input - Accepts github.com:owner/repo, owner/repo, or repo name
 * @param {object} map - A global or project map ({ repos: {...} })
 * @returns {string|null} The matching qualified name or null
 */
function resolveRepoKey(input, map) {
	const { provider, fullName, repoName } = normalizeInput(input);
	const keys = Object.keys(map.repos);
	if (provider) {
		const qualified = `${provider}:${fullName}`;
		if (keys.includes(qualified)) return qualified;
	}
	const fullNameMatch = keys.find((key) => {
		const bare = key.includes(":") ? key.split(":")[1] : key;
		return bare?.toLowerCase() === fullName;
	});
	if (fullNameMatch !== undefined) return fullNameMatch;
	const repoNameMatch = keys.find((key) => key.split("/").pop()?.toLowerCase() === repoName);
	return repoNameMatch !== undefined ? repoNameMatch : null;
}
|
|
355
|
+
/**
|
|
356
|
+
* Get a map entry for a repo, preferring project map if available.
|
|
357
|
+
*
|
|
358
|
+
* @param input - Repo identifier (github.com:owner/repo, owner/repo, or repo)
|
|
359
|
+
* @param options - Options for lookup
|
|
360
|
+
* @returns Entry with scope and qualified name, or null if not found
|
|
361
|
+
*/
|
|
362
|
+
/**
 * Get a map entry for a repo, preferring project map if available.
 *
 * @param input - Repo identifier (github.com:owner/repo, owner/repo, or repo)
 * @param options - { preferProject = true, cwd = process.cwd() }
 * @returns Entry with scope ("project" | "global") and qualified name, or null if not found
 */
function getMapEntry(input, options = {}) {
	const { preferProject = true, cwd = process.cwd() } = options;
	const projectMap = preferProject ? readProjectMapSafe(cwd) : null;
	const globalMap = readGlobalMapSafe();
	// Project map wins whenever it can resolve the key.
	if (projectMap) {
		const key = resolveRepoKey(input, projectMap);
		if (key && projectMap.repos[key]) return {
			scope: "project",
			qualifiedName: key,
			entry: projectMap.repos[key]
		};
	}
	// Fall back to the global map.
	if (globalMap) {
		const key = resolveRepoKey(input, globalMap);
		if (key && globalMap.repos[key]) return {
			scope: "global",
			qualifiedName: key,
			entry: globalMap.repos[key]
		};
	}
	return null;
}
|
|
384
|
+
/**
|
|
385
|
+
* Search the map for repos matching a term.
|
|
386
|
+
*
|
|
387
|
+
* Scoring:
|
|
388
|
+
* - Exact fullName match: 100
|
|
389
|
+
* - Keyword hit: 50 per keyword
|
|
390
|
+
* - Partial contains in fullName: 25
|
|
391
|
+
* - Partial contains in keywords: 10
|
|
392
|
+
*
|
|
393
|
+
* @param term - Search term
|
|
394
|
+
* @param options - Search options
|
|
395
|
+
* @returns Sorted list of matches
|
|
396
|
+
*/
|
|
397
|
+
/**
 * Search the global map for repos matching a term.
 *
 * Scoring (cumulative):
 * - Exact fullName match: +100
 * - Each term token found verbatim in keywords: +50
 * - fullName contains term: +25 (skipped when score already >= 100)
 * - Each keyword containing term as a substring: +10
 * - Each term token matching a fullName token: +30
 *
 * @param term - Search term
 * @param options - { limit = 10 } maximum number of results
 * @returns Matches sorted by score desc, ties broken by fullName asc
 */
function searchMap(term, options = {}) {
	const { limit = 10 } = options;
	const globalMap = readGlobalMapSafe();
	if (!globalMap) return [];
	const termTokens = tokenize(term);
	const termLower = term.toLowerCase();
	const results = [];
	for (const qualifiedName of Object.keys(globalMap.repos)) {
		const entry = globalMap.repos[qualifiedName];
		if (!entry) continue;
		// Strip the "provider:" prefix when present.
		const fullName = qualifiedName.includes(":") ? qualifiedName.split(":")[1] ?? qualifiedName : qualifiedName;
		const fullNameLower = fullName.toLowerCase();
		const keywords = entry.keywords ?? [];
		const keywordsLower = keywords.map((k) => k.toLowerCase());
		let score = 0;
		if (fullNameLower === termLower) score += 100;
		for (const token of termTokens) if (keywordsLower.includes(token)) score += 50;
		// Partial-contains bonus is suppressed once the score reaches 100
		// (exact match or enough keyword hits).
		if (fullNameLower.includes(termLower) && score < 100) score += 25;
		for (const kw of keywordsLower) if (kw.includes(termLower)) score += 10;
		const fullNameTokens = tokenize(fullName);
		for (const token of termTokens) if (fullNameTokens.includes(token)) score += 30;
		if (score > 0) results.push({
			qualifiedName,
			fullName,
			localPath: entry.localPath,
			primary: entry.primary,
			keywords,
			score
		});
	}
	results.sort((a, b) => {
		if (b.score !== a.score) return b.score - a.score;
		return a.fullName.localeCompare(b.fullName);
	});
	return results.slice(0, limit);
}
|
|
433
|
+
/**
|
|
434
|
+
* Get the project map path if it exists in cwd.
|
|
435
|
+
*/
|
|
436
|
+
/**
 * Get the project map path if it exists in cwd.
 * @param {string} [cwd=process.cwd()] - Directory to check.
 * @returns {string|null} Absolute path to .offworld/map.json, or null if absent.
 */
function getProjectMapPath(cwd = process.cwd()) {
	const candidate = resolve(cwd, ".offworld/map.json");
	if (existsSync(candidate)) return candidate;
	return null;
}
|
|
440
|
+
|
|
441
|
+
//#endregion
|
|
442
|
+
//#region src/auth.ts
|
|
443
|
+
/**
|
|
444
|
+
* Authentication utilities for offworld CLI
|
|
445
|
+
*/
|
|
446
|
+
/**
 * Shape of the persisted auth file.
 * Only `token` is required; expiry, refresh token and identity fields are
 * optional (older auth files may lack them).
 */
const AuthDataSchema = z.object({
	token: z.string(),
	expiresAt: z.string().optional(),
	workosId: z.string().optional(),
	refreshToken: z.string().optional(),
	email: z.string().optional()
});
|
|
453
|
+
/** Base error for authentication failures. */
var AuthError = class extends Error {
	constructor(message) {
		super(message);
		this.name = "AuthError";
	}
};
|
|
459
|
+
/** Thrown when no auth data is stored (user never logged in or logged out). */
var NotLoggedInError = class extends AuthError {
	constructor(message = "Not logged in. Please run 'ow auth login' first.") {
		super(message);
		this.name = "NotLoggedInError";
	}
};
|
|
465
|
+
/** Thrown when the stored token is expired and could not be refreshed. */
var TokenExpiredError = class extends AuthError {
	constructor(message = "Session expired. Please run 'ow auth login' again.") {
		super(message);
		this.name = "TokenExpiredError";
	}
};
|
|
471
|
+
/**
 * Extracts the expiration time from a JWT access token.
 * @param {string} token - Raw JWT ("header.payload.signature").
 * @returns {string|undefined} ISO-8601 expiry derived from the numeric "exp"
 *   claim, or undefined when the token is malformed or has no such claim.
 */
function extractJwtExpiration(token) {
	try {
		const segments = token.split(".");
		if (segments.length !== 3) return void 0;
		const payloadSegment = segments[1];
		if (!payloadSegment) return void 0;
		// Node's "base64" decoder also accepts the base64url alphabet JWTs use.
		const claims = JSON.parse(Buffer.from(payloadSegment, "base64").toString("utf-8"));
		if (typeof claims.exp !== "number") return void 0;
		return new Date(claims.exp * 1000).toISOString();
	} catch {
		return void 0;
	}
}
|
|
484
|
+
/** Returns the path of the persisted auth file (delegates to Paths). */
function getAuthPath() {
	return Paths.authFile;
}
|
|
487
|
+
/**
 * Persists auth data as pretty-printed JSON, creating the parent directory
 * if needed and restricting the file to owner read/write.
 * @param {object} data - Auth data to store.
 */
function saveAuthData(data) {
	const authPath = getAuthPath();
	const parentDir = dirname(authPath);
	if (!existsSync(parentDir)) mkdirSync(parentDir, { recursive: true });
	writeFileSync(authPath, JSON.stringify(data, null, 2), "utf-8");
	// 0o600 (= 384): owner read/write only — the file holds tokens.
	chmodSync(authPath, 0o600);
}
|
|
494
|
+
/**
|
|
495
|
+
* Loads authentication data from ~/.local/share/offworld/auth.json
|
|
496
|
+
* Returns null if file doesn't exist or is invalid
|
|
497
|
+
*/
|
|
498
|
+
/**
 * Loads authentication data from the auth file.
 * @returns {object|null} Validated auth data, or null when the file is
 *   missing, unreadable, invalid JSON, or fails schema validation.
 */
function loadAuthData() {
	const authPath = getAuthPath();
	if (!existsSync(authPath)) return null;
	try {
		const raw = JSON.parse(readFileSync(authPath, "utf-8"));
		const parsed = AuthDataSchema.safeParse(raw);
		return parsed.success ? parsed.data : null;
	} catch {
		return null;
	}
}
|
|
511
|
+
/**
|
|
512
|
+
* Clears stored authentication data
|
|
513
|
+
* @returns true if auth file was deleted, false if it didn't exist
|
|
514
|
+
*/
|
|
515
|
+
/**
 * Clears stored authentication data.
 * @returns {boolean} true if the auth file was deleted, false if it didn't
 *   exist or could not be removed.
 */
function clearAuthData() {
	const authPath = getAuthPath();
	if (!existsSync(authPath)) return false;
	try {
		unlinkSync(authPath);
	} catch {
		// Best effort: deletion failure is reported, not thrown.
		return false;
	}
	return true;
}
|
|
525
|
+
/**
 * Returns a valid access token for the logged-in user.
 *
 * Flow: load stored auth → backfill a missing expiresAt from the JWT's "exp"
 * claim → refresh when the token is expired or expires within one minute →
 * otherwise return the stored token.
 *
 * Fix: the refresh-and-swallow try/catch was duplicated in both the expired
 * and the near-expiry branches; deduplicated into a private helper with
 * identical outcomes per branch.
 *
 * @returns {Promise<string>} A usable access token.
 * @throws {NotLoggedInError} when no auth data is stored.
 * @throws {TokenExpiredError} when the token is expired and refresh is
 *   unavailable or fails.
 */
async function getToken() {
	const data = loadAuthData();
	if (!data) throw new NotLoggedInError();
	let expiresAtStr = data.expiresAt;
	if (!expiresAtStr) {
		// Older auth files lack expiresAt; derive it from the JWT and persist.
		expiresAtStr = extractJwtExpiration(data.token);
		if (expiresAtStr) {
			data.expiresAt = expiresAtStr;
			saveAuthData(data);
		}
	}
	if (expiresAtStr) {
		const oneMinute = 60 * 1e3;
		const msUntilExpiry = new Date(expiresAtStr).getTime() - Date.now();
		if (msUntilExpiry <= 0) {
			// Hard-expired: refresh or fail.
			const refreshed = data.refreshToken ? await tryRefreshForToken() : null;
			if (refreshed !== null) return refreshed;
			throw new TokenExpiredError();
		}
		if (msUntilExpiry < oneMinute && data.refreshToken) {
			// About to expire: refresh opportunistically, but fall back to the
			// still-valid stored token when the refresh fails.
			const refreshed = await tryRefreshForToken();
			if (refreshed !== null) return refreshed;
		}
	}
	return data.token;
}
/** Attempts a token refresh; returns the new access token or null on failure. */
async function tryRefreshForToken() {
	try {
		return (await refreshAccessToken()).token;
	} catch {
		return null;
	}
}
|
|
558
|
+
/**
|
|
559
|
+
* Gets the current authentication token, or null if not logged in
|
|
560
|
+
* Does not throw errors
|
|
561
|
+
*/
|
|
562
|
+
/**
 * Gets the current authentication token, or null if not logged in.
 * Does not throw errors.
 * @returns {Promise<string|null>}
 */
async function getTokenOrNull() {
	return getToken().catch(() => null);
}
|
|
569
|
+
/**
 * Reports whether a usable token is currently available.
 * @returns {Promise<boolean>}
 */
async function isLoggedIn() {
	const token = await getTokenOrNull();
	return token !== null;
}
|
|
572
|
+
/**
 * Reports login status without throwing.
 *
 * Mirrors getToken()'s refresh flow: backfills a missing expiresAt from the
 * JWT, refreshes when expired (failure → logged out) or near expiry
 * (failure → still logged in with the stored data).
 *
 * @returns {Promise<object>} { isLoggedIn, email?, workosId?, expiresAt? }
 */
async function getAuthStatus() {
	const data = loadAuthData();
	if (!data) return { isLoggedIn: false };
	let expiresAtStr = data.expiresAt;
	if (!expiresAtStr) {
		// Backfill expiresAt from the JWT's "exp" claim and persist it.
		expiresAtStr = extractJwtExpiration(data.token);
		if (expiresAtStr) {
			data.expiresAt = expiresAtStr;
			saveAuthData(data);
		}
	}
	if (expiresAtStr) {
		const expiresAt = new Date(expiresAtStr);
		const now = /* @__PURE__ */ new Date();
		const oneMinute = 60 * 1e3;
		if (expiresAt <= now) {
			// Expired: only a successful refresh keeps the session alive.
			if (data.refreshToken) try {
				const refreshed = await refreshAccessToken();
				return {
					isLoggedIn: true,
					email: refreshed.email,
					workosId: refreshed.workosId,
					expiresAt: refreshed.expiresAt
				};
			} catch {
				return { isLoggedIn: false };
			}
			return { isLoggedIn: false };
		}
		if (expiresAt.getTime() - now.getTime() < oneMinute) {
			// Near expiry: refresh opportunistically; on failure report the
			// still-valid stored session.
			if (data.refreshToken) try {
				const refreshed = await refreshAccessToken();
				return {
					isLoggedIn: true,
					email: refreshed.email,
					workosId: refreshed.workosId,
					expiresAt: refreshed.expiresAt
				};
			} catch {
				return {
					isLoggedIn: true,
					email: data.email,
					workosId: data.workosId,
					expiresAt: expiresAtStr
				};
			}
		}
	}
	return {
		isLoggedIn: true,
		email: data.email,
		workosId: data.workosId,
		expiresAt: expiresAtStr
	};
}
|
|
627
|
+
/** Base URL of the WorkOS REST API. */
const WORKOS_API = "https://api.workos.com";
/** Baked-in production WorkOS client id. */
const PRODUCTION_WORKOS_CLIENT_ID = "client_01KFAD76TNGN02AP96982HG35E";
/**
 * Resolves the WorkOS client id; WORKOS_CLIENT_ID in the environment
 * overrides the production id (useful for staging/testing).
 * @returns {string}
 */
function getWorkosClientId() {
	const override = process.env.WORKOS_CLIENT_ID;
	return override != null ? override : PRODUCTION_WORKOS_CLIENT_ID;
}
|
|
632
|
+
/**
 * Exchanges the stored refresh token for a new access token via WorkOS,
 * persists the new auth data, and returns it.
 * @returns {Promise<object>} The newly saved auth data.
 * @throws {AuthError} when no refresh token is stored, the HTTP exchange
 *   fails, or the response does not match the expected schema.
 */
async function refreshAccessToken() {
	const data = loadAuthData();
	if (!data?.refreshToken) throw new AuthError("No refresh token available. Please log in again.");
	try {
		const response = await fetch(`${WORKOS_API}/user_management/authenticate`, {
			method: "POST",
			headers: { "Content-Type": "application/x-www-form-urlencoded" },
			body: new URLSearchParams({
				grant_type: "refresh_token",
				refresh_token: data.refreshToken,
				client_id: getWorkosClientId()
			})
		});
		if (!response.ok) throw new AuthError(`Token refresh failed: ${await response.text()}`);
		const json = await response.json();
		const tokenData = WorkOSTokenResponseSchema.parse(json);
		const newAuthData = {
			token: tokenData.access_token,
			email: tokenData.user.email,
			workosId: tokenData.user.id,
			refreshToken: tokenData.refresh_token,
			// Prefer the explicit expires_at (epoch seconds); otherwise decode the JWT.
			expiresAt: tokenData.expires_at ? (/* @__PURE__ */ new Date(tokenData.expires_at * 1e3)).toISOString() : extractJwtExpiration(tokenData.access_token)
		};
		saveAuthData(newAuthData);
		return newAuthData;
	} catch (error) {
		// Preserve AuthErrors; wrap anything else (network failures, schema errors).
		if (error instanceof AuthError) throw error;
		throw new AuthError(`Failed to refresh token: ${error instanceof Error ? error.message : "Unknown error"}`);
	}
}
|
|
662
|
+
|
|
663
|
+
//#endregion
|
|
664
|
+
//#region src/agents.ts
|
|
665
|
+
/**
|
|
666
|
+
* Agent Registry & Auto-Detection
|
|
667
|
+
*
|
|
668
|
+
* Centralized registry of supported AI coding agents with their
|
|
669
|
+
* skill directory locations and detection functions.
|
|
670
|
+
*/
|
|
671
|
+
/**
 * Registry of supported AI coding agents.
 * Each entry records the project-local skills directory, the global skills
 * directory, and a detector that checks for the agent's config dir on disk.
 */
const agents = {
	opencode: {
		name: "opencode",
		displayName: "OpenCode",
		skillsDir: ".opencode/skills",
		globalSkillsDir: "~/.config/opencode/skills",
		detectInstalled: () => existsSync(expandTilde("~/.config/opencode"))
	},
	"claude-code": {
		name: "claude-code",
		displayName: "Claude Code",
		skillsDir: ".claude/skills",
		globalSkillsDir: "~/.claude/skills",
		detectInstalled: () => existsSync(expandTilde("~/.claude"))
	},
	codex: {
		name: "codex",
		displayName: "Codex (OpenAI)",
		skillsDir: ".codex/skills",
		globalSkillsDir: "~/.codex/skills",
		detectInstalled: () => existsSync(expandTilde("~/.codex"))
	},
	amp: {
		// NOTE(review): Amp is detected via ~/.config/amp but its skills live
		// under .agents / ~/.config/agents — confirm this asymmetry is intended.
		name: "amp",
		displayName: "Amp",
		skillsDir: ".agents/skills",
		globalSkillsDir: "~/.config/agents/skills",
		detectInstalled: () => existsSync(expandTilde("~/.config/amp"))
	},
	antigravity: {
		name: "antigravity",
		displayName: "Antigravity",
		skillsDir: ".agent/skills",
		globalSkillsDir: "~/.gemini/antigravity/skills",
		detectInstalled: () => existsSync(expandTilde("~/.gemini/antigravity"))
	},
	cursor: {
		name: "cursor",
		displayName: "Cursor",
		skillsDir: ".cursor/skills",
		globalSkillsDir: "~/.cursor/skills",
		detectInstalled: () => existsSync(expandTilde("~/.cursor"))
	}
};
|
|
715
|
+
/**
|
|
716
|
+
* Detect which agents are installed on the system.
|
|
717
|
+
* Checks for the existence of each agent's config directory.
|
|
718
|
+
*
|
|
719
|
+
* @returns Array of installed agent identifiers
|
|
720
|
+
*/
|
|
721
|
+
/**
 * Detect which agents are installed on the system by checking for the
 * existence of each agent's config directory.
 * @returns {string[]} Installed agent identifiers, in registry order.
 */
function detectInstalledAgents() {
	return Object.values(agents)
		.filter((config) => config.detectInstalled())
		.map((config) => config.name);
}
|
|
726
|
+
/**
|
|
727
|
+
* Get the configuration for a specific agent.
|
|
728
|
+
*
|
|
729
|
+
* @param type - Agent identifier
|
|
730
|
+
* @returns AgentConfig for the specified agent
|
|
731
|
+
*/
|
|
732
|
+
/**
 * Get the configuration for a specific agent.
 * @param type - Agent identifier (a key of the agents registry)
 * @returns AgentConfig for the specified agent (undefined for unknown keys)
 */
function getAgentConfig(type) {
	return agents[type];
}
|
|
735
|
+
/**
|
|
736
|
+
* Get all agent configurations as an array.
|
|
737
|
+
*
|
|
738
|
+
* @returns Array of all agent configurations
|
|
739
|
+
*/
|
|
740
|
+
/**
 * Get all agent configurations as an array, in registry order.
 * @returns {object[]} Every registered agent configuration.
 */
function getAllAgentConfigs() {
	return Object.keys(agents).map((key) => agents[key]);
}
|
|
743
|
+
|
|
744
|
+
//#endregion
|
|
745
|
+
//#region src/reference.ts
|
|
746
|
+
/** Minimal package.json shape used for keyword derivation (extra fields ignored). */
const PackageJsonKeywordsSchema = z.object({
	name: z.string().optional(),
	keywords: z.array(z.string()).optional()
});
|
|
750
|
+
/**
 * Expands one raw keyword into a set of lowercase search tokens: the value
 * itself, slash-to-dash and slash-removed variants, each separator-split
 * piece, and (for scoped names) the value without its leading "@".
 * Tokens shorter than 2 characters are dropped.
 * @param {string} value - Raw keyword.
 * @returns {string[]} Unique tokens in insertion order.
 */
function normalizeKeyword(value) {
	const trimmed = value.trim();
	if (trimmed === "") return [];
	const normalized = trimmed.toLowerCase();
	const tokens = new Set();
	const addToken = (token) => {
		const cleaned = token.trim().toLowerCase();
		if (cleaned.length >= 2) tokens.add(cleaned);
	};
	addToken(normalized);
	addToken(normalized.replaceAll("/", "-"));
	addToken(normalized.replaceAll("/", ""));
	normalized.split(/[\s/_-]+/).forEach((piece) => addToken(piece));
	if (normalized.startsWith("@")) addToken(normalized.slice(1));
	return [...tokens];
}
|
|
767
|
+
/**
 * Derives search keywords for a repo from three sources: its full name, the
 * first Markdown H1 in the reference content, and package.json name/keywords.
 * package.json read or parse errors are deliberately ignored (best effort).
 * @param fullName - Repo's "owner/repo" (or local name).
 * @param localPath - Path to the local clone (package.json looked up here).
 * @param referenceContent - The generated reference markdown.
 * @returns {string[]} De-duplicated keyword tokens.
 */
function deriveKeywords(fullName, localPath, referenceContent) {
	const keywords = /* @__PURE__ */ new Set();
	const addKeywords = (value) => {
		for (const token of normalizeKeyword(value)) keywords.add(token);
	};
	addKeywords(fullName);
	// First level-1 heading of the reference doc, if any.
	const headingMatch = referenceContent.match(/^#\s+(.+)$/m);
	if (headingMatch?.[1]) addKeywords(headingMatch[1]);
	const packageJsonPath = join(localPath, "package.json");
	if (existsSync(packageJsonPath)) try {
		const content = readFileSync(packageJsonPath, "utf-8");
		const json = JSON.parse(content);
		const parsed = PackageJsonKeywordsSchema.safeParse(json);
		if (parsed.success) {
			if (parsed.data.name) addKeywords(parsed.data.name);
			if (parsed.data.keywords) for (const keyword of parsed.data.keywords) addKeywords(keyword);
		}
	} catch {}
	return Array.from(keywords);
}
|
|
787
|
+
/**
 * Ensure a symlink exists, removing any existing file/directory at the path
 */
function ensureSymlink(target, linkPath) {
	try {
		const existing = lstatSync(linkPath);
		// Symlinks and plain files are unlinked; real directories need recursive removal.
		if (existing.isSymbolicLink() || !existing.isDirectory()) unlinkSync(linkPath);
		else rmSync(linkPath, { recursive: true });
	} catch {}
	mkdirSync(join(linkPath, ".."), { recursive: true });
	symlinkSync(target, linkPath, "dir");
}
|
|
800
|
+
/**
 * Static template for the global SKILL.md file.
 * This is the single routing skill that all agents see.
 * NOTE: this is a runtime string written verbatim to disk by installGlobalSkill;
 * do not reformat its contents.
 */
const GLOBAL_SKILL_TEMPLATE = `---
name: offworld
description: Routes queries to Offworld reference files. Find and read per-repo references for dependency knowledge.
allowed-tools: Bash(ow:*) Read
---

# Offworld Reference Router

Use \`ow\` to locate and read Offworld reference files for dependencies.

## What This Does

- Finds references for libraries and repos
- Returns paths for reference files and local clones
- Helps you read the right context fast

## When to Use

- You need docs or patterns for a dependency
- You want the verified reference instead of web search
- You are about to work inside a repo clone

## Prerequisites

Check that the CLI is available:

\`\`\`bash
ow --version
\`\`\`

If \`ow\` is not available, install it:

\`\`\`bash
curl -fsSL https://offworld.sh/install | bash
\`\`\`

## Setup

Initialize Offworld once per machine:

\`\`\`bash
ow init
\`\`\`

For a specific project, build a project map:

\`\`\`bash
ow project init
\`\`\`

## Usage

**Find a reference:**
\`\`\`bash
ow map search <term> # search by name or keyword
ow map show <repo> # get info for specific repo
\`\`\`

**Get paths for tools:**
\`\`\`bash
ow map show <repo> --ref # reference file path (use with Read)
ow map show <repo> --path # clone directory path
\`\`\`

**Example workflow:**
\`\`\`bash
# 1. Find the repo
ow map search zod

# 2. Get reference path
ow map show colinhacks/zod --ref
# Output: /Users/.../.local/share/offworld/skill/offworld/references/colinhacks-zod.md

# 3. Read the reference with the path from step 2
\`\`\`

## If Reference Not Found

\`\`\`bash
ow pull <owner/repo> # clone + generate reference
ow project init # scan project deps, install references
\`\`\`

## Notes

- Project map (\`.offworld/map.json\`) takes precedence over global map when present
- Reference files are markdown with API docs, patterns, best practices
- Clone paths useful for exploring source code after reading reference

## Additional Resources

- Docs: https://offworld.sh/cli
`;
|
|
897
|
+
/**
 * Ensures the global SKILL.md exists and symlinks the offworld/ directory to all agent skill directories.
 *
 * Creates:
 * - ~/.local/share/offworld/skill/offworld/SKILL.md (static routing template)
 * - ~/.local/share/offworld/skill/offworld/assets/ (for map.json)
 * - ~/.local/share/offworld/skill/offworld/references/ (for reference files)
 * - Symlinks entire offworld/ directory to each agent's skill directory
 */
function installGlobalSkill() {
	const config = loadConfig();
	// Make sure all skill directories exist before writing anything into them.
	for (const dir of [Paths.offworldSkillDir, Paths.offworldAssetsDir, Paths.offworldReferencesDir]) {
		mkdirSync(dir, { recursive: true });
	}
	const skillPath = join(Paths.offworldSkillDir, "SKILL.md");
	// Only seed the template; an existing SKILL.md is never overwritten.
	if (!existsSync(skillPath)) writeFileSync(skillPath, GLOBAL_SKILL_TEMPLATE, "utf-8");
	for (const agentName of config.agents ?? []) {
		const agentConfig = agents[agentName];
		if (!agentConfig) continue;
		const agentSkillDir = expandTilde(join(agentConfig.globalSkillsDir, "offworld"));
		ensureSymlink(Paths.offworldSkillDir, agentSkillDir);
	}
}
|
|
922
|
+
/**
 * Install a reference file for a specific repository.
 *
 * Creates:
 * - ~/.local/share/offworld/skill/offworld/references/{owner-repo}.md
 * - ~/.local/share/offworld/meta/{owner-repo}/meta.json
 * - Updates global map with reference info
 *
 * @param qualifiedName - Qualified key for map storage (e.g., "github.com:owner/repo" or "local:name")
 * @param fullName - Full repo name for file naming (e.g., "owner/repo")
 * @param localPath - Absolute path to the cloned repository
 * @param referenceContent - The generated reference markdown content
 * @param meta - Metadata about the generation (referenceUpdatedAt, commitSha, version)
 * @param keywords - Optional array of keywords for search/routing
 */
function installReference(qualifiedName, fullName, localPath, referenceContent, meta, keywords) {
	// Make sure the skill scaffolding (dirs + SKILL.md) exists first.
	installGlobalSkill();
	const referenceFileName = toReferenceFileName(fullName);
	const metaDirName = toMetaDirName(fullName);
	const referencePath = join(Paths.offworldReferencesDir, referenceFileName);
	mkdirSync(Paths.offworldReferencesDir, { recursive: true });
	writeFileSync(referencePath, referenceContent, "utf-8");
	// Persist generation metadata alongside (but outside) the skill directory.
	const metaDir = join(Paths.metaDir, metaDirName);
	mkdirSync(metaDir, { recursive: true });
	const metaJson = JSON.stringify(meta, null, 2);
	writeFileSync(join(metaDir, "meta.json"), metaJson, "utf-8");
	const map = readGlobalMap();
	const existingEntry = map.repos[qualifiedName];
	// Older map versions keyed entries by provider name ("github:owner/repo")
	// rather than host ("github.com:owner/repo"); merge and then retire them.
	const legacyProviderMap = {
		"github.com": "github",
		"gitlab.com": "gitlab",
		"bitbucket.org": "bitbucket"
	};
	const [host] = qualifiedName.split(":");
	const legacyProvider = host ? legacyProviderMap[host] : void 0;
	const legacyQualifiedName = legacyProvider ? `${legacyProvider}:${fullName}` : void 0;
	const legacyEntry = legacyQualifiedName ? map.repos[legacyQualifiedName] : void 0;
	// Union of references from current + legacy entries, plus the new file.
	const references = [...existingEntry?.references ?? [], ...legacyEntry?.references ?? []];
	if (!references.includes(referenceFileName)) references.push(referenceFileName);
	// Caller-supplied keywords win; otherwise derive them from the content.
	const derivedKeywords = keywords ?? deriveKeywords(fullName, localPath, referenceContent);
	const keywordsSet = new Set([
		...existingEntry?.keywords ?? [],
		...legacyEntry?.keywords ?? [],
		...derivedKeywords
	]);
	map.repos[qualifiedName] = {
		localPath,
		references,
		primary: referenceFileName,
		keywords: Array.from(keywordsSet),
		updatedAt: (/* @__PURE__ */ new Date()).toISOString()
	};
	// Drop the legacy-keyed entry now that it has been folded in.
	if (legacyQualifiedName && legacyQualifiedName in map.repos) delete map.repos[legacyQualifiedName];
	writeGlobalMap(map);
}
|
|
977
|
+
|
|
978
|
+
//#endregion
|
|
979
|
+
//#region src/manifest.ts
|
|
980
|
+
/**
|
|
981
|
+
* Dependency manifest parsing for multiple package ecosystems
|
|
982
|
+
*/
|
|
983
|
+
/** Directory names skipped while walking for workspace package.json files. */
const DEFAULT_IGNORED_DIRS = new Set([
	".git",
	".offworld",
	".turbo",
	"build",
	"dist",
	"node_modules",
	"out"
]);
|
|
992
|
+
/**
 * Detects the manifest type in a directory
 */
function detectManifestType(dir) {
	// Order matters: package.json wins over pyproject.toml, etc.
	const probes = [
		["package.json", "npm"],
		["pyproject.toml", "python"],
		["Cargo.toml", "rust"],
		["go.mod", "go"],
		["requirements.txt", "python"]
	];
	for (const [file, type] of probes) {
		if (existsSync(join(dir, file))) return type;
	}
	return "unknown";
}
|
|
1003
|
+
/**
 * Parses dependencies from manifest files
 */
function parseDependencies(dir) {
	const manifestType = detectManifestType(dir);
	if (manifestType === "npm") return parseNpmDependencies(dir);
	if (manifestType === "python") {
		// pyproject.toml takes precedence over requirements.txt.
		const pyproject = join(dir, "pyproject.toml");
		return existsSync(pyproject) ? parsePyprojectToml(pyproject) : parseRequirementsTxt(join(dir, "requirements.txt"));
	}
	if (manifestType === "rust") return parseCargoToml(join(dir, "Cargo.toml"));
	if (manifestType === "go") return parseGoMod(join(dir, "go.mod"));
	return [];
}
|
|
1015
|
+
/** Root package.json deps merged with workspace deps, sorted by name. */
function parseNpmDependencies(dir) {
	const rootDeps = parsePackageJson(join(dir, "package.json"));
	const workspaceDeps = parseWorkspaceDependencies(dir);
	const merged = mergeDependencies(rootDeps, workspaceDeps);
	return merged.sort((a, b) => a.name.localeCompare(b.name));
}
|
|
1018
|
+
/** Collect deps from every workspace package.json; [] when no workspaces. */
function parseWorkspaceDependencies(dir) {
	const workspacePatterns = getWorkspacePatterns(dir);
	if (workspacePatterns.length === 0) return [];
	const collected = [];
	for (const pkgPath of resolveWorkspacePackageJsonPaths(dir, workspacePatterns)) {
		collected.push(...parsePackageJson(pkgPath));
	}
	// Merge against an empty base to dedupe across workspace packages.
	return mergeDependencies([], collected);
}
|
|
1026
|
+
/**
 * Collect workspace glob patterns from package.json "workspaces" (array or
 * { packages: [...] } form) and from pnpm-workspace.yaml/.yml.
 */
function getWorkspacePatterns(dir) {
	const patterns = new Set();
	const addStrings = (values) => {
		if (!Array.isArray(values)) return;
		for (const value of values) if (typeof value === "string") patterns.add(value);
	};
	const packageJsonPath = join(dir, "package.json");
	if (existsSync(packageJsonPath)) {
		const workspaces = readJson(packageJsonPath)?.workspaces;
		if (Array.isArray(workspaces)) addStrings(workspaces);
		else if (workspaces && typeof workspaces === "object") addStrings(workspaces.packages);
	}
	// Prefer .yaml over .yml; only the first one found is parsed.
	for (const candidate of ["pnpm-workspace.yaml", "pnpm-workspace.yml"]) {
		const candidatePath = join(dir, candidate);
		if (!existsSync(candidatePath)) continue;
		for (const pattern of parsePnpmWorkspacePackages(candidatePath)) patterns.add(pattern);
		break;
	}
	return Array.from(patterns);
}
|
|
1044
|
+
/**
 * Resolve workspace globs ("!"-prefixed patterns are exclusions) to the
 * package.json paths of the matching directories under dir.
 */
function resolveWorkspacePackageJsonPaths(dir, patterns) {
	const includeGlobs = patterns.filter((pattern) => !pattern.startsWith("!"));
	if (includeGlobs.length === 0) return [];
	const includeRegexes = includeGlobs.map(patternToRegex);
	const excludeRegexes = patterns.filter((pattern) => pattern.startsWith("!")).map((pattern) => patternToRegex(pattern.slice(1)));
	const found = new Set();
	for (const relativePath of walkDirectories(dir)) {
		if (!includeRegexes.some((regex) => regex.test(relativePath))) continue;
		if (excludeRegexes.some((regex) => regex.test(relativePath))) continue;
		const candidate = join(dir, relativePath, "package.json");
		if (existsSync(candidate)) found.add(candidate);
	}
	return Array.from(found);
}
|
|
1060
|
+
/**
 * Iteratively list all directories under root (relative, "/"-separated),
 * skipping DEFAULT_IGNORED_DIRS. Unreadable directories are ignored.
 */
function walkDirectories(root) {
	const visited = [];
	const pending = [""];
	while (pending.length > 0) {
		const current = pending.pop();
		const absolute = current ? join(root, current) : root;
		let entries;
		try {
			entries = readdirSync(absolute, { withFileTypes: true });
		} catch {
			continue;
		}
		for (const entry of entries) {
			if (!entry.isDirectory() || DEFAULT_IGNORED_DIRS.has(entry.name)) continue;
			const childRelative = current ? `${current}/${entry.name}` : entry.name;
			visited.push(childRelative);
			pending.push(childRelative);
		}
	}
	return visited;
}
|
|
1082
|
+
/**
 * Compile a workspace glob to an anchored RegExp:
 * "**" matches across path segments, "*" matches within one segment.
 */
function patternToRegex(pattern) {
	let cleaned = pattern.trim().replace(/\\/g, "/");
	if (cleaned.startsWith("./")) cleaned = cleaned.slice(2);
	if (cleaned.endsWith("/")) cleaned = cleaned.slice(0, -1);
	const escaped = cleaned.replace(/[.+^${}()|[\]\\*]/g, "\\$&");
	const globbed = escaped.replace(/\\\*\\\*/g, ".*").replace(/\\\*/g, "[^/]+");
	return new RegExp(`^${globbed}$`);
}
|
|
1089
|
+
/**
 * Extract the `packages:` glob list from a pnpm-workspace.yaml file.
 *
 * Minimal line-based YAML scan: finds the top-level `packages:` key, then
 * collects each `- pattern` list item (surrounding quotes stripped). Returns
 * [] when the file is unreadable or has no packages section.
 *
 * Fix: the original section-termination check tested `line.startsWith(" ")`
 * twice (one test was evidently meant to be a tab), so tab-indented YAML
 * terminated the scan early. Any space/tab indentation now continues the
 * section; the next top-level key still ends it.
 *
 * @param path - Path to pnpm-workspace.yaml or .yml
 * @returns Workspace glob patterns in file order
 */
function parsePnpmWorkspacePackages(path) {
	try {
		const patterns = [];
		let inPackages = false;
		for (const line of readFileSync(path, "utf-8").split("\n")) {
			const trimmed = line.trim();
			if (!trimmed || trimmed.startsWith("#")) continue;
			if (/^packages\s*:/.test(trimmed)) {
				inPackages = true;
				continue;
			}
			if (!inPackages) continue;
			const entryMatch = trimmed.match(/^-\s*(.+)$/);
			if (entryMatch?.[1]) {
				const value = entryMatch[1].trim().replace(/^['"]|['"]$/g, "");
				if (value) patterns.push(value);
				continue;
			}
			// A non-list line at column 0 means the packages block has ended.
			if (!/^[ \t]/.test(line)) break;
		}
		return patterns;
	} catch {
		return [];
	}
}
|
|
1115
|
+
/** Read and parse a JSON file; null on any read or parse failure. */
function readJson(path) {
	try {
		return JSON.parse(readFileSync(path, "utf-8"));
	} catch {
		return null;
	}
}
|
|
1123
|
+
/**
 * Merge two dependency lists by package name.
 *
 * The first-seen version wins (later entries only fill a missing version);
 * a package stays dev-only when every occurrence is a dev dependency.
 */
function mergeDependencies(base, incoming) {
	const byName = new Map();
	for (const dep of [...base, ...incoming]) {
		const prior = byName.get(dep.name);
		if (prior) {
			byName.set(dep.name, {
				name: dep.name,
				version: prior.version ?? dep.version,
				dev: prior.dev && dep.dev
			});
		} else {
			byName.set(dep.name, { ...dep });
		}
	}
	return Array.from(byName.values());
}
|
|
1141
|
+
/**
 * Parse package.json dependencies
 *
 * Collects dependencies, devDependencies, peerDependencies and
 * optionalDependencies in that order; only devDependencies are flagged
 * dev: true. Returns [] when the file is missing or invalid JSON.
 */
function parsePackageJson(path) {
	try {
		const pkg = JSON.parse(readFileSync(path, "utf-8"));
		const deps = [];
		const sections = [
			["dependencies", false],
			["devDependencies", true],
			["peerDependencies", false],
			["optionalDependencies", false]
		];
		for (const [section, dev] of sections) {
			const group = pkg[section];
			if (!group || typeof group !== "object") continue;
			for (const [name, version] of Object.entries(group)) deps.push({ name, version, dev });
		}
		return deps;
	} catch {
		return [];
	}
}
|
|
1174
|
+
/**
 * Parse pyproject.toml dependencies
 *
 * Fix: PEP 621 declares dependencies as an array under [project]
 * (`dependencies = [...]`), not as a `[project.dependencies]` table, so the
 * original section match never fired on standard files and always returned
 * []. The array form is now parsed first; the old `[project.dependencies]`
 * section scan is kept as a fallback for backward compatibility.
 *
 * @param path - Path to pyproject.toml
 * @returns Dependencies (all dev: false); [] on read/parse failure
 */
function parsePyprojectToml(path) {
	try {
		const content = readFileSync(path, "utf-8");
		// Standard PEP 621 form: dependencies = [ "name>=ver", ... ]
		const arrayMatch = content.match(/^\s*dependencies\s*=\s*\[([\s\S]*?)\]/m);
		// Legacy/non-standard form the original parser targeted.
		const sectionMatch = content.match(/\[project\.dependencies\]([\s\S]*?)(?=\[|$)/);
		const body = arrayMatch?.[1] ?? sectionMatch?.[1];
		if (!body) return [];
		const deps = [];
		for (const line of body.split("\n")) {
			const match = line.match(/["']([a-zA-Z0-9_-]+)(?:[>=<~!]+([^"']+))?["']/);
			if (match?.[1]) deps.push({
				name: match[1],
				version: match[2]?.trim(),
				dev: false
			});
		}
		return deps;
	} catch {
		return [];
	}
}
|
|
1197
|
+
/**
 * Parse Cargo.toml dependencies
 *
 * Reads the [dependencies] section only and supports both the simple
 * `name = "ver"` form and the inline-table `name = { version = "ver", ... }`
 * form. Returns [] on read failure or when the section is absent.
 */
function parseCargoToml(path) {
	try {
		const section = readFileSync(path, "utf-8").match(/\[dependencies\]([\s\S]*?)(?=\[|$)/);
		if (!section?.[1]) return [];
		const deps = [];
		for (const line of section[1].split("\n")) {
			const simple = line.match(/^([a-zA-Z0-9_-]+)\s*=\s*"([^"]+)"/);
			const hit = simple ?? line.match(/^([a-zA-Z0-9_-]+)\s*=\s*{.*version\s*=\s*"([^"]+)"/);
			if (hit?.[1] && hit[2]) deps.push({
				name: hit[1],
				version: hit[2],
				dev: false
			});
		}
		return deps;
	} catch {
		return [];
	}
}
|
|
1226
|
+
/**
 * Parse go.mod dependencies
 *
 * Handles both the `require ( ... )` block form and a single-line
 * `require module version` directive. Returns [] on read failure.
 */
function parseGoMod(path) {
	try {
		const content = readFileSync(path, "utf-8");
		const block = content.match(/require\s*\(([\s\S]*?)\)/);
		const deps = [];
		if (!block?.[1]) {
			// No parenthesized block: look for a single require directive.
			const single = content.match(/require\s+([^\s]+)\s+([^\s]+)/);
			if (single?.[1] && single[2]) deps.push({
				name: single[1],
				version: single[2],
				dev: false
			});
			return deps;
		}
		for (const line of block[1].split("\n")) {
			const match = line.match(/^\s*([^\s]+)\s+([^\s]+)/);
			if (match?.[1] && match[2]) deps.push({
				name: match[1],
				version: match[2],
				dev: false
			});
		}
		return deps;
	} catch {
		return [];
	}
}
|
|
1257
|
+
/**
 * Parse requirements.txt dependencies
 *
 * Skips blank lines and # comments; splits `name<op>version` specifiers.
 * Returns [] on read failure.
 */
function parseRequirementsTxt(path) {
	try {
		const deps = [];
		for (const rawLine of readFileSync(path, "utf-8").split("\n")) {
			const line = rawLine.trim();
			if (!line || line.startsWith("#")) continue;
			const match = line.match(/^([a-zA-Z0-9_-]+)(?:[>=<~!]+(.+))?/);
			if (match?.[1]) deps.push({
				name: match[1],
				version: match[2]?.trim(),
				dev: false
			});
		}
		return deps;
	} catch {
		return [];
	}
}
|
|
1280
|
+
|
|
1281
|
+
//#endregion
|
|
1282
|
+
//#region src/dep-mappings.ts
|
|
1283
|
+
/**
|
|
1284
|
+
* Dependency name to GitHub repo resolution:
|
|
1285
|
+
* 1. Query npm registry for repository.url
|
|
1286
|
+
* 2. Fall back to FALLBACK_MAPPINGS for packages missing repository field
|
|
1287
|
+
* 3. Return unknown (caller handles)
|
|
1288
|
+
*/
|
|
1289
|
+
/**
 * Fallback mappings for packages where npm registry doesn't have repository.url.
 * Only add packages here that genuinely don't have the field set.
 * Keys are npm package names; values are "owner/repo" GitHub slugs.
 */
const FALLBACK_MAPPINGS = {
	"@convex-dev/react-query": "get-convex/convex-react-query",
	"@opencode-ai/sdk": "anomalyco/opencode-sdk-js"
};
|
|
1297
|
+
/**
 * Parse GitHub repo from various git URL formats.
 * Handles:
 * - git+https://github.com/owner/repo.git
 * - https://github.com/owner/repo
 * - git://github.com/owner/repo.git
 * - github:owner/repo
 *
 * @param url - Repository URL in any supported form
 * @returns "owner/repo", or null when the URL is not a GitHub repo
 */
function parseGitHubUrl(url) {
	const patterns = [/github\.com[/:]([\w-]+)\/([\w.-]+?)(?:\.git)?$/, /^github:([\w-]+)\/([\w.-]+)$/];
	for (const pattern of patterns) {
		const found = pattern.exec(url);
		if (found) return `${found[1]}/${found[2]}`;
	}
	return null;
}
|
|
1312
|
+
/**
 * Fallback to npm registry to extract repository.url.
 * Returns null if package not found, no repo field, or not a GitHub repo.
 */
async function resolveFromNpm(packageName) {
	try {
		const response = await fetch(`https://registry.npmjs.org/${packageName}`);
		if (!response.ok) return null;
		const parsed = NpmPackageResponseSchema.safeParse(await response.json());
		if (!parsed.success) return null;
		const repoUrl = parsed.data.repository?.url;
		return repoUrl ? parseGitHubUrl(repoUrl) : null;
	} catch {
		// Network/JSON failures are treated the same as "no repo found".
		return null;
	}
}
|
|
1330
|
+
/**
 * Resolution order:
 * 1. Query npm registry for repository.url
 * 2. Check FALLBACK_MAPPINGS for packages missing repository field
 * 3. Return unknown
 *
 * Fix: the fallback lookup used `dep in FALLBACK_MAPPINGS`, which walks the
 * prototype chain — a dependency named e.g. "toString" or "constructor"
 * would be misreported as source "fallback". Object.hasOwn restricts the
 * check to the mapping's own keys.
 *
 * @param dep - npm package name
 * @returns { dep, repo: "owner/repo" | null, source: "npm" | "fallback" | "unknown" }
 */
async function resolveDependencyRepo(dep) {
	const npmRepo = await resolveFromNpm(dep);
	if (npmRepo) return {
		dep,
		repo: npmRepo,
		source: "npm"
	};
	if (Object.hasOwn(FALLBACK_MAPPINGS, dep)) return {
		dep,
		repo: FALLBACK_MAPPINGS[dep] ?? null,
		source: "fallback"
	};
	return {
		dep,
		repo: null,
		source: "unknown"
	};
}
|
|
1354
|
+
|
|
1355
|
+
//#endregion
|
|
1356
|
+
//#region src/reference-matcher.ts
|
|
1357
|
+
/**
|
|
1358
|
+
* Reference matching utilities for dependency resolution
|
|
1359
|
+
*
|
|
1360
|
+
* Maps dependencies to their reference status (installed, remote, generate, unknown)
|
|
1361
|
+
*/
|
|
1362
|
+
/**
 * Check if a reference is installed locally.
 * A reference is considered installed if {owner-repo}.md exists in offworld/references/.
 *
 * @param repo - Repo name in owner/repo format
 * @returns true if reference is installed locally
 */
function isReferenceInstalled(repo) {
	return existsSync(join(Paths.offworldReferencesDir, toReferenceFileName(repo)));
}
|
|
1373
|
+
/**
 * Match dependencies to their reference availability status.
 *
 * Status logic:
 * - installed: {owner-repo}.md exists in offworld/references/
 * - generate: Has valid GitHub repo but needs AI generation (slow, uses tokens)
 * - unknown: No GitHub repo found
 * (The "remote" status is only produced by the async variant below.)
 *
 * @param resolvedDeps - Array of resolved dependencies with repo info
 * @returns Array of reference matches with status
 */
function matchDependenciesToReferences(resolvedDeps) {
	return resolvedDeps.map(({ dep, repo, source }) => {
		if (!repo) return {
			dep,
			repo: null,
			status: "unknown",
			source
		};
		return {
			dep,
			repo,
			status: isReferenceInstalled(repo) ? "installed" : "generate",
			source
		};
	});
}
|
|
1407
|
+
/**
 * Match dependencies to their reference availability status with remote check.
 * This is async because it checks the remote server for each dependency.
 *
 * Status logic:
 * - installed: {owner-repo}.md exists in offworld/references/
 * - remote: Reference exists on offworld.sh (quick pull)
 * - generate: Has valid GitHub repo but needs AI generation (slow, uses tokens)
 * - unknown: No GitHub repo found
 *
 * @param resolvedDeps - Array of resolved dependencies with repo info
 * @returns Promise of array of reference matches with status
 */
async function matchDependenciesToReferencesWithRemoteCheck(resolvedDeps) {
	const { checkRemote } = await import("./sync-DuLJ5wla.mjs");
	const classify = async ({ dep, repo, source }) => {
		if (!repo) return {
			dep,
			repo: null,
			status: "unknown",
			source
		};
		if (isReferenceInstalled(repo)) return {
			dep,
			repo,
			status: "installed",
			source
		};
		try {
			// A failed remote probe silently degrades to "generate".
			const remote = await checkRemote(repo);
			if (remote.exists) return {
				dep,
				repo,
				status: "remote",
				source
			};
		} catch {}
		return {
			dep,
			repo,
			status: "generate",
			source
		};
	};
	return await Promise.all(resolvedDeps.map(classify));
}
|
|
1451
|
+
|
|
1452
|
+
//#endregion
|
|
1453
|
+
//#region src/repo-manager.ts
|
|
1454
|
+
/**
 * Recursively sum the sizes of all files under a directory.
 * Missing or unreadable paths contribute 0 rather than throwing.
 */
function getDirSize(dirPath) {
	if (!existsSync(dirPath)) return 0;
	let total = 0;
	try {
		for (const entry of readdirSync(dirPath, { withFileTypes: true })) {
			const entryPath = join(dirPath, entry.name);
			if (entry.isDirectory()) {
				total += getDirSize(entryPath);
			} else if (entry.isFile()) {
				try {
					total += statSync(entryPath).size;
				} catch {}
			}
		}
	} catch {}
	return total;
}
|
|
1469
|
+
/**
 * Best-effort "last touched" time for a clone: the later of the directory's
 * mtime and the .git/FETCH_HEAD mtime (updated by git fetch/pull).
 * Returns null when the path does not exist.
 */
function getLastAccessTime(dirPath) {
	if (!existsSync(dirPath)) return null;
	let latest = null;
	try {
		latest = statSync(dirPath).mtime;
		const fetchHeadPath = join(dirPath, ".git", "FETCH_HEAD");
		if (existsSync(fetchHeadPath)) {
			const fetchTime = statSync(fetchHeadPath).mtime;
			if (!latest || fetchTime > latest) latest = fetchTime;
		}
	} catch {}
	return latest;
}
|
|
1482
|
+
/**
 * Case-insensitive glob match: "*" matches any run, "?" any single char;
 * an empty pattern or bare "*" matches everything.
 */
function matchesPattern(name, pattern) {
	if (!pattern || pattern === "*") return true;
	const escaped = pattern.replace(/[.+^${}()|[\]\\]/g, "\\$&");
	const regex = new RegExp(`^${escaped.replace(/\*/g, ".*").replace(/\?/g, ".")}$`, "i");
	return regex.test(name);
}
|
|
1486
|
+
// Defer to the next macrotask so long synchronous scans keep progress callbacks responsive.
const yieldToEventLoop = () => new Promise((resolve) => setImmediate(resolve));
|
|
1487
|
+
/**
 * Summarize the state of every repo in the global map.
 *
 * Counts entries missing on disk, entries with at least one reference, and
 * total disk usage of present clones; yields to the event loop between
 * entries so the onProgress callback stays responsive.
 *
 * @param options.onProgress - Optional (current, total, qualifiedName) callback
 * @returns { total, withReference, missing, diskBytes }
 */
async function getRepoStatus(options = {}) {
	const { onProgress } = options;
	const map = readGlobalMap();
	const qualifiedNames = Object.keys(map.repos);
	const total = qualifiedNames.length;
	const summary = {
		total,
		withReference: 0,
		missing: 0,
		diskBytes: 0
	};
	let index = 0;
	for (const qualifiedName of qualifiedNames) {
		index += 1;
		const entry = map.repos[qualifiedName];
		onProgress?.(index, total, qualifiedName);
		await yieldToEventLoop();
		if (!existsSync(entry.localPath)) {
			summary.missing += 1;
			continue;
		}
		if (entry.references.length > 0) summary.withReference += 1;
		summary.diskBytes += getDirSize(entry.localPath);
	}
	return summary;
}
|
|
1514
|
+
/**
 * Update every clone listed in the global map (optionally filtered by a glob
 * pattern), reporting per-repo progress and collecting the outcome buckets.
 *
 * @param options.pattern - Optional glob; only matching qualified names are processed
 * @param options.dryRun - When true, report what would be updated without touching git
 * @param options.unshallow - Pass-through to updateRepo to convert shallow clones
 * @param options.onProgress - Optional (qualifiedName, state, detail?) callback
 * @returns { updated, skipped, unshallowed, errors } qualified-name buckets
 */
async function updateAllRepos(options = {}) {
	const { pattern, dryRun = false, unshallow = false, onProgress } = options;
	const map = readGlobalMap();
	const qualifiedNames = Object.keys(map.repos);
	const updated = [];
	const skipped = [];
	const unshallowed = [];
	const errors = [];
	for (const qualifiedName of qualifiedNames) {
		const entry = map.repos[qualifiedName];
		// Pattern filter: silently ignore non-matching entries (no progress event).
		if (pattern && !matchesPattern(qualifiedName, pattern)) continue;
		if (!existsSync(entry.localPath)) {
			skipped.push(qualifiedName);
			onProgress?.(qualifiedName, "skipped", "missing on disk");
			continue;
		}
		if (dryRun) {
			// Dry run counts the repo as "would update" without running git.
			updated.push(qualifiedName);
			onProgress?.(qualifiedName, "updated", "would update");
			continue;
		}
		onProgress?.(qualifiedName, "updating");
		try {
			const result = await updateRepo(qualifiedName, { unshallow });
			if (result.unshallowed) {
				unshallowed.push(qualifiedName);
				onProgress?.(qualifiedName, "unshallowed", "converted to full clone");
			}
			if (result.updated) {
				updated.push(qualifiedName);
				onProgress?.(qualifiedName, "updated", `${result.previousSha.slice(0, 7)} → ${result.currentSha.slice(0, 7)}`);
			} else if (!result.unshallowed) {
				// Neither updated nor unshallowed: nothing changed for this repo.
				skipped.push(qualifiedName);
				onProgress?.(qualifiedName, "skipped", "already up to date");
			}
		} catch (err) {
			// GitError carries a user-facing message; anything else is stringified.
			const message = err instanceof GitError ? err.message : String(err);
			errors.push({
				repo: qualifiedName,
				error: message
			});
			onProgress?.(qualifiedName, "error", message);
		}
	}
	return {
		updated,
		skipped,
		unshallowed,
		errors
	};
}
|
|
1565
|
+
/**
 * Reconcile the global map with the filesystem.
 *
 * Phase 1: map entries whose local clone is gone from disk are removed from
 * the index (unless dryRun). Phase 2: the repo root is walked as
 * <root>/<provider>/<owner>/<repo> and any git checkout not referenced by a
 * map entry is reported as an orphan — orphans are only listed, never deleted.
 *
 * @param {object} [options]
 * @param {boolean} [options.dryRun=false] - report without modifying the map
 * @param {(repo: string, reason: string) => void} [options.onProgress]
 * @returns {Promise<{removedFromIndex: string[], orphanedDirs: string[]}>}
 */
async function pruneRepos(options = {}) {
  const { dryRun = false, onProgress } = options;
  const map = readGlobalMap();
  const qualifiedNames = Object.keys(map.repos);
  const removedFromIndex = [];
  const orphanedDirs = [];
  // Phase 1: drop index entries that no longer exist on disk.
  for (const qualifiedName of qualifiedNames) {
    const entry = map.repos[qualifiedName];
    await yieldToEventLoop();
    if (!existsSync(entry.localPath)) {
      onProgress?.(qualifiedName, "missing on disk");
      removedFromIndex.push(qualifiedName);
      if (!dryRun) removeGlobalMapEntry(qualifiedName);
    }
  }
  // Phase 2: scan the on-disk layout for checkouts the map does not know about.
  const repoRoot = getRepoRoot(loadConfig());
  if (existsSync(repoRoot)) {
    // Compare against the snapshot read above; phase-1 removals only deleted
    // entries that are missing on disk, so they cannot appear in this walk.
    const indexedPaths = new Set(Object.values(map.repos).map((r) => r.localPath));
    try {
      const providers = readdirSync(repoRoot, { withFileTypes: true });
      for (const provider of providers) {
        if (!provider.isDirectory()) continue;
        const providerPath = join(repoRoot, provider.name);
        const owners = readdirSync(providerPath, { withFileTypes: true });
        for (const owner of owners) {
          if (!owner.isDirectory()) continue;
          const ownerPath = join(providerPath, owner.name);
          const repoNames = readdirSync(ownerPath, { withFileTypes: true });
          for (const repoName of repoNames) {
            await yieldToEventLoop();
            if (!repoName.isDirectory()) continue;
            const repoPath = join(ownerPath, repoName.name);
            // Only directories that look like git checkouts count as repos.
            if (!existsSync(join(repoPath, ".git"))) continue;
            if (!indexedPaths.has(repoPath)) {
              const fullName = `${owner.name}/${repoName.name}`;
              onProgress?.(fullName, "not in map");
              orphanedDirs.push(repoPath);
            }
          }
        }
      }
    } catch {}
    // NOTE(review): the catch above silently abandons the remainder of the
    // scan on any readdir failure, so the orphan list may be incomplete.
  }
  return {
    removedFromIndex,
    orphanedDirs
  };
}
|
|
1613
|
+
/**
 * Garbage-collect cloned repositories from disk and from the global map.
 *
 * A repo is removed when it matches either enabled criterion:
 *  - olderThanDays: its last access time is older than the cutoff
 *  - withoutReference: its map entry has no reference files
 * If both apply, the reported reason concatenates both.
 *
 * @param {object} [options]
 * @param {number} [options.olderThanDays] - age threshold in days (unset = no age check)
 * @param {boolean} [options.withoutReference=false] - also collect repos without references
 * @param {boolean} [options.dryRun=false] - report candidates without deleting anything
 * @param {(repo: string, reason: string, sizeBytes: number) => void} [options.onProgress]
 * @returns {Promise<{removed: Array<{repo: string, reason: string, sizeBytes: number}>, freedBytes: number}>}
 */
async function gcRepos(options = {}) {
  const { olderThanDays, withoutReference = false, dryRun = false, onProgress } = options;
  const map = readGlobalMap();
  const qualifiedNames = Object.keys(map.repos);
  const removed = [];
  let freedBytes = 0;
  const now = new Date();
  // Cutoff is only computed when an age threshold was requested.
  const cutoffDate = olderThanDays ? new Date(now.getTime() - olderThanDays * 24 * 60 * 60 * 1e3) : null;
  for (const qualifiedName of qualifiedNames) {
    const entry = map.repos[qualifiedName];
    // Keep the event loop responsive during potentially large disk scans.
    await yieldToEventLoop();
    // Entries already missing on disk are not GC candidates (pruneRepos
    // handles reconciling the index for those).
    if (!existsSync(entry.localPath)) continue;
    let shouldRemove = false;
    let reason = "";
    if (cutoffDate) {
      const lastAccess = getLastAccessTime(entry.localPath);
      // A null lastAccess (unknown access time) never triggers removal.
      if (lastAccess && lastAccess < cutoffDate) {
        shouldRemove = true;
        reason = `not accessed in ${olderThanDays}+ days`;
      }
    }
    if (withoutReference && entry.references.length === 0) {
      shouldRemove = true;
      reason = reason ? `${reason}, no reference` : "no reference";
    }
    if (!shouldRemove) continue;
    // Measure size before deletion so it can be reported and accumulated.
    const sizeBytes = getDirSize(entry.localPath);
    onProgress?.(qualifiedName, reason, sizeBytes);
    if (!dryRun) {
      rmSync(entry.localPath, {
        recursive: true,
        force: true
      });
      // Delete any reference files that pointed at this clone.
      for (const refFile of entry.references) {
        const refPath = join(Paths.offworldReferencesDir, refFile);
        if (existsSync(refPath)) rmSync(refPath, { force: true });
      }
      if (entry.primary) {
        // entry.primary looks like a markdown reference filename whose
        // basename doubles as the meta directory name — TODO confirm this
        // matches toMetaDirName's convention.
        const metaDirName = entry.primary.replace(/\.md$/, "");
        const metaPath = join(Paths.metaDir, metaDirName);
        if (existsSync(metaPath)) rmSync(metaPath, {
          recursive: true,
          force: true
        });
      }
      removeGlobalMapEntry(qualifiedName);
    }
    removed.push({
      repo: qualifiedName,
      reason,
      sizeBytes
    });
    freedBytes += sizeBytes;
  }
  return {
    removed,
    freedBytes
  };
}
|
|
1672
|
+
/**
 * Scan the repo root (<root>/<provider>/<owner>/<repo>) for git checkouts that
 * are not yet recorded in the global map and, unless dryRun, register them.
 *
 * Fix: directory reads are now tolerated per-directory. Previously the whole
 * walk sat inside a single `try { … } catch {}`, so one unreadable provider or
 * owner directory silently aborted the ENTIRE scan; now an unreadable
 * directory is simply skipped and discovery continues.
 *
 * @param {object} [options]
 * @param {boolean} [options.dryRun=false] - report without writing map entries
 * @param {string} [options.repoRoot] - override the configured repo root
 * @param {(fullName: string, providerHost: string) => void} [options.onProgress]
 * @returns {Promise<{discovered: Array<{fullName: string, qualifiedName: string, localPath: string}>, alreadyIndexed: number}>}
 */
async function discoverRepos(options = {}) {
  const { dryRun = false, onProgress } = options;
  const config = loadConfig();
  const repoRoot = options.repoRoot ?? getRepoRoot(config);
  const discovered = [];
  let alreadyIndexed = 0;
  if (!existsSync(repoRoot)) return {
    discovered,
    alreadyIndexed
  };
  const map = readGlobalMap();
  const indexedPaths = new Set(Object.values(map.repos).map((r) => r.localPath));
  // Known provider directory names map to canonical hosts; anything else is
  // treated as a literal hostname. Hoisted out of the loop (loop-invariant).
  const HOSTS = {
    github: "github.com",
    gitlab: "gitlab.com",
    bitbucket: "bitbucket.org"
  };
  // Best-effort listing: an unreadable directory yields an empty list instead
  // of aborting the scan.
  const listSubdirs = (dir) => {
    try {
      return readdirSync(dir, { withFileTypes: true }).filter((d) => d.isDirectory());
    } catch {
      return [];
    }
  };
  for (const provider of listSubdirs(repoRoot)) {
    const providerPath = join(repoRoot, provider.name);
    const providerHost = HOSTS[provider.name] ?? provider.name;
    for (const owner of listSubdirs(providerPath)) {
      const ownerPath = join(providerPath, owner.name);
      for (const repoDir of listSubdirs(ownerPath)) {
        await yieldToEventLoop();
        const repoPath = join(ownerPath, repoDir.name);
        // Only directories containing a .git marker count as checkouts.
        if (!existsSync(join(repoPath, ".git"))) continue;
        if (indexedPaths.has(repoPath)) {
          alreadyIndexed++;
          continue;
        }
        const fullName = `${owner.name}/${repoDir.name}`;
        const qualifiedName = `${providerHost}:${fullName}`;
        onProgress?.(fullName, providerHost);
        if (!dryRun) upsertGlobalMapEntry(qualifiedName, {
          localPath: repoPath,
          references: [],
          primary: "",
          keywords: [],
          updatedAt: new Date().toISOString()
        });
        discovered.push({
          fullName,
          qualifiedName,
          localPath: repoPath
        });
      }
    }
  }
  return {
    discovered,
    alreadyIndexed
  };
}
|
|
1732
|
+
|
|
1733
|
+
//#endregion
|
|
1734
|
+
//#region src/models.ts
|
|
1735
|
+
// Public endpoint serving the aggregated provider/model catalog.
const MODELS_DEV_URL = "https://models.dev/api.json";
// Module-level cache of the last successfully parsed catalog and the
// timestamp (ms epoch) at which it was stored.
let cachedData = null;
let cacheTime = 0;
// Cached catalog data is considered fresh for 5 minutes.
const CACHE_TTL_MS = 300 * 1e3;
|
|
1739
|
+
/**
 * Fetch the models.dev catalog, serving the module-level cache when the
 * previously fetched result is still fresh (see CACHE_TTL_MS).
 *
 * @returns {Promise<object>} schema-validated catalog data
 * @throws {Error} on HTTP failure, timeout, or schema mismatch
 */
async function fetchModelsDevData() {
  const requestedAt = Date.now();
  const cacheIsFresh = cachedData !== null && requestedAt - cacheTime < CACHE_TTL_MS;
  if (cacheIsFresh) return cachedData;
  // 10s timeout guards against a hung endpoint.
  const response = await fetch(MODELS_DEV_URL, { signal: AbortSignal.timeout(1e4) });
  if (!response.ok) {
    throw new Error(`Failed to fetch models.dev: ${response.status} ${response.statusText}`);
  }
  const payload = await response.json();
  const parsed = ModelsDevDataSchema.safeParse(payload);
  if (!parsed.success) {
    throw new Error(`Invalid models.dev response: ${parsed.error.message}`);
  }
  cachedData = parsed.data;
  cacheTime = requestedAt;
  return cachedData;
}
|
|
1754
|
+
/**
 * List all available providers from models.dev, sorted by display name.
 *
 * @returns {Promise<Array<{id: string, name: string, env: string[]}>>}
 */
async function listProviders() {
  const catalog = await fetchModelsDevData();
  const providers = [];
  for (const provider of Object.values(catalog)) {
    providers.push({
      id: provider.id,
      name: provider.name,
      env: provider.env ?? []
    });
  }
  providers.sort((a, b) => a.name.localeCompare(b.name));
  return providers;
}
|
|
1765
|
+
/**
 * Get a specific provider with its non-deprecated models, sorted by model
 * name, or null when the provider id is unknown.
 *
 * @param {string} providerId
 * @returns {Promise<object|null>}
 */
async function getProvider(providerId) {
  const catalog = await fetchModelsDevData();
  const provider = catalog[providerId];
  if (!provider) return null;
  const models = [];
  for (const model of Object.values(provider.models)) {
    // Deprecated models are hidden from callers.
    if (model.status === "deprecated") continue;
    models.push({
      id: model.id,
      name: model.name,
      reasoning: model.reasoning ?? false,
      experimental: model.experimental,
      status: model.status
    });
  }
  models.sort((a, b) => a.name.localeCompare(b.name));
  return {
    id: provider.id,
    name: provider.name,
    env: provider.env ?? [],
    models
  };
}
|
|
1784
|
+
/**
 * Get all providers with their non-deprecated models, sorted by name at both
 * the provider and model level.
 *
 * @returns {Promise<Array<object>>}
 */
async function listProvidersWithModels() {
  const byName = (a, b) => a.name.localeCompare(b.name);
  // Shape a raw catalog model into the public summary form.
  const toModelSummary = (m) => ({
    id: m.id,
    name: m.name,
    reasoning: m.reasoning ?? false,
    experimental: m.experimental,
    status: m.status
  });
  const catalog = await fetchModelsDevData();
  return Object.values(catalog)
    .map((provider) => ({
      id: provider.id,
      name: provider.name,
      env: provider.env ?? [],
      models: Object.values(provider.models)
        .filter((m) => m.status !== "deprecated")
        .map(toModelSummary)
        .sort(byName)
    }))
    .sort(byName);
}
|
|
1802
|
+
/**
 * Validate that a provider/model combination exists on models.dev.
 *
 * @param {string} providerId
 * @param {string} modelId
 * @returns {Promise<{valid: boolean, error?: string}>} invalid results carry a
 *   message listing up to ten known ids for the failing level.
 */
async function validateProviderModel(providerId, modelId) {
  const provider = await getProvider(providerId);
  if (!provider) {
    const providers = await listProviders();
    const sample = providers.slice(0, 10).map((p) => p.id).join(", ");
    const suffix = providers.length > 10 ? "..." : "";
    return {
      valid: false,
      error: `Provider "${providerId}" not found. Available: ${sample}${suffix}`
    };
  }
  const modelExists = provider.models.some((m) => m.id === modelId);
  if (!modelExists) {
    const sample = provider.models.slice(0, 10).map((m) => m.id).join(", ");
    const suffix = provider.models.length > 10 ? "..." : "";
    return {
      valid: false,
      error: `Model "${modelId}" not found for provider "${providerId}". Available: ${sample}${suffix}`
    };
  }
  return { valid: true };
}
|
|
1820
|
+
|
|
1821
|
+
//#endregion
|
|
1822
|
+
export { getToken as A, NotGitRepoError as B, getAllAgentConfigs as C, clearAuthData as D, TokenExpiredError as E, saveAuthData as F, DEFAULT_IGNORE_PATTERNS as G, RepoSourceError as H, getMapEntry as I, VERSION as K, getProjectMapPath as L, isLoggedIn as M, loadAuthData as N, getAuthPath as O, refreshAccessToken as P, resolveRepoKey as R, getAgentConfig as S, NotLoggedInError as T, getReferenceFileNameForSource as U, PathNotFoundError as V, parseRepoInput as W, parseDependencies as _, discoverRepos as a, agents as b, pruneRepos as c, matchDependenciesToReferences as d, matchDependenciesToReferencesWithRemoteCheck as f, detectManifestType as g, resolveFromNpm as h, validateProviderModel as i, getTokenOrNull as j, getAuthStatus as k, updateAllRepos as l, resolveDependencyRepo as m, listProviders as n, gcRepos as o, FALLBACK_MAPPINGS as p, listProvidersWithModels as r, getRepoStatus as s, getProvider as t, isReferenceInstalled as u, installGlobalSkill as v, AuthError as w, detectInstalledAgents as x, installReference as y, searchMap as z };
|
|
1823
|
+
//# sourceMappingURL=public-DbZeh2Mr.mjs.map
|