@neuroverseos/governance 0.8.1 → 0.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  import {
2
2
  getLens
3
- } from "./chunk-F2LWMOM5.js";
3
+ } from "./chunk-TCGGED4G.js";
4
4
  import {
5
5
  loadWorld
6
6
  } from "./chunk-I4RTIMLX.js";
@@ -859,55 +859,583 @@ async function fetchNotionAPI(url, headers, init) {
859
859
  return await res.json();
860
860
  }
861
861
 
862
- // src/radiant/core/discovery.ts
863
- import { existsSync, readdirSync, readFileSync, statSync } from "fs";
864
- import { join, resolve, basename } from "path";
862
// src/radiant/adapters/linear.ts
// Fetch recent Linear activity (issues + cycles) and reduce it to a
// normalized event stream plus aggregate delivery signals.
//
// apiKey  - Linear API key, sent as the raw Authorization header value.
// options - { windowDays?, maxIssues?, teamIds? }; defaults: 14 days, 200 issues.
// Returns { events, signals }; events are sorted oldest-first.
async function fetchLinearActivity(apiKey, options = {}) {
  const windowDays = options.windowDays ?? 14;
  const maxIssues = options.maxIssues ?? 200;
  const since = new Date(Date.now() - windowDays * 24 * 60 * 60 * 1e3);
  const sinceIso = since.toISOString();
  const nowIso = (/* @__PURE__ */ new Date()).toISOString();
  // Optional team scoping is spliced into the query text; ids are
  // JSON.stringify'd so they arrive quoted/escaped in the GraphQL literal.
  const teamFilter = options.teamIds && options.teamIds.length > 0 ? `team: { id: { in: [${options.teamIds.map((t) => JSON.stringify(t)).join(", ")}] } }` : "";
  const issuesQuery = `
  query RadiantIssues($since: DateTimeOrDuration!, $first: Int!) {
    issues(
      filter: {
        updatedAt: { gte: $since }
        ${teamFilter}
      }
      first: $first
      orderBy: updatedAt
    ) {
      nodes {
        id
        identifier
        title
        url
        createdAt
        updatedAt
        completedAt
        canceledAt
        state { name type }
        assignee { id name email }
        creator { id name }
        team { id name }
        project { id name }
        cycle { id number startsAt endsAt }
        comments(first: 20) {
          nodes { id body createdAt user { id name } }
        }
      }
    }
  }
  `;
  // Cycles that ended inside the window — used for commit-vs-complete rate.
  const cyclesQuery = `
  query RadiantCycles($since: DateTimeOrDuration!, $now: DateTimeOrDuration!) {
    cycles(
      filter: { endsAt: { gte: $since, lte: $now } }
      first: 20
    ) {
      nodes {
        id
        number
        startsAt
        endsAt
        issueCountHistory
        completedIssueCountHistory
        team { id name }
      }
    }
  }
  `;
  // The two queries are independent; run them in parallel.
  const [issuesResponse, cyclesResponse] = await Promise.all([
    fetchLinearGraphQL(apiKey, issuesQuery, {
      since: sinceIso,
      first: maxIssues
    }),
    fetchLinearGraphQL(apiKey, cyclesQuery, {
      since: sinceIso,
      now: nowIso
    })
  ]);
  const rawIssues = issuesResponse.issues?.nodes ?? [];
  const rawCycles = cyclesResponse.cycles?.nodes ?? [];
  const events = [];
  const assignees = /* @__PURE__ */ new Set();
  const projects = /* @__PURE__ */ new Map();
  let issuesCreated = 0;
  let issuesCompleted = 0;
  let issuesOpen = 0;
  let issuesStalled = 0;
  let commentsTotal = 0;
  const now = Date.now();
  // An in-progress issue untouched for 14+ days counts as stalled
  // (fixed threshold, independent of windowDays).
  const stallThresholdMs = 14 * 24 * 60 * 60 * 1e3;
  for (const issue of rawIssues) {
    const created = new Date(issue.createdAt);
    const updated = new Date(issue.updatedAt);
    const completed = issue.completedAt ? new Date(issue.completedAt) : null;
    const assigneeId = issue.assignee?.id ?? "unassigned";
    if (assigneeId !== "unassigned") assignees.add(assigneeId);
    if (issue.project) {
      projects.set(issue.project.name, (projects.get(issue.project.name) ?? 0) + 1);
    }
    const actor = {
      id: assigneeId,
      kind: "human",
      name: issue.assignee?.name ?? "unassigned"
    };
    // Creation event — only when the issue was created inside the window.
    if (created >= since) {
      issuesCreated++;
      events.push({
        id: `linear-created-${issue.id}`,
        timestamp: issue.createdAt,
        actor: {
          id: issue.creator?.id ?? "unknown",
          kind: "human",
          name: issue.creator?.name ?? "unknown"
        },
        kind: "issue_created",
        content: `[${issue.identifier}] ${issue.title}`,
        metadata: {
          issueId: issue.id,
          url: issue.url,
          team: issue.team?.name,
          project: issue.project?.name,
          state: issue.state?.name
        }
      });
    }
    // Completion event; cycleDays measures cycle-start -> completion in days.
    if (completed && completed >= since) {
      issuesCompleted++;
      events.push({
        id: `linear-completed-${issue.id}`,
        timestamp: issue.completedAt,
        actor,
        kind: "issue_completed",
        content: `[${issue.identifier}] ${issue.title}`,
        metadata: {
          issueId: issue.id,
          url: issue.url,
          team: issue.team?.name,
          project: issue.project?.name,
          cycleDays: issue.cycle?.startsAt && issue.completedAt ? Math.round(
            (new Date(issue.completedAt).getTime() - new Date(issue.cycle.startsAt).getTime()) / (24 * 60 * 60 * 1e3)
          ) : null
        }
      });
    }
    // Open = neither completed nor canceled. NOTE(review): the query only
    // returns issues updated inside the window, so long-idle open issues are
    // not counted here — confirm this undercount is intended.
    if (!completed && !issue.canceledAt) {
      issuesOpen++;
      const isInProgress = issue.state?.type === "started";
      const idleMs = now - updated.getTime();
      if (isInProgress && idleMs > stallThresholdMs) issuesStalled++;
    }
    // Comment events (capped at the first 20 per issue by the query above).
    for (const comment of issue.comments?.nodes ?? []) {
      const commentedAt = new Date(comment.createdAt);
      if (commentedAt < since) continue;
      commentsTotal++;
      events.push({
        id: `linear-comment-${comment.id}`,
        timestamp: comment.createdAt,
        actor: {
          id: comment.user?.id ?? "unknown",
          kind: "human",
          name: comment.user?.name ?? "unknown"
        },
        kind: "issue_comment",
        content: comment.body.slice(0, 280),
        metadata: {
          issueId: issue.id,
          issueIdentifier: issue.identifier,
          url: issue.url
        }
      });
    }
  }
  // Average completed/committed across cycles that ended in the window:
  // committed = first issueCountHistory sample, completed = last sample.
  let cycleCompletionRate = null;
  if (rawCycles.length > 0) {
    const rates = [];
    for (const cycle of rawCycles) {
      const committed = cycle.issueCountHistory?.at(0) ?? 0;
      const completed = cycle.completedIssueCountHistory?.at(-1) ?? 0;
      if (committed > 0) rates.push(completed / committed);
    }
    if (rates.length > 0) {
      cycleCompletionRate = Math.round(rates.reduce((a, b) => a + b, 0) / rates.length * 100) / 100;
    }
  }
  // Top five projects by issue count in the window.
  const topProjects = [...projects.entries()].sort((a, b) => b[1] - a[1]).slice(0, 5).map(([name]) => name);
  const signals = {
    issuesCreated,
    issuesCompleted,
    issuesOpen,
    issuesStalled,
    cycleCompletionRate,
    uniqueAssignees: assignees.size,
    commentsTotal,
    topProjects
  };
  // Chronological order, oldest first.
  events.sort((a, b) => Date.parse(a.timestamp) - Date.parse(b.timestamp));
  return { events, signals };
}
1050
// Render Linear aggregate signals as a markdown prompt section.
// Returns "" when there was no issue activity at all in the window.
function formatLinearSignalsForPrompt(signals) {
  const hadActivity = signals.issuesCreated !== 0 || signals.issuesCompleted !== 0 || signals.issuesOpen !== 0;
  if (!hadActivity) {
    return "";
  }
  const out = [
    "## Linear Activity (planned work vs. shipped outcome)",
    "",
    `${signals.issuesCreated} issues created, ${signals.issuesCompleted} completed in window.`,
    `${signals.issuesOpen} issues still open.`
  ];
  if (signals.issuesStalled > 0) {
    out.push(`${signals.issuesStalled} in-progress issues haven't moved in 14+ days (stalled).`);
  }
  if (signals.cycleCompletionRate !== null) {
    const pct = Math.round(signals.cycleCompletionRate * 100);
    out.push(`Cycles ended in window completed ${pct}% of what was committed.`);
  }
  if (signals.uniqueAssignees > 0) {
    out.push(`${signals.uniqueAssignees} unique assignees active.`);
  }
  if (signals.commentsTotal > 0) {
    out.push(`${signals.commentsTotal} comments across issues in window.`);
  }
  if (signals.topProjects.length > 0) {
    out.push(`Most active projects: ${signals.topProjects.join(", ")}.`);
  }
  // Fixed interpretation guidance for the downstream prompt.
  out.push(
    "",
    "Linear is where the team states what it will build.",
    "GitHub is where the team reveals what actually got built.",
    "Low completion rate + high creation rate = planning faster than shipping.",
    "High stalled count = commitments made but not honored.",
    "Compare Linear signals against GitHub to find the stated-vs-shipped gap."
  );
  return out.join("\n");
}
1086
// POST a GraphQL document to the Linear API and return its `data` payload.
// Throws on HTTP failure, on GraphQL-level errors, and on a missing data field.
async function fetchLinearGraphQL(apiKey, query, variables) {
  const res = await fetch("https://api.linear.app/graphql", {
    method: "POST",
    headers: {
      // Linear accepts the raw API key in Authorization with no "Bearer" prefix.
      Authorization: apiKey,
      "Content-Type": "application/json"
    },
    body: JSON.stringify({ query, variables })
  });
  if (!res.ok) {
    const snippet = (await res.text()).slice(0, 300);
    throw new Error(`Linear API error ${res.status}: ${snippet}`);
  }
  const json = await res.json();
  const gqlErrors = json.errors;
  if (gqlErrors && gqlErrors.length > 0) {
    const joined = gqlErrors.map((e) => e.message).join("; ");
    throw new Error(`Linear GraphQL errors: ${joined}`);
  }
  if (!json.data) {
    throw new Error("Linear API returned no data");
  }
  return json.data;
}
1112
+
1113
+ // src/radiant/core/git-remote.ts
1114
+ import { existsSync, readFileSync, statSync } from "fs";
1115
+ import { join, resolve } from "path";
1116
// Locate the git config file for a repo checkout. Handles both a regular
// `.git` directory and a `.git` *file* (worktrees/submodules) whose
// "gitdir:" line points at the real git directory.
// Returns the config path, or null when it cannot be resolved.
function resolveGitConfigPath(repoDir) {
  const dotGit = join(repoDir, ".git");
  if (!existsSync(dotGit)) return null;
  try {
    const info = statSync(dotGit);
    if (info.isDirectory()) {
      return join(dotGit, "config");
    }
    if (!info.isFile()) return null;
    // `.git` is a pointer file: "gitdir: <path>" (path may be relative).
    const pointer = /^gitdir:\s*(.+)$/m.exec(readFileSync(dotGit, "utf-8"));
    if (!pointer) return null;
    const candidate = join(resolve(repoDir, pointer[1].trim()), "config");
    return existsSync(candidate) ? candidate : null;
  } catch {
    return null;
  }
}
1137
// Read the `url` value of the [remote "origin"] section from the repo's
// git config. Returns the raw url string, or null if unavailable/unreadable.
function readOriginRemote(repoDir) {
  const configPath = resolveGitConfigPath(repoDir);
  if (!configPath) return null;
  try {
    const raw = readFileSync(configPath, "utf-8");
    // Capture the section body: every line up to the next [section] header.
    const body = /\[remote "origin"\]\s*\n((?:(?!\[)[^\n]*\n?)*)/.exec(raw)?.[1];
    if (body === undefined) return null;
    return /^\s*url\s*=\s*(.+?)\s*$/m.exec(body)?.[1] ?? null;
  } catch {
    return null;
  }
}
1152
// Parse a git remote url (scp-style ssh, ssh://, or http(s)) into
// { host, owner, repo }, with trailing ".git" and "/" stripped from repo.
// Returns null for blank input or unrecognized formats.
function parseRemoteUrl(url) {
  const candidate = url.trim();
  if (!candidate) return null;
  const patterns = [
    /^git@([^:]+):([^/]+)\/(.+?)(?:\.git)?\/?$/,
    /^ssh:\/\/git@([^/]+)\/([^/]+)\/(.+?)(?:\.git)?\/?$/,
    /^https?:\/\/(?:[^@/]+@)?([^/]+)\/([^/]+)\/(.+?)(?:\.git)?\/?$/
  ];
  for (const re of patterns) {
    const m = re.exec(candidate);
    if (m) return { host: m[1], owner: m[2], repo: m[3] };
  }
  return null;
}
1163
// Convenience wrapper: read and parse a repo's origin remote in one call.
function getRepoOrigin(repoDir) {
  const remoteUrl = readOriginRemote(repoDir);
  return remoteUrl ? parseRemoteUrl(remoteUrl) : null;
}
1168
+
1169
+ // src/radiant/core/extends.ts
1170
+ import { existsSync as existsSync2, mkdirSync, readFileSync as readFileSync2, rmSync, statSync as statSync2, writeFileSync } from "fs";
1171
+ import { join as join2, resolve as resolve2, isAbsolute } from "path";
865
1172
  import { homedir } from "os";
1173
+ import { createHash } from "crypto";
1174
+ import { execFileSync } from "child_process";
1175
// Load `.neuroverse/config.json` from a repo.
// Returns the parsed object, or null when the file is absent, unreadable,
// or not valid JSON (malformed config is treated the same as no config).
function loadExtendsConfig(repoDir) {
  const configPath = join2(repoDir, ".neuroverse", "config.json");
  if (!existsSync2(configPath)) return null;
  try {
    return JSON.parse(readFileSync2(configPath, "utf-8"));
  } catch {
    return null;
  }
}
1186
// Parse an "extends" spec string into a structured descriptor.
// Supported forms:
//   github:owner/repo[@ref][:subpath]  -> { kind: "github", owner, repo, ref, subpath }
//   ./path, ../path, /absolute/path    -> { kind: "local", path }
// Anything else (including blank input) yields null.
function parseExtendsSpec(raw) {
  const spec = raw.trim();
  if (spec === "") return null;
  const GITHUB_PREFIX = "github:";
  if (spec.startsWith(GITHUB_PREFIX)) {
    const m = /^([^/]+)\/([^@:]+)(?:@([^:]+))?(?::(.+))?$/.exec(spec.slice(GITHUB_PREFIX.length));
    if (m === null) return null;
    return {
      raw: spec,
      kind: "github",
      owner: m[1],
      repo: m[2],
      ref: m[3] ?? "HEAD",
      subpath: m[4] ?? ""
    };
  }
  const looksLocal = spec.startsWith("./") || spec.startsWith("../") || isAbsolute(spec);
  return looksLocal ? { raw: spec, kind: "local", path: spec } : null;
}
1207
// Default freshness window for cached remote "extends" checkouts: 1 hour.
var DEFAULT_TTL_MS = 60 * 60 * 1e3;
1208
// Compute a stable per-spec cache directory: a 16-hex-char sha256 prefix of
// the raw spec string under <base> (default ~/.neuroverse/cache/extends).
function getCacheDir(spec, baseCacheDir) {
  const cacheRoot = baseCacheDir ?? join2(homedir(), ".neuroverse", "cache", "extends");
  const digest = createHash("sha256").update(spec.raw).digest("hex");
  return join2(cacheRoot, digest.slice(0, 16));
}
1213
// True when the cache dir carries a ".neuroverse-fetched" stamp file whose
// mtime is younger than ttlMs; any stat failure counts as stale.
function isCacheFresh(cacheDir, ttlMs) {
  const stamp = join2(cacheDir, ".neuroverse-fetched");
  if (!existsSync2(stamp)) return false;
  try {
    const ageMs = Date.now() - statSync2(stamp).mtimeMs;
    return ageMs < ttlMs;
  } catch {
    return false;
  }
}
1223
// Touch the cache stamp file (creating the dir if needed). Best-effort:
// failures are swallowed — a missing stamp only forces a re-fetch next time.
function markCacheFresh(cacheDir) {
  try {
    mkdirSync(cacheDir, { recursive: true });
    const stamp = join2(cacheDir, ".neuroverse-fetched");
    writeFileSync(stamp, (/* @__PURE__ */ new Date()).toISOString());
  } catch {
  }
}
1231
// Default fetcher: shallow, blobless `git clone` of a github spec into
// destDir. Any existing destDir is removed first so the clone lands clean.
var defaultGitFetcher = (spec, destDir) => {
  if (spec.kind !== "github") return;
  const cloneUrl = `https://github.com/${spec.owner}/${spec.repo}.git`;
  mkdirSync(resolve2(destDir, ".."), { recursive: true });
  if (existsSync2(destDir)) {
    rmSync(destDir, { recursive: true, force: true });
  }
  const cloneArgs = ["clone", "--depth", "1", "--filter=blob:none"];
  // "HEAD" means the remote's default branch, so only pass --branch for a real ref.
  if (spec.ref && spec.ref !== "HEAD") {
    cloneArgs.push("--branch", spec.ref);
  }
  cloneArgs.push(cloneUrl, destDir);
  execFileSync("git", cloneArgs, { stdio: "pipe" });
};
1246
// Resolve an extends spec to an on-disk directory.
//
// Local specs resolve relative to repoDir and must already exist.
// GitHub specs resolve through a TTL-stamped cache directory:
//   - fresh cache   -> reuse as-is
//   - stale/missing -> fetch (unless noFetch), stamping the cache on success
//   - fetch failure -> fall back to a stale-but-complete cache when present
// options: { cacheDir?, ttlMs?, forceRefresh?, noFetch?, silentOnMissing?, fetcher? }
// Returns { spec, dir, warning? }; dir is null when resolution failed.
function resolveExtendsSpec(spec, repoDir, options) {
  if (spec.kind === "local") {
    const full = isAbsolute(spec.path) ? spec.path : resolve2(repoDir, spec.path);
    if (!existsSync2(full)) {
      return { spec, dir: null, warning: `local extends path not found: ${full}` };
    }
    return { spec, dir: full };
  }
  const cacheRoot = options?.cacheDir;
  const ttl = options?.ttlMs ?? DEFAULT_TTL_MS;
  const cacheDir = getCacheDir(spec, cacheRoot);
  const fresh = isCacheFresh(cacheDir, ttl);
  const needsFetch = options?.forceRefresh || !fresh || !existsSync2(cacheDir);
  if (needsFetch && options?.noFetch) {
    // Fetching disabled: accept any previously-completed cache, even stale.
    if (existsSync2(cacheDir) && existsSync2(join2(cacheDir, ".neuroverse-fetched"))) {
      return resolveSubpath(spec, cacheDir);
    }
    return options?.silentOnMissing ? { spec, dir: null } : { spec, dir: null, warning: `NEUROVERSE_NO_FETCH set and no cache for ${spec.raw}` };
  }
  if (needsFetch) {
    const fetcher = options?.fetcher ?? defaultGitFetcher;
    try {
      fetcher(spec, cacheDir);
      markCacheFresh(cacheDir);
    } catch (err) {
      // Fetch failed: prefer a stale-but-complete cache over nothing.
      if (existsSync2(cacheDir) && existsSync2(join2(cacheDir, ".neuroverse-fetched"))) {
        const result = resolveSubpath(spec, cacheDir);
        return options?.silentOnMissing ? result : { ...result, warning: `fetch failed for ${spec.raw}, using stale cache: ${err.message}` };
      }
      return options?.silentOnMissing ? { spec, dir: null } : { spec, dir: null, warning: `fetch failed for ${spec.raw}: ${err.message}` };
    }
  }
  return resolveSubpath(spec, cacheDir);
}
1280
// Pick the directory to load worlds from inside a resolved checkout.
// With a subpath, that exact directory must exist. Without one, prefer a
// conventional worlds folder ("worlds/" or ".neuroverse/worlds/") over root.
function resolveSubpath(spec, cacheDir) {
  const hasSubpath = Boolean(spec.subpath);
  const target = hasSubpath ? join2(cacheDir, spec.subpath) : cacheDir;
  if (!existsSync2(target)) {
    return { spec, dir: null, warning: `subpath not found in ${spec.raw}: ${spec.subpath}` };
  }
  if (!hasSubpath) {
    const preferred = [
      join2(target, "worlds"),
      join2(target, ".neuroverse", "worlds")
    ].find((p) => existsSync2(p));
    if (preferred !== undefined) return { spec, dir: preferred };
  }
  return { spec, dir: target };
}
1296
// Infer an org-wide worlds repo spec (github:<owner>/worlds) from the repo's
// origin remote. Returns null for unknown or non-github.com origins, and when
// this repo *is* the org worlds repo itself (avoids self-extension).
function detectOrgExtendsSpec(repoDir) {
  const origin = getRepoOrigin(repoDir);
  if (!origin || origin.host !== "github.com" || origin.repo === "worlds") {
    return null;
  }
  return {
    raw: `github:${origin.owner}/worlds`,
    kind: "github",
    owner: origin.owner,
    repo: "worlds",
    ref: "HEAD",
    subpath: ""
  };
}
1310
// Resolve every extends entry from a repo's config (or from an injected
// options.config). Unparseable entries become a warning result rather than
// being dropped, so callers can surface them.
function resolveAllExtends(repoDir, options) {
  const config = options?.config !== void 0 ? options.config : loadExtendsConfig(repoDir);
  const entries = config?.extends ?? [];
  if (entries.length === 0) return [];
  return entries.map((raw) => {
    const spec = parseExtendsSpec(raw);
    if (spec) {
      return resolveExtendsSpec(spec, repoDir, options);
    }
    return {
      spec: { raw, kind: "local" },
      dir: null,
      warning: `unparseable extends spec: ${raw}`
    };
  });
}
1328
+
1329
+ // src/radiant/core/discovery.ts
1330
+ import { existsSync as existsSync3, readdirSync, readFileSync as readFileSync3, statSync as statSync3 } from "fs";
1331
+ import { join as join3, resolve as resolve3, basename } from "path";
1332
+ import { homedir as homedir2 } from "os";
866
1333
// Discover world definition files from (in push order): the user's personal
// dir, auto-detected org worlds repos, configured "extends" sources, then
// this repo. Returns { worlds, combinedContent, summary, warnings }.
function discoverWorlds(options) {
  const worlds = [];
  const warnings = [];
  // Env toggles: force re-fetch of remote caches / forbid all fetching /
  // skip org-level worlds detection entirely.
  const forceRefresh = process.env.NEUROVERSE_REFRESH === "1";
  const noFetch = process.env.NEUROVERSE_NO_FETCH === "1";
  const noOrg = options?.disableOrg || process.env.NEUROVERSE_NO_ORG === "1";
  const userDir = options?.userWorldsDir ?? join3(homedir2(), ".neuroverse", "worlds");
  if (existsSync3(userDir)) {
    worlds.push(...loadWorldsFromDir(userDir, "user"));
  }
  // Org-level worlds (github:<owner>/worlds) — skipped when an explicit
  // worlds dir was supplied.
  if (!noOrg && !options?.explicitWorldsDir) {
    const specs = [];
    if (options?.repoDir) {
      const fromGit = detectOrgExtendsSpec(options.repoDir);
      if (fromGit) specs.push(fromGit);
    }
    if (options?.scopeOwner) {
      // Avoid duplicating an owner already detected from the git remote.
      const already = specs.some(
        (s) => s.owner?.toLowerCase() === options.scopeOwner.toLowerCase()
      );
      if (!already) {
        specs.push({
          raw: `github:${options.scopeOwner}/worlds`,
          kind: "github",
          owner: options.scopeOwner,
          repo: "worlds"
        });
      }
    }
    const baseDir = options?.repoDir ?? process.cwd();
    for (const spec of specs) {
      // silentOnMissing: a nonexistent org worlds repo is normal, not a warning.
      const result = resolveExtendsSpec(spec, baseDir, {
        cacheDir: options?.extendsCacheDir,
        fetcher: options?.extendsFetcher,
        ttlMs: options?.extendsTtlMs,
        forceRefresh,
        noFetch,
        silentOnMissing: true
      });
      worlds.push(...loadExtendsWorlds(result, "org"));
    }
  }
  // Explicitly configured extends sources (.neuroverse/config.json).
  if (options?.repoDir && !options.disableExtends && !options.explicitWorldsDir) {
    const results = resolveAllExtends(options.repoDir, {
      cacheDir: options.extendsCacheDir,
      fetcher: options.extendsFetcher,
      ttlMs: options.extendsTtlMs,
      forceRefresh,
      noFetch
    });
    for (const result of results) {
      worlds.push(...loadExtendsWorlds(result, "extends"));
      if (result.warning) warnings.push(result.warning);
    }
  }
  // Repo-local worlds: an explicit dir wins; otherwise the first existing
  // conventional path is used.
  if (options?.explicitWorldsDir) {
    worlds.push(...loadWorldsFromDir(options.explicitWorldsDir, "repo"));
  } else if (options?.repoDir) {
    const repoPaths = [
      join3(options.repoDir, "worlds"),
      join3(options.repoDir, ".neuroverse", "worlds")
    ];
    for (const p of repoPaths) {
      if (existsSync3(p)) {
        worlds.push(...loadWorldsFromDir(p, "repo"));
        break;
      }
    }
  }
  // Concatenate with provenance comments so downstream consumers can
  // attribute content back to its source.
  const combinedContent = worlds.map((w) => {
    const tag = w.extendsFrom ? `<!-- world: ${w.name} (${w.source} ${w.extendsFrom}) -->` : `<!-- world: ${w.name} (${w.source}) -->`;
    return `${tag}
${w.content}`;
  }).join("\n\n---\n\n");
  const summary = worlds.length === 0 ? "no worlds discovered" : worlds.map((w) => `${w.name} (${w.source})`).join(", ");
  return { worlds, combinedContent, summary, warnings };
}
891
1410
// Human-readable listing of the loaded worlds plus any discovery warnings.
function formatActiveWorlds(stack) {
  if (stack.worlds.length === 0) return "No worlds loaded.";
  const out = ["ACTIVE WORLDS", ""];
  for (const w of stack.worlds) {
    let sourceLabel;
    switch (w.source) {
      case "base":
        sourceLabel = "universal";
        break;
      case "user":
        sourceLabel = "personal";
        break;
      case "org":
        sourceLabel = `org (${w.extendsFrom ?? "auto"})`;
        break;
      case "extends":
        sourceLabel = `shared (${w.extendsFrom ?? "extends"})`;
        break;
      default:
        sourceLabel = "this repo";
    }
    out.push(` ${w.name} (${sourceLabel})`);
  }
  if (stack.warnings.length > 0) {
    out.push("", "WARNINGS");
    for (const warning of stack.warnings) out.push(` ${warning}`);
  }
  return out.join("\n");
}
1423
// Load worlds from a resolved extends result, tagging each world with the
// raw spec string it came from. Empty when resolution produced no dir.
function loadExtendsWorlds(result, source) {
  if (!result.dir) return [];
  return loadWorldsFromDir(result.dir, source).map((w) => ({
    ...w,
    extendsFrom: result.spec.raw
  }));
}
900
1428
  function loadWorldsFromDir(dirPath, source) {
901
- const dir = resolve(dirPath);
902
- if (!existsSync(dir)) return [];
903
- const stat = statSync(dir);
1429
+ const dir = resolve3(dirPath);
1430
+ if (!existsSync3(dir)) return [];
1431
+ const stat = statSync3(dir);
904
1432
  if (stat.isFile() && dir.endsWith(".md")) {
905
1433
  try {
906
1434
  return [{
907
1435
  name: basename(dir).replace(/\.worldmodel\.md$/, "").replace(/\.nv-world\.md$/, ""),
908
1436
  source,
909
1437
  path: dir,
910
- content: readFileSync(dir, "utf-8")
1438
+ content: readFileSync3(dir, "utf-8")
911
1439
  }];
912
1440
  } catch {
913
1441
  return [];
@@ -918,28 +1446,28 @@ function loadWorldsFromDir(dirPath, source) {
918
1446
  (f) => f.endsWith(".worldmodel.md") || f.endsWith(".nv-world.md")
919
1447
  ).sort();
920
1448
  return files.map((f) => {
921
- const fullPath = join(dir, f);
1449
+ const fullPath = join3(dir, f);
922
1450
  return {
923
1451
  name: f.replace(/\.worldmodel\.md$/, "").replace(/\.nv-world\.md$/, ""),
924
1452
  source,
925
1453
  path: fullPath,
926
- content: readFileSync(fullPath, "utf-8")
1454
+ content: readFileSync3(fullPath, "utf-8")
927
1455
  };
928
1456
  });
929
1457
  }
930
1458
 
931
1459
  // src/radiant/adapters/exocortex.ts
932
- import { readFileSync as readFileSync2, existsSync as existsSync2, readdirSync as readdirSync2, statSync as statSync2 } from "fs";
933
- import { join as join2, resolve as resolve2 } from "path";
1460
+ import { readFileSync as readFileSync4, existsSync as existsSync4, readdirSync as readdirSync2, statSync as statSync4 } from "fs";
1461
+ import { join as join4, resolve as resolve4 } from "path";
934
1462
  function readExocortex(dirPath, repoName) {
935
- const dir = resolve2(dirPath);
1463
+ const dir = resolve4(dirPath);
936
1464
  let filesLoaded = 0;
937
1465
  function tryRead(...paths) {
938
1466
  for (const p of paths) {
939
- const full = join2(dir, p);
940
- if (existsSync2(full)) {
1467
+ const full = join4(dir, p);
1468
+ if (existsSync4(full)) {
941
1469
  try {
942
- const content = readFileSync2(full, "utf-8").trim();
1470
+ const content = readFileSync4(full, "utf-8").trim();
943
1471
  if (content) {
944
1472
  filesLoaded++;
945
1473
  return content;
@@ -1049,14 +1577,14 @@ ${ctx.methods}`);
1049
1577
  return sections.join("\n\n");
1050
1578
  }
1051
1579
  function readTeamExocortices(teamDir) {
1052
- const dir = resolve2(teamDir);
1053
- if (!existsSync2(dir)) return [];
1580
+ const dir = resolve4(teamDir);
1581
+ if (!existsSync4(dir)) return [];
1054
1582
  const entries = readdirSync2(dir);
1055
1583
  const results = [];
1056
1584
  for (const entry of entries) {
1057
- const entryPath = join2(dir, entry);
1585
+ const entryPath = join4(dir, entry);
1058
1586
  try {
1059
- const stat = statSync2(entryPath);
1587
+ const stat = statSync4(entryPath);
1060
1588
  if (stat.isDirectory()) {
1061
1589
  const ctx = readExocortex(entryPath);
1062
1590
  if (ctx.filesLoaded > 0) {
@@ -1101,25 +1629,25 @@ function summarizeExocortex(ctx) {
1101
1629
  }
1102
1630
 
1103
1631
  // src/radiant/memory/palace.ts
1104
- import { readFileSync as readFileSync3, writeFileSync, mkdirSync, readdirSync as readdirSync3, existsSync as existsSync3 } from "fs";
1105
- import { join as join3, resolve as resolve3 } from "path";
1632
+ import { readFileSync as readFileSync5, writeFileSync as writeFileSync2, mkdirSync as mkdirSync2, readdirSync as readdirSync3, existsSync as existsSync5 } from "fs";
1633
+ import { join as join5, resolve as resolve5 } from "path";
1106
1634
// Persist a "read" document as radiant/reads/<YYYY-MM-DD>.md under the
// exocortex dir (frontmatter, blank line, body, trailing newline).
// Same-day writes overwrite the existing file. Returns the file path.
function writeRead(exocortexDir, frontmatter, text) {
  const readsDir = resolve5(exocortexDir, "radiant", "reads");
  mkdirSync2(readsDir, { recursive: true });
  const today = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
  const filepath = join5(readsDir, `${today}.md`);
  const body = [frontmatter, "", text, ""].join("\n");
  writeFileSync2(filepath, body, "utf-8");
  return filepath;
}
1119
1647
  function updateKnowledge(exocortexDir, persistence, options) {
1120
- const dir = resolve3(exocortexDir, "radiant");
1121
- mkdirSync(dir, { recursive: true });
1122
- const filepath = join3(dir, "knowledge.md");
1648
+ const dir = resolve5(exocortexDir, "radiant");
1649
+ mkdirSync2(dir, { recursive: true });
1650
+ const filepath = join5(dir, "knowledge.md");
1123
1651
  const totalReads = options?.totalReads ?? 0;
1124
1652
  const existingUntriggered = loadUntriggeredCounts(filepath);
1125
1653
  const lines = [
@@ -1206,14 +1734,14 @@ function updateKnowledge(exocortexDir, persistence, options) {
1206
1734
  lines.push(`${name}=${count}`);
1207
1735
  }
1208
1736
  lines.push("-->");
1209
- writeFileSync(filepath, lines.join("\n"), "utf-8");
1737
+ writeFileSync2(filepath, lines.join("\n"), "utf-8");
1210
1738
  return filepath;
1211
1739
  }
1212
1740
  function loadUntriggeredCounts(filepath) {
1213
1741
  const counts = /* @__PURE__ */ new Map();
1214
- if (!existsSync3(filepath)) return counts;
1742
+ if (!existsSync5(filepath)) return counts;
1215
1743
  try {
1216
- const content = readFileSync3(filepath, "utf-8");
1744
+ const content = readFileSync5(filepath, "utf-8");
1217
1745
  const match = content.match(
1218
1746
  /<!-- untriggered_counts[\s\S]*?-->/
1219
1747
  );
@@ -1231,13 +1759,13 @@ function loadUntriggeredCounts(filepath) {
1231
1759
  return counts;
1232
1760
  }
1233
1761
  function loadPriorReads(exocortexDir) {
1234
- const dir = resolve3(exocortexDir, "radiant", "reads");
1235
- if (!existsSync3(dir)) return [];
1762
+ const dir = resolve5(exocortexDir, "radiant", "reads");
1763
+ if (!existsSync5(dir)) return [];
1236
1764
  const files = readdirSync3(dir).filter((f) => f.endsWith(".md")).sort();
1237
1765
  const reads = [];
1238
1766
  for (const filename of files) {
1239
- const filepath = join3(dir, filename);
1240
- const content = readFileSync3(filepath, "utf-8");
1767
+ const filepath = join5(dir, filename);
1768
+ const content = readFileSync5(filepath, "utf-8");
1241
1769
  const date = filename.replace(".md", "");
1242
1770
  const fmMatch = content.match(/^---\n([\s\S]*?)\n---/);
1243
1771
  const frontmatter = fmMatch ? fmMatch[1] : "";
@@ -1559,6 +2087,138 @@ var DEFAULT_SIGNAL_EXTRACTORS = Object.freeze([
1559
2087
  DECISION_MOMENTUM_EXTRACTOR
1560
2088
  ]);
1561
2089
 
2090
+ // src/radiant/core/vocabulary.ts
2091
/**
 * Parse a worldmodel document into its declared behavior vocabulary.
 * Reads the "Aligned Behaviors" and "Drift Behaviors" markdown sections,
 * turning each bullet into a named pattern record.
 * @param {string} worldmodelContent - raw worldmodel markdown
 * @returns {{aligned: object[], drift: object[], allNames: string[]}}
 */
function extractDeclaredVocabulary(worldmodelContent) {
  const parseAll = (header, kind) =>
    extractSection(worldmodelContent, header).map((bullet) => parseBehavior(bullet, kind));
  const aligned = parseAll("Aligned Behaviors", "aligned");
  const drift = parseAll("Drift Behaviors", "drift");
  return {
    aligned,
    drift,
    allNames: aligned.concat(drift).map((pattern) => pattern.name)
  };
}
2101
/**
 * Pull the bullet list found under a `## <header>` markdown section.
 * Matching is case-insensitive; the section body runs to the next `##`
 * heading or end of document. Bullets are trimmed of their `-`/`*` marker;
 * empty bullets and HTML-comment bullets (`<!--` …) are dropped.
 * @param {string} content - full markdown document
 * @param {string} header - section heading text (regex-escaped internally)
 * @returns {string[]} bullet texts, [] when the section or bullets are absent
 */
function extractSection(content, header) {
  const safeHeader = header.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const sectionRe = new RegExp(
    `##\\s+${safeHeader}\\s*\\n([\\s\\S]*?)(?=\\n##\\s|$)`,
    "i"
  );
  const section = sectionRe.exec(content);
  if (!section) return [];
  const bulletLines = section[1].match(/^[-*]\s+.+$/gm);
  if (!bulletLines) return [];
  const cleaned = [];
  for (const line of bulletLines) {
    const text = line.replace(/^[-*]\s+/, "").trim();
    if (text.length > 0 && !text.startsWith("<!--")) cleaned.push(text);
  }
  return cleaned;
}
2114
/**
 * Turn one worldmodel bullet into a behavior pattern record.
 * Bullets shaped "`name` — prose" whose name is already snake_case keep the
 * declared name (lowercased) with the prose as description; any other bullet
 * is slugged via snakeCaseName and keeps the whole bullet as prose.
 * @param {string} bullet - one bullet's text (marker already stripped)
 * @param {string} kind - "aligned" or "drift"
 * @returns {{name: string, prose: string, kind: string}}
 */
function parseBehavior(bullet, kind) {
  const explicitForm = /^`?([a-z][a-z0-9_]*)`?\s+[—\u2014-]\s+(.+)$/i.exec(bullet);
  if (explicitForm) {
    const [, declaredName, prose] = explicitForm;
    // Case-insensitive regex may capture uppercase; only a strictly
    // snake_case name counts as an explicit declaration.
    if (isSnakeCase(declaredName)) {
      return { name: declaredName.toLowerCase(), prose: prose.trim(), kind };
    }
  }
  return { name: snakeCaseName(bullet), prose: bullet, kind };
}
2127
/**
 * True when `s` is a lowercase snake_case identifier: a letter first,
 * then only lowercase letters, digits, or underscores.
 * @param {string} s
 * @returns {boolean}
 */
function isSnakeCase(s) {
  const SNAKE_RE = /^[a-z][a-z0-9_]*$/;
  return SNAKE_RE.test(s);
}
2130
/**
 * Slug arbitrary prose into a snake_case name of at most 60 characters.
 * Non-alphanumeric runs collapse to single underscores; leading/trailing
 * underscores are stripped. When truncating, prefer the last underscore
 * boundary (if past position 20) so the name does not end mid-word.
 * @param {string} s
 * @returns {string}
 */
function snakeCaseName(s) {
  const slug = s
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, "_")
    .replace(/^_+|_+$/g, "");
  if (slug.length <= 60) {
    return slug;
  }
  const head = slug.slice(0, 60);
  const cut = head.lastIndexOf("_");
  return cut > 20 ? head.slice(0, cut) : head;
}
2137
/**
 * Find the declared behavior whose prose best overlaps a candidate pattern.
 * A declared pattern qualifies when the candidate's text (name with
 * underscores spaced out, plus description) shares at least two content
 * words with the pattern's prose AND covers >= 30% of the prose's content
 * words. The highest-coverage qualifier wins; on ties the first declared
 * pattern is kept. Returns null when nothing qualifies.
 * @param {string} candidateName - snake_case candidate name
 * @param {string} candidateDescription
 * @param {{aligned: object[], drift: object[]}} vocabulary
 * @returns {object|null} the matched declared pattern, or null
 */
function matchDeclaredPattern(candidateName, candidateDescription, vocabulary) {
  const candidateWords = contentWords(
    `${candidateName.replace(/_/g, " ")} ${candidateDescription}`
  );
  if (candidateWords.size === 0) return null;
  let bestPattern = null;
  let bestScore = -1;
  const declared = [...vocabulary.aligned, ...vocabulary.drift];
  for (const pattern of declared) {
    const proseWords = contentWords(pattern.prose);
    if (proseWords.size === 0) continue;
    let shared = 0;
    for (const word of proseWords) {
      if (candidateWords.has(word)) shared += 1;
    }
    const coverage = shared / proseWords.size;
    if (shared < 2 || coverage < 0.3) continue;
    if (coverage > bestScore) {
      bestPattern = pattern;
      bestScore = coverage;
    }
  }
  return bestPattern;
}
2158
// Common English function words excluded from content-word overlap scoring.
// Note contentWords also drops words of length <= 3, so short entries like
// "nor" are retained only for completeness.
var STOPWORDS = /* @__PURE__ */ new Set(
  (
    "about after against among around because been before being between " +
    "both could does doing during each from further have having into " +
    "itself most nor only other over same should some such than that " +
    "their them then there these they this those through under until " +
    "very were what when where which while will with without would " +
    "your yours"
  ).split(" ")
);
2217
/**
 * Extract the distinctive lowercase words of `text`: alphanumeric runs
 * that start with a letter, are longer than 3 characters, and are not
 * stopwords.
 * @param {string} text
 * @returns {Set<string>}
 */
function contentWords(text) {
  const tokens = text.toLowerCase().match(/[a-z][a-z0-9_]+/g) ?? [];
  const distinctive = new Set();
  for (const token of tokens) {
    if (token.length > 3 && !STOPWORDS.has(token)) distinctive.add(token);
  }
  return distinctive;
}
2221
+
1562
2222
  // src/radiant/types.ts
1563
2223
  var DEFAULT_EVIDENCE_GATE = { k: 3, c: 0.5 };
1564
2224
  function isScored(s) {
@@ -1601,7 +2261,11 @@ function scoreComposite(a_L, a_C, a_N) {
1601
2261
  async function interpretPatterns(input) {
1602
2262
  const prompt = buildInterpretationPrompt(input);
1603
2263
  const raw = await input.ai.complete(prompt, "Analyze the activity and produce the read.");
1604
- const parsed = parseInterpretation(raw, input.canonicalPatterns ?? []);
2264
+ const canonicalNames = [
2265
+ ...input.canonicalPatterns ?? [],
2266
+ ...input.declaredVocabulary?.allNames ?? []
2267
+ ];
2268
+ const parsed = parseInterpretation(raw, canonicalNames, input.declaredVocabulary);
1605
2269
  return {
1606
2270
  patterns: parsed.patterns,
1607
2271
  meaning: parsed.meaning,
@@ -1612,8 +2276,10 @@ async function interpretPatterns(input) {
1612
2276
  function buildInterpretationPrompt(input) {
1613
2277
  const signalSummary = formatSignalSummary(input.signals);
1614
2278
  const eventSample = formatEventSample(input.events, 30);
1615
- const canonicalList = (input.canonicalPatterns ?? []).length > 0 ? `Patterns the organization has already named (use these names if you see them):
1616
- ${input.canonicalPatterns.map((p) => `- ${p}`).join("\n")}` : "No patterns have been named yet. Everything you observe is new.";
2279
+ const canonicalList = formatDeclaredVocabulary(
2280
+ input.declaredVocabulary,
2281
+ input.canonicalPatterns ?? []
2282
+ );
1617
2283
  const compressedWorld = compressWorldmodel(input.worldmodelContent);
1618
2284
  const cl = compressLens(input.lens);
1619
2285
  const frame = input.lens.primary_frame;
@@ -1724,6 +2390,44 @@ Only recommend a move when the evidence actually calls for one.
1724
2390
  Do NOT use these phrases anywhere in your output:
1725
2391
  ${forbiddenList}`;
1726
2392
  }
2393
/**
 * Render the declared vocabulary (plus any extra canonical names) as a
 * prompt section instructing the model to reuse declared pattern names.
 * When nothing at all is declared, returns a one-line "everything is new"
 * note instead.
 * @param {{aligned?: object[], drift?: object[]}|undefined} vocabulary
 * @param {string[]} extraNames - canonical names from prior runs / caller
 * @returns {string} newline-joined prompt section
 */
function formatDeclaredVocabulary(vocabulary, extraNames) {
  const aligned = vocabulary?.aligned ?? [];
  const drift = vocabulary?.drift ?? [];
  if (aligned.length === 0 && drift.length === 0 && extraNames.length === 0) {
    return 'No patterns have been named yet. Everything you observe is new \u2014 mark it type: "candidate".';
  }
  const out = [
    "## Declared vocabulary (use these names when you see matching evidence)",
    "",
    'The worldmodel declares the patterns below. If your observation matches one of these, use the EXACT snake_case name shown and mark type: "canonical" \u2014 do not invent a new name for something that already has one.',
    ""
  ];
  const pushGroup = (heading, patterns) => {
    if (patterns.length === 0) return;
    out.push(heading);
    for (const p of patterns) {
      out.push(`- \`${p.name}\` \u2014 ${p.prose}`);
    }
    out.push("");
  };
  pushGroup("### Aligned behaviors (positive patterns)", aligned);
  pushGroup("### Drift behaviors (negative patterns)", drift);
  if (extraNames.length > 0) {
    out.push(
      `Additional canonical names (from prior runs or caller): ${extraNames.join(", ")}`
    );
    out.push("");
  }
  out.push(
    'If you observe something genuinely new that matches NO declared pattern, mark it type: "candidate" with a freshly-invented snake_case name.'
  );
  return out.join("\n");
}
1727
2431
  function formatSignalSummary(signals) {
1728
2432
  const lines = [];
1729
2433
  const domains = ["life", "cyber", "joint"];
@@ -1749,7 +2453,7 @@ function formatEventSample(events, maxEvents) {
1749
2453
  "${content}"`;
1750
2454
  }).join("\n");
1751
2455
  }
1752
- function parseInterpretation(raw, canonicalNames) {
2456
+ function parseInterpretation(raw, canonicalNames, vocabulary) {
1753
2457
  let meaning = "";
1754
2458
  let move = "";
1755
2459
  let patternsArray = [];
@@ -1779,14 +2483,23 @@ function parseInterpretation(raw, canonicalNames) {
1779
2483
  const patterns = [];
1780
2484
  for (const item of patternsArray) {
1781
2485
  if (!isPatternLike(item)) continue;
1782
- const nameStr = String(item.name ?? "unnamed");
2486
+ const rawName = String(item.name ?? "unnamed");
2487
+ const description = String(item.description ?? "");
1783
2488
  const ev = item.evidence;
1784
- const isCanonical = item.type === "canonical" || canonicalSet.has(nameStr.toLowerCase());
2489
+ let name = rawName;
2490
+ let isCanonical = item.type === "canonical" || canonicalSet.has(rawName.toLowerCase());
2491
+ if (!isCanonical && vocabulary) {
2492
+ const matched = matchDeclaredPattern(rawName, description, vocabulary);
2493
+ if (matched) {
2494
+ name = matched.name;
2495
+ isCanonical = true;
2496
+ }
2497
+ }
1785
2498
  patterns.push({
1786
- name: nameStr,
2499
+ name,
1787
2500
  type: isCanonical ? "canonical" : "candidate",
1788
- declaredAs: isCanonical ? nameStr : void 0,
1789
- description: String(item.description ?? ""),
2501
+ declaredAs: isCanonical ? name : void 0,
2502
+ description,
1790
2503
  evidence: {
1791
2504
  signals: Array.isArray(ev?.signals) ? ev.signals.map(String) : [],
1792
2505
  events: Array.isArray(ev?.events) ? ev.events.map(String) : [],
@@ -2109,10 +2822,24 @@ Compare stated intent against actual GitHub activity. Gaps = drift.`;
2109
2822
  } catch {
2110
2823
  }
2111
2824
  }
2825
+ const linearKey = process.env.LINEAR_API_KEY;
2826
+ if (linearKey) {
2827
+ try {
2828
+ const linear = await fetchLinearActivity(linearKey, { windowDays });
2829
+ events.push(...linear.events);
2830
+ adapterSignals += "\n\n" + formatLinearSignalsForPrompt(linear.signals);
2831
+ activeAdapters.push("linear");
2832
+ } catch {
2833
+ }
2834
+ }
2112
2835
  events.sort((a, b) => Date.parse(a.timestamp) - Date.parse(b.timestamp));
2836
+ if (input.personalUser) {
2837
+ events = filterEventsByUser(events, input.personalUser);
2838
+ }
2113
2839
  const classified = classifyEvents(events);
2114
2840
  const signals = extractSignals(classified);
2115
2841
  const scores = computeScores(signals, input.worldmodelContent !== "");
2842
+ const declaredVocabulary = extractDeclaredVocabulary(worldmodelContent);
2116
2843
  const { patterns, meaning, move } = await interpretPatterns({
2117
2844
  signals,
2118
2845
  events: classified,
@@ -2120,6 +2847,7 @@ Compare stated intent against actual GitHub activity. Gaps = drift.`;
2120
2847
  lens,
2121
2848
  ai: input.ai,
2122
2849
  canonicalPatterns: input.canonicalPatterns,
2850
+ declaredVocabulary,
2123
2851
  statedIntent: [statedIntent, adapterSignals, priorReadContext].filter(Boolean).join("\n\n") || void 0
2124
2852
  });
2125
2853
  const rewrittenPatterns = patterns.map((p) => lens.rewrite(p));
@@ -2174,6 +2902,10 @@ Compare stated intent against actual GitHub activity. Gaps = drift.`;
2174
2902
  worldStack
2175
2903
  };
2176
2904
  }
2905
/**
 * Keep only the events attributed to `username`, matching case-insensitively
 * on the event's actor id. Events without an actor id are dropped rather
 * than throwing, so one malformed adapter event cannot crash the read
 * pipeline. The input array is not mutated.
 * @param {Array<{actor?: {id?: string}}>} events
 * @param {string} username
 * @returns {Array} the matching events, in their original order
 */
function filterEventsByUser(events, username) {
  const target = username.toLowerCase();
  return events.filter((e) => e.actor?.id?.toLowerCase() === target);
}
2177
2909
  function computeScores(signals, worldmodelLoaded) {
2178
2910
  const gate = DEFAULT_EVIDENCE_GATE;
2179
2911
  const lifeSignals = signals.filter((s) => s.domain === "life");
@@ -2283,6 +3015,17 @@ export {
2283
3015
  formatSlackSignalsForPrompt,
2284
3016
  fetchNotionActivity,
2285
3017
  formatNotionSignalsForPrompt,
3018
+ fetchLinearActivity,
3019
+ formatLinearSignalsForPrompt,
3020
+ readOriginRemote,
3021
+ parseRemoteUrl,
3022
+ getRepoOrigin,
3023
+ loadExtendsConfig,
3024
+ parseExtendsSpec,
3025
+ getCacheDir,
3026
+ resolveExtendsSpec,
3027
+ detectOrgExtendsSpec,
3028
+ resolveAllExtends,
2286
3029
  discoverWorlds,
2287
3030
  formatActiveWorlds,
2288
3031
  readExocortex,
@@ -2300,6 +3043,8 @@ export {
2300
3043
  classifyEvents,
2301
3044
  extractSignals,
2302
3045
  DEFAULT_SIGNAL_EXTRACTORS,
3046
+ extractDeclaredVocabulary,
3047
+ matchDeclaredPattern,
2303
3048
  DEFAULT_EVIDENCE_GATE,
2304
3049
  isScored,
2305
3050
  isSentinel,
@@ -2312,6 +3057,7 @@ export {
2312
3057
  interpretPatterns,
2313
3058
  render,
2314
3059
  emergent,
3060
+ filterEventsByUser,
2315
3061
  createAnthropicAI,
2316
3062
  createMockAI
2317
3063
  };