substrate-ai 0.20.57 → 0.20.59

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli/index.js CHANGED
@@ -1,10 +1,10 @@
1
1
  #!/usr/bin/env node
2
- import { FileStateStore, RunManifest, SUBSTRATE_OWNED_SETTINGS_KEYS, SupervisorLock, VALID_PHASES, WorkGraphRepository, ZERO_FINDINGS_BY_AUTHOR, ZERO_FINDING_COUNTS, ZERO_PROBE_AUTHOR_METRICS, aggregateProbeAuthorMetrics, buildPipelineStatusOutput, createDatabaseAdapter, createStateStore, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, inspectProcessTree, parseDbTimestampAsUtc, parseRuntimeProbes, registerHealthCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveRunManifest, rollupFindingCounts, rollupFindingsByAuthor, rollupProbeAuthorByClass, rollupProbeAuthorMetrics } from "../health-PdI4-96I.js";
2
+ import { FileStateStore, RunManifest, SUBSTRATE_OWNED_SETTINGS_KEYS, SupervisorLock, VALID_PHASES, WorkGraphRepository, ZERO_FINDINGS_BY_AUTHOR, ZERO_FINDING_COUNTS, ZERO_PROBE_AUTHOR_METRICS, aggregateProbeAuthorMetrics, buildPipelineStatusOutput, createDatabaseAdapter, createStateStore, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, inspectProcessTree, parseDbTimestampAsUtc, parseRuntimeProbes, registerHealthCommand, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveMainRepoRoot, resolveRunManifest, rollupFindingCounts, rollupFindingsByAuthor, rollupProbeAuthorByClass, rollupProbeAuthorMetrics } from "../health-CzYD6ghE.js";
3
3
  import { createLogger } from "../logger-KeHncl-f.js";
4
4
  import { createEventBus } from "../helpers-CElYrONe.js";
5
5
  import { AdapterRegistry, BudgetConfigSchema, CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, ConfigError, CostTrackerConfigSchema, DEFAULT_CONFIG, DoltClient, DoltNotInstalled, GlobalSettingsSchema, InMemoryDatabaseAdapter, IngestionServer, MonitorDatabaseImpl, OPERATIONAL_FINDING, PartialGlobalSettingsSchema, PartialProviderConfigSchema, ProvidersSchema, RoutingRecommender, STORY_METRICS, TelemetryConfigSchema, addTokenUsage, aggregateTokenUsageForRun, checkDoltInstalled, compareRunMetrics, createAmendmentRun, createConfigSystem, createDecision, createDoltClient, createPipelineRun, getActiveDecisions, getAllCostEntriesFiltered, getBaselineRunMetrics, getDecisionsByCategory, getDecisionsByPhaseForRun, getLatestCompletedRun, getLatestRun, getPipelineRunById, getPlanningCostTotal, getRetryableEscalations, getRunMetrics, getRunningPipelineRuns, getSessionCostSummary, getSessionCostSummaryFiltered, getStoryMetricsForRun, getTokenUsageSummary, incrementRunRestarts, initSchema, initializeDolt, listRunMetrics, loadParentRunDecisions, supersedeDecision, tagRunAsBaseline, updatePipelineRun } from "../dist-W2emvN3F.js";
6
6
  import "../adapter-registry-DXLMTmfD.js";
7
- import { AdapterTelemetryPersistence, AppError, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, EpicIngester, GLOBSTAR, GitClient, GrammarLoader, Minimatch, Minipass, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, escape, formatPhaseCompletionSummary, getFactoryRunSummaries, getScenarioResultsForRun, getTwinRunsForRun, listGraphRuns, registerExportCommand, registerFactoryCommand, registerRunCommand, registerScenariosCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runProbeAuthor, runSolutioningPhase, unescape, validateStopAfterFromConflict } from "../run-DcDoaG12.js";
7
+ import { AdapterTelemetryPersistence, AppError, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, EpicIngester, GLOBSTAR, GitClient, GrammarLoader, Minimatch, Minipass, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, escape, formatPhaseCompletionSummary, getFactoryRunSummaries, getScenarioResultsForRun, getTwinRunsForRun, listGraphRuns, registerExportCommand, registerFactoryCommand, registerRunCommand, registerScenariosCommand, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runProbeAuthor, runSolutioningPhase, unescape, validateStopAfterFromConflict } from "../run-BGXvVIwO.js";
8
8
  import "../errors-CKFu8YI9.js";
9
9
  import "../routing-CcBOCuC9.js";
10
10
  import "../decisions-C0pz9Clx.js";
@@ -7484,7 +7484,7 @@ async function runStatusAction(options) {
7484
7484
  logger$13.debug({ err }, "Work graph query failed, continuing without work graph data");
7485
7485
  }
7486
7486
  if (run === void 0) {
7487
- const { inspectProcessTree: inspectProcessTree$1 } = await import("../health-HmyFdWEf.js");
7487
+ const { inspectProcessTree: inspectProcessTree$1 } = await import("../health-CsE9INpp.js");
7488
7488
  const substrateDirPath = join(projectRoot, ".substrate");
7489
7489
  const processInfo = inspectProcessTree$1({
7490
7490
  projectRoot,
@@ -9030,7 +9030,7 @@ async function runSupervisorAction(options, deps = {}) {
9030
9030
  await initSchema(expAdapter);
9031
9031
  const { runRunAction: runPipeline } = await import(
9032
9032
  /* @vite-ignore */
9033
- "../run-BF8oeJhG.js"
9033
+ "../run-BupqUzci.js"
9034
9034
  );
9035
9035
  const runStoryFn = async (opts) => {
9036
9036
  const exitCode = await runPipeline({
@@ -1,4 +1,4 @@
1
- import { DEFAULT_STALL_THRESHOLD_SECONDS, getAllDescendantPids, getAutoHealthData, inspectProcessTree, isOrchestratorProcessLine, registerHealthCommand, runHealthAction } from "./health-PdI4-96I.js";
1
+ import { DEFAULT_STALL_THRESHOLD_SECONDS, getAllDescendantPids, getAutoHealthData, inspectProcessTree, isOrchestratorProcessLine, registerHealthCommand, runHealthAction } from "./health-CzYD6ghE.js";
2
2
  import "./logger-KeHncl-f.js";
3
3
  import "./dist-W2emvN3F.js";
4
4
  import "./decisions-C0pz9Clx.js";
@@ -1,19 +1,22 @@
1
1
  import { createLogger } from "./logger-KeHncl-f.js";
2
2
  import { DoltClient, DoltQueryError, LEARNING_FINDING, createDatabaseAdapter$1 as createDatabaseAdapter, createDecision, getDecisionsByCategory, getLatestRun, getPipelineRunById, initSchema } from "./dist-W2emvN3F.js";
3
3
  import { createRequire } from "module";
4
+ import * as path$1 from "path";
4
5
  import { dirname, join } from "path";
5
6
  import { readFile } from "fs/promises";
6
7
  import { EventEmitter } from "node:events";
7
8
  import { YAMLException, load } from "js-yaml";
8
9
  import { existsSync, promises, readFileSync, readdirSync, statSync } from "node:fs";
9
10
  import { spawn, spawnSync } from "node:child_process";
10
- import * as path$1 from "node:path";
11
+ import * as path$2 from "node:path";
11
12
  import { basename as basename$1, dirname as dirname$1, join as join$1, resolve as resolve$1 } from "node:path";
12
13
  import { z } from "zod";
13
14
  import { mkdir as mkdir$1, open, readFile as readFile$1, unlink, writeFile as writeFile$1 } from "node:fs/promises";
15
+ import * as fs from "fs";
14
16
  import { existsSync as existsSync$1 } from "fs";
15
17
  import { createRequire as createRequire$1 } from "node:module";
16
18
  import { fileURLToPath } from "node:url";
19
+ import { execSync as execSync$1 } from "child_process";
17
20
 
18
21
  //#region rolldown:runtime
19
22
  var __create = Object.create;
@@ -481,20 +484,20 @@ function detectCycles(edges) {
481
484
  }
482
485
  const visited = new Set();
483
486
  const visiting = new Set();
484
- const path$2 = [];
487
+ const path$3 = [];
485
488
  function dfs(node) {
486
489
  if (visiting.has(node)) {
487
- const cycleStart = path$2.indexOf(node);
488
- return [...path$2.slice(cycleStart), node];
490
+ const cycleStart = path$3.indexOf(node);
491
+ return [...path$3.slice(cycleStart), node];
489
492
  }
490
493
  if (visited.has(node)) return null;
491
494
  visiting.add(node);
492
- path$2.push(node);
495
+ path$3.push(node);
493
496
  for (const neighbor of adj.get(node) ?? []) {
494
497
  const cycle = dfs(neighbor);
495
498
  if (cycle !== null) return cycle;
496
499
  }
497
- path$2.pop();
500
+ path$3.pop();
498
501
  visiting.delete(node);
499
502
  visited.add(node);
500
503
  return null;
@@ -2928,6 +2931,32 @@ const SEVERITY_PREFIX = {
2928
2931
  info: "INFO"
2929
2932
  };
2930
2933
  /**
2934
+ * source-ac-shellout-npx-fallback — Story 67-3, obs_2026-05-03_023 fix #3.
2935
+ *
2936
+ * Severity: warn. Emitted by SourceAcShelloutCheck when a bare `npx <package>`
2937
+ * invocation (without `--no-install`) is detected in a story-modified source file.
2938
+ * A bare `npx <package>` without `--no-install` falls through to the public npm
2939
+ * registry on first use if the package binary is not locally installed —
2940
+ * a dependency-confusion attack vector.
2941
+ */
2942
+ const CATEGORY_SHELLOUT_NPX_FALLBACK = "source-ac-shellout-npx-fallback";
2943
+ /**
2944
+ * cross-story-concurrent-modification — Story 68-1, Epic 66/67 cross-story-interaction fix.
2945
+ *
2946
+ * Severity: warn (defensive rollout per Story 60-16 pattern). Emitted by
2947
+ * CrossStoryConsistencyCheck (Layer 2) when post-completion analysis shows two
2948
+ * concurrent stories modified the same file AND interface signatures differ
2949
+ * between their commits.
2950
+ *
2951
+ * Motivating incidents: Epic 66 run a832487a (66-1+66-2+66-7 concurrent dispatch)
2952
+ * + Epic 67 run a59e4c96 (67-1+67-2 methodology-pack.test.ts budget constant race).
2953
+ *
2954
+ * NOTE: Promotion to 'error' is deferred pending empirical low-false-positive
2955
+ * validation across multiple substrate-on-substrate dispatch runs. Do NOT change
2956
+ * severity to 'error' until at least 3 consecutive runs with zero false positives.
2957
+ */
2958
+ const CATEGORY_CROSS_STORY_CONCURRENT_MODIFICATION = "cross-story-concurrent-modification";
2959
+ /**
2931
2960
  * Render a list of findings into the multi-line human-readable string that
2932
2961
  * populates VerificationResult.details. One line per finding:
2933
2962
  *
@@ -3444,15 +3473,15 @@ function findCodeEvidence(opts) {
3444
3473
  reason: `AC text references ${token}, which is in files_modified`
3445
3474
  };
3446
3475
  for (const token of tokens) {
3447
- const base = path$1.basename(token);
3448
- const match = filesModified.find((f) => path$1.basename(f) === base);
3476
+ const base = path$2.basename(token);
3477
+ const match = filesModified.find((f) => path$2.basename(f) === base);
3449
3478
  if (match !== void 0) return {
3450
3479
  found: true,
3451
3480
  reason: `AC text references ${token}; matching basename ${match} is in files_modified`
3452
3481
  };
3453
3482
  }
3454
3483
  for (const token of tokens) try {
3455
- if (existsSync(path$1.join(workingDir, token))) return {
3484
+ if (existsSync(path$2.join(workingDir, token))) return {
3456
3485
  found: true,
3457
3486
  reason: `AC text references ${token}, which exists in working tree`
3458
3487
  };
@@ -3462,7 +3491,7 @@ function findCodeEvidence(opts) {
3462
3491
  const acMentionRe = new RegExp(`\\bAC\\s*:?\\s*#?\\s*${num}\\b`, "i");
3463
3492
  const testFiles = filesModified.filter(isTestFilePath);
3464
3493
  for (const testFile of testFiles) try {
3465
- const content = readFileSync(path$1.join(workingDir, testFile), "utf-8");
3494
+ const content = readFileSync(path$2.join(workingDir, testFile), "utf-8");
3466
3495
  if (acMentionRe.test(content)) return {
3467
3496
  found: true,
3468
3497
  reason: `${testFile} mentions ${acId}`
@@ -3790,11 +3819,11 @@ function parseRuntimeProbes(storyContent) {
3790
3819
  const validation = RuntimeProbeListSchema.safeParse(parsed);
3791
3820
  if (!validation.success) {
3792
3821
  const first = validation.error.issues[0];
3793
- const path$2 = first?.path.join(".") ?? "";
3822
+ const path$3 = first?.path.join(".") ?? "";
3794
3823
  const message = first?.message ?? "schema validation failed";
3795
3824
  return {
3796
3825
  kind: "invalid",
3797
- error: `probe list is malformed at ${path$2 || "<root>"}: ${message}`
3826
+ error: `probe list is malformed at ${path$3 || "<root>"}: ${message}`
3798
3827
  };
3799
3828
  }
3800
3829
  const probes = validation.data;
@@ -4552,18 +4581,18 @@ function existsAnywhereUnderRoot(root, base) {
4552
4581
  depth: 0
4553
4582
  }];
4554
4583
  while (stack.length > 0) {
4555
- const { path: path$2, depth } = stack.pop();
4584
+ const { path: path$3, depth } = stack.pop();
4556
4585
  if (depth > MAX_WALK_DEPTH) continue;
4557
4586
  let entries;
4558
4587
  try {
4559
- entries = readdirSync(path$2);
4588
+ entries = readdirSync(path$3);
4560
4589
  } catch {
4561
4590
  continue;
4562
4591
  }
4563
4592
  for (const entry of entries) {
4564
4593
  if (SKIP_DIRS.has(entry)) continue;
4565
4594
  if (entry === base) return true;
4566
- const full = join$1(path$2, entry);
4595
+ const full = join$1(path$3, entry);
4567
4596
  try {
4568
4597
  const s = statSync(full);
4569
4598
  if (s.isDirectory()) stack.push({
@@ -5106,6 +5135,286 @@ var SourceAcFidelityCheck = class {
5106
5135
  }
5107
5136
  };
5108
5137
 
5138
+ //#endregion
5139
+ //#region packages/sdlc/dist/verification/checks/source-ac-shellout-check.js
5140
+ /** Matches `npx <name>` but NOT `npx --no-install <name>`. */
5141
+ const NPX_PATTERN = /npx\s+(?!--no-install)([a-zA-Z0-9_@\-/]+)/g;
5142
+ /**
5143
+ * Returns `true` when the line (after trimming leading whitespace) starts with
5144
+ * a single-line comment marker (`//` or `#`). Block comments (/* … *\/) are not
5145
+ * matched here — they are handled by the string-literal context check.
5146
+ */
5147
+ function isCommentLine(line) {
5148
+ const trimmed = line.trimStart();
5149
+ return trimmed.startsWith("//") || trimmed.startsWith("#");
5150
+ }
5151
+ /**
5152
+ * Returns `true` when `matchIndex` falls inside a single-quoted (`'...'`),
5153
+ * double-quoted (`"..."`), or template-literal (`` `...` ``) region of the line,
5154
+ * OR when the line is a shebang (`#!...`).
5155
+ *
5156
+ * Implementation: scan character-by-character from index 0, toggling
5157
+ * `inSingle`, `inDouble`, `inTemplate` flags at unescaped quote characters.
5158
+ * An escaped quote is one where the immediately preceding character is `\`.
5159
+ * (Note: this is a heuristic — it does not handle `\\` or complex escape
5160
+ * sequences correctly. For a static-analysis severity:warn heuristic, the
5161
+ * simplification is acceptable.)
5162
+ */
5163
+ function isInStringLiteralContext(line, matchIndex) {
5164
+ if (line.trimStart().startsWith("#!")) return true;
5165
+ let inSingle = false;
5166
+ let inDouble = false;
5167
+ let inTemplate = false;
5168
+ for (let i = 0; i < matchIndex; i++) {
5169
+ const char = line[i];
5170
+ const escaped = i > 0 && line[i - 1] === "\\";
5171
+ if (!escaped) {
5172
+ if (char === "'" && !inDouble && !inTemplate) inSingle = !inSingle;
5173
+ else if (char === "\"" && !inSingle && !inTemplate) inDouble = !inDouble;
5174
+ else if (char === "`" && !inSingle && !inDouble) inTemplate = !inTemplate;
5175
+ }
5176
+ }
5177
+ return inSingle || inDouble || inTemplate;
5178
+ }
5179
+ /**
5180
+ * Reads the file at `absolutePath` and returns every line/match pair where
5181
+ * a bare `npx <name>` (without `--no-install`) appears inside a string-literal
5182
+ * context on a non-comment line.
5183
+ *
5184
+ * Returns 1-indexed line numbers.
5185
+ */
5186
+ function scanFile(absolutePath) {
5187
+ const content = fs.readFileSync(absolutePath, "utf-8");
5188
+ const lines = content.split("\n");
5189
+ const results = [];
5190
+ for (let i = 0; i < lines.length; i++) {
5191
+ const line = lines[i];
5192
+ if (line === void 0) continue;
5193
+ if (isCommentLine(line)) continue;
5194
+ NPX_PATTERN.lastIndex = 0;
5195
+ let match;
5196
+ const linePattern = new RegExp(NPX_PATTERN.source, "g");
5197
+ while ((match = linePattern.exec(line)) !== null) {
5198
+ const name = match[1];
5199
+ if (name !== void 0 && isInStringLiteralContext(line, match.index)) results.push({
5200
+ lineNum: i + 1,
5201
+ name
5202
+ });
5203
+ }
5204
+ }
5205
+ return results;
5206
+ }
5207
+ /**
5208
+ * Standalone function implementing the shellout check logic.
5209
+ * Exported separately so tests can call it directly without instantiating the class.
5210
+ */
5211
+ async function runShelloutCheck(context) {
5212
+ const start = Date.now();
5213
+ const findings = [];
5214
+ let modifiedFiles = context.devStoryResult?.files_modified ?? [];
5215
+ if (modifiedFiles.length === 0) try {
5216
+ const output = execSync$1("git diff --name-only HEAD~1", {
5217
+ cwd: context.workingDir,
5218
+ encoding: "utf-8"
5219
+ });
5220
+ modifiedFiles = output.trim().split("\n").filter((f) => f.length > 0);
5221
+ } catch {
5222
+ return {
5223
+ status: "pass",
5224
+ details: "source-ac-shellout: no modified files available — skipping check",
5225
+ duration_ms: Date.now() - start,
5226
+ findings: []
5227
+ };
5228
+ }
5229
+ const filesToCheck = modifiedFiles.filter((f) => !f.endsWith(".md"));
5230
+ if (filesToCheck.length === 0) return {
5231
+ status: "pass",
5232
+ details: "source-ac-shellout: no non-.md modified files — skipping check",
5233
+ duration_ms: Date.now() - start,
5234
+ findings: []
5235
+ };
5236
+ for (const relPath of filesToCheck) {
5237
+ const absPath = path$1.join(context.workingDir, relPath);
5238
+ let matches;
5239
+ try {
5240
+ matches = scanFile(absPath);
5241
+ } catch {
5242
+ continue;
5243
+ }
5244
+ for (const { lineNum, name } of matches) findings.push({
5245
+ category: CATEGORY_SHELLOUT_NPX_FALLBACK,
5246
+ severity: "warn",
5247
+ message: `npx fallback detected in ${relPath}:${lineNum}: "npx ${name}" — bare \`npx <package>\` without \`--no-install\` falls through to the public npm registry on first use. If \`<package>\` isn't a registered binary in your dev dependencies, this is a dependency-confusion vector. Use absolute path or \`npx --no-install <package>\` instead.`
5248
+ });
5249
+ }
5250
+ const status = findings.some((f) => f.severity === "error") ? "fail" : findings.some((f) => f.severity === "warn") ? "warn" : "pass";
5251
+ return {
5252
+ status,
5253
+ details: findings.length > 0 ? renderFindings(findings) : "source-ac-shellout: no bare npx fallback patterns detected",
5254
+ duration_ms: Date.now() - start,
5255
+ findings
5256
+ };
5257
+ }
5258
+ /**
5259
+ * VerificationCheck class for the shellout static-analysis gate.
5260
+ *
5261
+ * name = 'source-ac-shellout'
5262
+ * tier = 'A' (fast — file I/O only, no LLM, no subprocess except optional git fallback)
5263
+ */
5264
+ var SourceAcShelloutCheck = class {
5265
+ name = "source-ac-shellout";
5266
+ tier = "A";
5267
+ async run(context) {
5268
+ return runShelloutCheck(context);
5269
+ }
5270
+ };
5271
+
5272
+ //#endregion
5273
+ //#region packages/sdlc/dist/verification/checks/cross-story-consistency-check.js
5274
+ /**
5275
+ * Matches added/removed export interface or type declarations.
5276
+ * Example: `+export interface Foo {` or `-export type Bar =`
5277
+ */
5278
+ const INTERFACE_CHANGE_PATTERN = /^[+-]\s*(export\s+(?:interface|type)\s+\w+)/;
5279
+ /**
5280
+ * Matches added/removed constant assignments.
5281
+ * Example: `+const BUDGET_LIMIT = 32000` or `-const BUDGET_LIMIT = 30000`
5282
+ * Also matches `export const`, `let`, `var`.
5283
+ */
5284
+ const CONST_CHANGE_PATTERN = /^[+-]\s*(?:export\s+)?(?:const|let|var)\s+\w+\s*=/;
5285
+ /**
5286
+ * Compute the set of file paths that collide between the current story's
5287
+ * modified files and the prior stories' modified files.
5288
+ *
5289
+ * Uses `context._crossStoryConflictingFiles` as a direct override when
5290
+ * supplied (test-hook / runtime-probe path). Otherwise computes the
5291
+ * intersection of `devStoryResult.files_modified` ∩ `priorStoryFiles`.
5292
+ */
5293
+ function computeCollisionPaths(context) {
5294
+ if (context._crossStoryConflictingFiles !== void 0 && context._crossStoryConflictingFiles.length > 0) return context._crossStoryConflictingFiles;
5295
+ const currentFiles = context.devStoryResult?.files_modified ?? [];
5296
+ const priorFiles = context.priorStoryFiles ?? [];
5297
+ if (currentFiles.length === 0 || priorFiles.length === 0) return [];
5298
+ const priorSet = new Set(priorFiles);
5299
+ return currentFiles.filter((f) => priorSet.has(f));
5300
+ }
5301
+ /**
5302
+ * Parse a unified diff text for type signature changes or constant reassignments.
5303
+ *
5304
+ * Returns `true` when the diff contains any added or removed export
5305
+ * interface/type declaration OR any added/removed constant assignment —
5306
+ * indicating a potential interface-level change that concurrent story authors
5307
+ * should review.
5308
+ */
5309
+ function diffContainsInterfaceOrConstChange(diffText) {
5310
+ const lines = diffText.split("\n");
5311
+ for (const line of lines) {
5312
+ if (INTERFACE_CHANGE_PATTERN.test(line)) return true;
5313
+ if (CONST_CHANGE_PATTERN.test(line)) return true;
5314
+ }
5315
+ return false;
5316
+ }
5317
+ /**
5318
+ * Run `git diff --no-renames <commitSha>^...<commitSha> -- <file>` to get
5319
+ * the per-file diff for the story's commit.
5320
+ *
5321
+ * Returns `null` when git is unavailable or the file wasn't part of the commit.
5322
+ */
5323
+ function getDiffForFile(workingDir, commitSha, filePath) {
5324
+ try {
5325
+ return execSync$1(`git diff --no-renames ${commitSha}~1 ${commitSha} -- ${filePath}`, {
5326
+ cwd: workingDir,
5327
+ encoding: "utf-8",
5328
+ stdio: [
5329
+ "ignore",
5330
+ "pipe",
5331
+ "pipe"
5332
+ ]
5333
+ });
5334
+ } catch {
5335
+ return null;
5336
+ }
5337
+ }
5338
+ /**
5339
+ * Get numstat diff for a story commit to confirm a file was modified.
5340
+ * Returns lines like: `5\t3\tsrc/foo.ts`
5341
+ */
5342
+ function getNumstatDiff(workingDir, commitSha) {
5343
+ try {
5344
+ return execSync$1(`git diff --no-renames --numstat ${commitSha}~1 ${commitSha}`, {
5345
+ cwd: workingDir,
5346
+ encoding: "utf-8",
5347
+ stdio: [
5348
+ "ignore",
5349
+ "pipe",
5350
+ "pipe"
5351
+ ]
5352
+ });
5353
+ } catch {
5354
+ return null;
5355
+ }
5356
+ }
5357
+ /**
5358
+ * Standalone function implementing the cross-story consistency check logic.
5359
+ * Exported separately so tests can call it directly without instantiating the class.
5360
+ */
5361
+ async function runCrossStoryConsistencyCheck(context) {
5362
+ const start = Date.now();
5363
+ const findings = [];
5364
+ if ((context.priorStoryFiles === void 0 || context.priorStoryFiles.length === 0) && (context._crossStoryConflictingFiles === void 0 || context._crossStoryConflictingFiles.length === 0)) return {
5365
+ status: "pass",
5366
+ details: "cross-story-consistency: no Tier B context (priorStoryFiles absent) — skipping check",
5367
+ duration_ms: Date.now() - start,
5368
+ findings: []
5369
+ };
5370
+ const collisionPaths = computeCollisionPaths(context);
5371
+ if (collisionPaths.length > 0) findings.push({
5372
+ category: "cross-story-file-collision",
5373
+ severity: "warn",
5374
+ message: `Layer 1 collision: story "${context.storyKey}" shares ${collisionPaths.length} file(s) with concurrent stories: ${collisionPaths.join(", ")}. Recommended action: serialize these stories to avoid race conditions. Motivating incidents: Epic 66 (a832487a), Epic 67 (a59e4c96).`
5375
+ });
5376
+ const shouldRunLayer2 = context.buildCheckPassed !== false && collisionPaths.length > 0;
5377
+ if (shouldRunLayer2) {
5378
+ const numstat = getNumstatDiff(context.workingDir, context.commitSha);
5379
+ const binaryFiles = new Set();
5380
+ if (numstat !== null) for (const line of numstat.split("\n")) {
5381
+ const binMatch = /^-\t-\t(.+)$/.exec(line.trim());
5382
+ if (binMatch?.[1]) binaryFiles.add(binMatch[1]);
5383
+ }
5384
+ for (const filePath of collisionPaths) {
5385
+ if (binaryFiles.has(filePath)) continue;
5386
+ const normalizedPath = filePath.replace(/\\/g, "/");
5387
+ const diffText = getDiffForFile(context.workingDir, context.commitSha, normalizedPath);
5388
+ if (diffText === null) continue;
5389
+ if (diffContainsInterfaceOrConstChange(diffText)) findings.push({
5390
+ category: CATEGORY_CROSS_STORY_CONCURRENT_MODIFICATION,
5391
+ severity: "warn",
5392
+ message: `Layer 2 interface/constant change in shared file "${filePath}": this story's commit modified export signatures or constants that may conflict with concurrent story changes. Manual review recommended. (Epic 66/67 reconciliation pattern: verify working-tree coherence via build + tests before treating pipeline outcome as definitive.)`
5393
+ });
5394
+ }
5395
+ }
5396
+ const status = findings.some((f) => f.severity === "error") ? "fail" : findings.some((f) => f.severity === "warn") ? "warn" : "pass";
5397
+ return {
5398
+ status,
5399
+ details: findings.length > 0 ? renderFindings(findings) : "cross-story-consistency: no file collisions detected between concurrent stories",
5400
+ duration_ms: Date.now() - start,
5401
+ findings
5402
+ };
5403
+ }
5404
+ /**
5405
+ * VerificationCheck class for cross-story consistency analysis.
5406
+ *
5407
+ * name = 'cross-story-consistency'
5408
+ * tier = 'B' (requires cross-story context; skipped for single-story runs)
5409
+ */
5410
+ var CrossStoryConsistencyCheck = class {
5411
+ name = "cross-story-consistency";
5412
+ tier = "B";
5413
+ async run(context) {
5414
+ return runCrossStoryConsistencyCheck(context);
5415
+ }
5416
+ };
5417
+
5109
5418
  //#endregion
5110
5419
  //#region packages/sdlc/dist/verification/verification-pipeline.js
5111
5420
  /**
@@ -5232,7 +5541,9 @@ function createDefaultVerificationPipeline(bus, config) {
5232
5541
  new AcceptanceCriteriaEvidenceCheck(),
5233
5542
  new BuildCheck(),
5234
5543
  new RuntimeProbeCheck(),
5235
- new SourceAcFidelityCheck()
5544
+ new SourceAcFidelityCheck(),
5545
+ new SourceAcShelloutCheck(),
5546
+ new CrossStoryConsistencyCheck()
5236
5547
  ];
5237
5548
  return new VerificationPipeline(bus, checks);
5238
5549
  }
@@ -6595,7 +6906,7 @@ function inspectProcessTree(opts) {
6595
6906
  }
6596
6907
  const lines = psOutput.split("\n");
6597
6908
  if (substrateDirPath !== void 0) try {
6598
- const readFileSyncFn = readFileSyncOverride ?? ((path$2, encoding) => readFileSync(path$2, encoding));
6909
+ const readFileSyncFn = readFileSyncOverride ?? ((path$3, encoding) => readFileSync(path$3, encoding));
6599
6910
  const pidContent = readFileSyncFn(join(substrateDirPath, "orchestrator.pid"), "utf-8");
6600
6911
  const pid = parseInt(pidContent.trim(), 10);
6601
6912
  if (!isNaN(pid) && pid > 0) {
@@ -7030,4 +7341,4 @@ function registerHealthCommand(program, _version = "0.0.0", projectRoot = proces
7030
7341
 
7031
7342
  //#endregion
7032
7343
  export { BMAD_BASELINE_TOKENS_FULL, DEFAULT_STALL_THRESHOLD_SECONDS, DoltMergeConflict, FileStateStore, FindingsInjector, RunManifest, RuntimeProbeListSchema, STOP_AFTER_VALID_PHASES, STORY_KEY_PATTERN$1 as STORY_KEY_PATTERN, SUBSTRATE_OWNED_SETTINGS_KEYS, SupervisorLock, VALID_PHASES, WorkGraphRepository, ZERO_FINDINGS_BY_AUTHOR, ZERO_FINDING_COUNTS, ZERO_PROBE_AUTHOR_METRICS, __commonJS, __require, __toESM, aggregateProbeAuthorMetrics, applyConfigToGraph, buildPipelineStatusOutput, createDatabaseAdapter$1 as createDatabaseAdapter, createDefaultVerificationPipeline, createGraphOrchestrator, createSdlcCodeReviewHandler, createSdlcCreateStoryHandler, createSdlcDevStoryHandler, createSdlcPhaseHandler, createStateStore, detectCycles, detectsEventDrivenAC, detectsStateIntegratingAC, extractTargetFilesFromStoryContent, findPackageRoot, formatOutput, formatPipelineStatusHuman, formatPipelineSummary, formatTokenTelemetry, getAllDescendantPids, getAutoHealthData, getSubstrateDefaultSettings, inspectProcessTree, isOrchestratorProcessLine, parseDbTimestampAsUtc, parseRuntimeProbes, registerHealthCommand, renderFindings, resolveBmadMethodSrcPath, resolveBmadMethodVersion, resolveGraphPath, resolveMainRepoRoot, resolveRunManifest, rollupFindingCounts, rollupFindingsByAuthor, rollupProbeAuthorByClass, rollupProbeAuthorMetrics, runHealthAction, validateStoryKey };
7033
- //# sourceMappingURL=health-PdI4-96I.js.map
7344
+ //# sourceMappingURL=health-CzYD6ghE.js.map
package/dist/index.d.ts CHANGED
@@ -2263,6 +2263,16 @@ interface OrchestratorEvents {
2263
2263
  /** Tail of subprocess stdout captured at kill time (~64KB max, UTF-8) */
2264
2264
  stdoutTail?: string;
2265
2265
  };
2266
+ /**
2267
+ * Two or more concurrent stories have overlapping target file paths.
2268
+ * Story 68-1: closes Epic 66 (a832487a) + Epic 67 (a59e4c96) cross-story-interaction class.
2269
+ * Mirror of CoreEvents['dispatch:cross-story-file-collision']; both must stay in sync.
2270
+ */
2271
+ 'dispatch:cross-story-file-collision': {
2272
+ storyKeys: string[];
2273
+ collisionPaths: string[];
2274
+ recommendedAction: 'serialize' | 'warn';
2275
+ };
2266
2276
  /** Watchdog detected no progress for an extended period */
2267
2277
  'orchestrator:stall': {
2268
2278
  runId: string;
@@ -1,4 +1,4 @@
1
- import { BMAD_BASELINE_TOKENS_FULL, DoltMergeConflict, FileStateStore, FindingsInjector, RunManifest, RuntimeProbeListSchema, STOP_AFTER_VALID_PHASES, STORY_KEY_PATTERN, VALID_PHASES, WorkGraphRepository, __commonJS, __require, __toESM, applyConfigToGraph, buildPipelineStatusOutput, createDatabaseAdapter, createDefaultVerificationPipeline, createGraphOrchestrator, createSdlcCodeReviewHandler, createSdlcCreateStoryHandler, createSdlcDevStoryHandler, createSdlcPhaseHandler, detectCycles, detectsEventDrivenAC, detectsStateIntegratingAC, extractTargetFilesFromStoryContent, formatOutput, formatPipelineSummary, formatTokenTelemetry, inspectProcessTree, parseDbTimestampAsUtc, renderFindings, resolveGraphPath, resolveMainRepoRoot, validateStoryKey } from "./health-PdI4-96I.js";
1
+ import { BMAD_BASELINE_TOKENS_FULL, DoltMergeConflict, FileStateStore, FindingsInjector, RunManifest, RuntimeProbeListSchema, STOP_AFTER_VALID_PHASES, STORY_KEY_PATTERN, VALID_PHASES, WorkGraphRepository, __commonJS, __require, __toESM, applyConfigToGraph, buildPipelineStatusOutput, createDatabaseAdapter, createDefaultVerificationPipeline, createGraphOrchestrator, createSdlcCodeReviewHandler, createSdlcCreateStoryHandler, createSdlcDevStoryHandler, createSdlcPhaseHandler, detectCycles, detectsEventDrivenAC, detectsStateIntegratingAC, extractTargetFilesFromStoryContent, formatOutput, formatPipelineSummary, formatTokenTelemetry, inspectProcessTree, parseDbTimestampAsUtc, renderFindings, resolveGraphPath, resolveMainRepoRoot, validateStoryKey } from "./health-CzYD6ghE.js";
2
2
  import { createLogger } from "./logger-KeHncl-f.js";
3
3
  import { TypedEventBusImpl, createEventBus, createTuiApp, isTuiCapable, printNonTtyWarning, sleep } from "./helpers-CElYrONe.js";
4
4
  import { ADVISORY_NOTES, Categorizer, ConsumerAnalyzer, DEFAULT_GLOBAL_SETTINGS, DispatcherImpl, DoltClient, ESCALATION_DIAGNOSIS, EXPERIMENT_RESULT, EfficiencyScorer, IngestionServer, LogTurnAnalyzer, OPERATIONAL_FINDING, Recommender, RoutingRecommender, RoutingResolver, RoutingTelemetry, RoutingTokenAccumulator, RoutingTuner, STORY_METRICS, STORY_OUTCOME, SubstrateConfigSchema, TEST_EXPANSION_FINDING, TEST_PLAN, TelemetryNormalizer, TelemetryPipeline, TurnAnalyzer, addTokenUsage, aggregateTokenUsageForRun, aggregateTokenUsageForStory, callLLM, createConfigSystem, createDatabaseAdapter$1, createDecision, createPipelineRun, createRequirement, detectInterfaceChanges, getArtifactByTypeForRun, getArtifactsByRun, getDecisionsByCategory, getDecisionsByPhase, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getRunMetrics, getRunningPipelineRuns, getStoryMetricsForRun, getTokenUsageSummary, initSchema, listRequirements, loadModelRoutingConfig, registerArtifact, updatePipelineRun, updatePipelineRunConfig, upsertDecision, writeRunMetrics, writeStoryMetrics } from "./dist-W2emvN3F.js";
@@ -15326,6 +15326,132 @@ function createImplementationOrchestrator(deps) {
15326
15326
  await sleep(gcPauseMs);
15327
15327
  }
15328
15328
  }
15329
/**
 * Best-effort extraction of repository file paths mentioned in a story spec.
 *
 * Two mention styles are recognized:
 *   1. `@`-references, e.g. "@src/foo.ts" or "@ src/foo.ts". Whitespace after
 *      the "@" is optional (the previous pattern required at least one space,
 *      which missed the common no-space "@path" form). Trailing "." / ")"
 *      punctuation from surrounding prose is stripped from these.
 *   2. Backtick-quoted paths, e.g. "`src/foo.ts`", with the extension capped
 *      at 6 characters to avoid matching ordinary backticked prose.
 *
 * Candidates are kept only when they contain a "/" (bare file names are
 * ignored) and do not start with "http" (URLs are ignored).
 *
 * @param {string} content - Raw story/markdown content to scan.
 * @returns {Set<string>} Unique candidate file paths found in the content.
 */
function extractFilePathsFromStoryContent(content) {
	const paths = new Set();
	// "@path" / "@ path": \s* (not \s+) so the no-space form is matched too.
	const atPattern = /@\s*([a-zA-Z][^\s`()\n]+\.[a-zA-Z0-9]+)/g;
	let m;
	while ((m = atPattern.exec(content)) !== null) {
		const p = m[1]?.trim();
		if (p !== void 0 && p.includes("/") && !p.startsWith("http")) paths.add(p.replace(/[.)]+$/, ""));
	}
	// "`path.ext`": no spaces allowed inside; short extension guards against prose.
	const backtickPattern = /`([a-zA-Z][^`\n ]+\.[a-zA-Z0-9]{1,6})`/g;
	while ((m = backtickPattern.exec(content)) !== null) {
		const p = m[1]?.trim();
		if (p !== void 0 && p.includes("/") && !p.includes(" ") && !p.startsWith("http")) paths.add(p);
	}
	return paths;
}
15344
/**
 * Story 68-1: Pre-dispatch cross-story file collision detection.
 *
 * Given the concurrent groups of one batch, scans each story's spec artifact
 * (under `_bmad-output/implementation-artifacts`, skipping `.stale-*` copies)
 * for file-path mentions. Whenever two groups reference at least one common
 * path, it:
 *   1. Emits a `dispatch:cross-story-file-collision` event for operator
 *      visibility (best-effort; emit failures are swallowed).
 *   2. Unions the two groups (union-find over group indices) so the merged
 *      group runs its stories sequentially instead of in parallel.
 *
 * Best-effort by design: missing artifacts dir, unreadable story files, or
 * specs with no parseable paths all leave the input groups untouched.
 *
 * @param {string[][]} batchGroups - Concurrent groups of story keys.
 * @returns {string[][]} Either the original groups, or a re-merged set where
 *   colliding groups have been coalesced (group order follows the first
 *   original index of each merged root; story order within a merged group
 *   follows original group order).
 */
function detectAndSerializeConcurrentFileCollisions(batchGroups) {
	// Nothing can collide with fewer than two concurrent groups.
	if (batchGroups.length <= 1) return batchGroups;
	const projectDir = projectRoot ?? process.cwd();
	const artifactsDir = join$1(projectDir, "_bmad-output", "implementation-artifacts");
	if (!existsSync(artifactsDir)) return batchGroups;
	let artifactFiles;
	try {
		artifactFiles = readdirSync(artifactsDir);
	} catch {
		return batchGroups;
	}
	const STALE_SUFFIX = /\.stale-\d+\.md$/;
	// storyKey -> Set of file paths mentioned in that story's spec.
	const storyFileMap = new Map();
	for (const storyKey of batchGroups.flat()) {
		try {
			const artifact = artifactFiles.find((name) => name.startsWith(`${storyKey}-`) && name.endsWith(".md") && !STALE_SUFFIX.test(name));
			if (artifact === void 0) continue;
			const spec = readFileSync(join$1(artifactsDir, artifact), "utf-8");
			const filePaths = extractFilePathsFromStoryContent(spec);
			if (filePaths.size > 0) storyFileMap.set(storyKey, filePaths);
		} catch {}
	}
	if (storyFileMap.size === 0) return batchGroups;
	// Union-find over group indices, with path halving in find.
	const parent = batchGroups.map((_, idx) => idx);
	const find = (x) => {
		while (parent[x] !== x) {
			parent[x] = parent[parent[x]];
			x = parent[x];
		}
		return x;
	};
	const union = (a, b) => {
		const ra = find(a);
		const rb = find(b);
		if (ra !== rb) parent[ra] = rb;
	};
	// Collect, for one group, the keys that have specs and the union of their paths.
	const collectGroupFiles = (group) => {
		const keys = [];
		const files = new Set();
		for (const key of group) {
			const paths = storyFileMap.get(key);
			if (paths === void 0) continue;
			keys.push(key);
			for (const p of paths) files.add(p);
		}
		return { keys, files };
	};
	let collisionCount = 0;
	const collisionEvents = [];
	for (let i = 0; i < batchGroups.length; i++) {
		for (let j = i + 1; j < batchGroups.length; j++) {
			// Already transitively merged — no need to re-check this pair.
			if (find(i) === find(j)) continue;
			const a = collectGroupFiles(batchGroups[i] ?? []);
			const b = collectGroupFiles(batchGroups[j] ?? []);
			if (a.keys.length === 0 || b.keys.length === 0) continue;
			const collisionPaths = [...a.files].filter((p) => b.files.has(p));
			if (collisionPaths.length === 0) continue;
			union(i, j);
			collisionCount++;
			collisionEvents.push({
				storyKeys: [...a.keys, ...b.keys],
				collisionPaths
			});
		}
	}
	if (collisionCount === 0) return batchGroups;
	for (const evt of collisionEvents) {
		try {
			eventBus.emit("dispatch:cross-story-file-collision", {
				storyKeys: evt.storyKeys,
				collisionPaths: evt.collisionPaths,
				recommendedAction: "serialize"
			});
		} catch {}
		logger$26.info({
			storyKeys: evt.storyKeys,
			collisionPaths: evt.collisionPaths
		}, "Cross-story file collision detected — serializing affected groups to prevent race conditions");
	}
	// Coalesce groups by their union-find root, preserving first-index order.
	const mergedGroupMap = new Map();
	for (let i = 0; i < batchGroups.length; i++) {
		const rootIdx = find(i);
		const merged = mergedGroupMap.get(rootIdx);
		if (merged === void 0) mergedGroupMap.set(rootIdx, [...batchGroups[i] ?? []]);
		else merged.push(...batchGroups[i] ?? []);
	}
	const result = [...mergedGroupMap.values()];
	logger$26.info({
		originalGroupCount: batchGroups.length,
		mergedGroupCount: result.length,
		collisionCount
	}, "Story groups re-arranged after cross-story collision detection");
	return result;
}
15329
15455
  /**
15330
15456
  * Promise pool: run up to maxConcurrency groups at a time.
15331
15457
  *
@@ -15617,7 +15743,10 @@ function createImplementationOrchestrator(deps) {
15617
15743
  logger$26.warn({ err: snapErr }, "Failed to capture package snapshot — continuing without protection");
15618
15744
  }
15619
15745
  try {
15620
- for (const batchGroups of batches) await runWithConcurrency(batchGroups, config.maxConcurrency);
15746
+ for (const rawBatchGroups of batches) {
15747
+ const batchGroups = detectAndSerializeConcurrentFileCollisions(rawBatchGroups);
15748
+ await runWithConcurrency(batchGroups, config.maxConcurrency);
15749
+ }
15621
15750
  } catch (err) {
15622
15751
  stopHeartbeat();
15623
15752
  _state = "FAILED";
@@ -45472,4 +45601,4 @@ function registerRunCommand(program, _version = "0.0.0", projectRoot = process.c
45472
45601
 
45473
45602
  //#endregion
45474
45603
  export { AdapterTelemetryPersistence, AppError, DoltRepoMapMetaRepository, DoltSymbolRepository, ERR_REPO_MAP_STORAGE_WRITE, EpicIngester, GLOBSTAR$1 as GLOBSTAR, GitClient, GrammarLoader, Minimatch$1 as Minimatch, Minipass, RepoMapInjector, RepoMapModule, RepoMapQueryEngine, RepoMapStorage, SymbolParser, createContextCompiler, createDispatcher, createEventEmitter, createImplementationOrchestrator, createPackLoader, createPhaseOrchestrator, createStopAfterGate, createTelemetryAdvisor, escape$1 as escape, formatPhaseCompletionSummary, getFactoryRunSummaries, getScenarioResultsForRun, getTwinRunsForRun, listGraphRuns, normalizeGraphSummaryToStatus, registerExportCommand, registerFactoryCommand, registerRunCommand, registerScenariosCommand, resolveMaxReviewCycles, resolveProbeAuthorStateIntegrating, resolveStoryKeys, runAnalysisPhase, runPlanningPhase, runProbeAuthor, runRunAction, runSolutioningPhase, unescape$1 as unescape, validateStopAfterFromConflict, wireNdjsonEmitter };
45475
- //# sourceMappingURL=run-DcDoaG12.js.map
45604
+ //# sourceMappingURL=run-BGXvVIwO.js.map
@@ -1,8 +1,8 @@
1
- import "./health-PdI4-96I.js";
1
+ import "./health-CzYD6ghE.js";
2
2
  import "./logger-KeHncl-f.js";
3
3
  import "./helpers-CElYrONe.js";
4
4
  import "./dist-W2emvN3F.js";
5
- import { normalizeGraphSummaryToStatus, registerRunCommand, resolveMaxReviewCycles, resolveProbeAuthorStateIntegrating, runRunAction, wireNdjsonEmitter } from "./run-DcDoaG12.js";
5
+ import { normalizeGraphSummaryToStatus, registerRunCommand, resolveMaxReviewCycles, resolveProbeAuthorStateIntegrating, runRunAction, wireNdjsonEmitter } from "./run-BGXvVIwO.js";
6
6
  import "./routing-CcBOCuC9.js";
7
7
  import "./decisions-C0pz9Clx.js";
8
8
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "substrate-ai",
3
- "version": "0.20.57",
3
+ "version": "0.20.59",
4
4
  "description": "Substrate — multi-agent orchestration daemon for AI coding agents",
5
5
  "type": "module",
6
6
  "license": "MIT",
@@ -108,6 +108,20 @@ Use this exact format for each item:
108
108
 
109
109
  **Behavioral signals (runtime-dependent even when the artifact ships as TypeScript / JavaScript / Python source):** the AC describes the implementation invoking a **subprocess** (`execSync`, `spawn`, `child_process`), reading or writing the **filesystem outside test tmpdirs** (`fs.read*`, `fs.write*`, `path.join(homedir(), ...)`), running **git operations** (`git log`, `git push`, `git merge`), querying a **database** (Dolt, mysql, sqlite, postgres), making **network requests** (`fetch`, `axios`, `http.get`), or scanning a **registry / configuration source** ("queries the registry", "scans the fleet").
110
110
 
111
+ **Shell-script generation signals (runtime-dependent even when the artifact ships as source: the generated script runs on a live host, installs into `.git/hooks/`, or registers with the OS):** the AC describes the implementation generating or installing a script that a user or system event invokes — the correctness of that script can only be confirmed by running it in a real or ephemeral environment, not by inspecting source code alone. Signal types to recognize:
112
+
113
+ - **Hook generators**: code that writes or installs git hooks (`pre-push`, `post-merge`, `pre-commit`, `post-commit`, `post-rewrite`), configures `husky` or other hook managers, or writes files to `.git/hooks/*`.
114
+ - **Install scripts**: commands like `vg install`, `<binary> install`, or AC phrases like "installs the X hook", "writes the X script", "generates a wrapper for Y".
115
+ - **Lifecycle scripts**: generates npm `prepublish`, `postinstall`, or `prepare` scripts; writes entries into `package.json` `scripts` field programmatically.
116
+ - **Service generators**: generates systemd `.service` or `.timer` unit files, podman/docker image build scripts, or any file invoked by a scheduler or init system.
117
+ - **Wrapper scripts**: produces `#!/bin/sh`-style shell wrappers around binaries (e.g., `#!/bin/sh` / `exec node "$@"` shape), shim generators, or delegate-to-binary shell scripts.
118
+
119
+ Phrase patterns to flag as shell-script generation signals: "writes a hook", "generates a script", "installs a pre-push hook", "creates a wrapper", "emits a shell script", "writes to .git/hooks/", "creates a systemd unit".
120
+
121
+ Strata Stories 3-3+3-4 shipped LGTM_WITH_NOTES with a real dependency-confusion attack vector (`npx strata` fallback) because the verification gate accepted shell-script-generating code without a canonical-invocation probe. See `obs_2026-05-03_023` (create-story prompt enforce probe section for shell-out boundaries).
122
+
123
+ If any shell-script generation signal fires, the story MUST include a `## Runtime Probes` section that invokes the canonical user trigger in a fresh fixture project, not direct-call the script.
124
+
111
125
  **Architectural-level signals (the same external-state interactions described at higher abstraction levels — runtime-dependent identically):** the AC names a **named external dependency** (service, package, agent, skill, mesh, registry, queue, outbox, store, daemon, etc.) AND describes any **interaction verb** (queries, publishes, consumes, calls, writes to, reads from, subscribes to, registers with, delegates to, reaches for, persists to). Phrase patterns to recognize as the architectural-level equivalents of the code-API enumeration above:
112
126
 
113
127
  - `queries <service-or-skill>` (network / database) — e.g., "queries agent-mesh's `query-reports` skill", "queries the Dolt registry"
@@ -312,6 +312,59 @@ Read from an npm/package registry or fleet-config source. Precede the registry p
312
312
  description: npm registry resolves @substrate-ai/sdlc and returns a semver version string
313
313
  ```
314
314
 
315
+ ## Shell-script generation probe shapes
316
+
317
+ Shell-script generation ACs describe a generator that produces a lifecycle script — a pre-push hook, a postinstall wrapper, a systemd unit startup shim, or a cron-job body — that the **user** then invokes through a canonical mechanism (`git push`, `npm install`, etc.). This AC class was identified in obs_2026-05-03_023 (strata 3-3+3-4 incident: a pre-push hook generator shipped SHIP_IT with a dependency-confusion attack vector because the verification probe direct-called the generated script with synthetic inputs rather than invoking the canonical user trigger).
318
+
319
+ **Why the fresh-fixture requirement is critical:**
320
+
321
+ (a) The orchestrator's working tree may have global state (installed binaries, config files) that a typical user environment does not. A probe run against substrate's own working tree silently satisfies preconditions that would fail in a fresh project.
322
+
323
+ (b) The canonical user invocation runs in the user's project root — not substrate's. A probe that bypasses the install + wiring step (e.g., calls `bash .git/hooks/pre-push` directly) cannot detect that the hook was installed to the wrong path, was installed with the wrong mode, or was wired to the wrong trigger event.
324
+
325
+ (c) Defects like dependency-confusion (`npx <package>` fallback to global registry) only manifest when no local binary exists. On the orchestrator's machine, `node_modules/.bin/strata` satisfies the lookup before npm's fallback fires — masking the defect from any probe that runs inside the substrate working tree.
326
+
327
+ **Three rules for shell-script generation probes:**
328
+
329
+ 1. **Fresh fixture in `mktemp -d`** — never run against substrate's own project tree. The working tree silently satisfies probes that would fail in a user's fresh environment. Create a throwaway `mktemp -d` directory, `git init` it, and install the generator into it via the canonical install command.
330
+
331
+ 2. **Canonical user trigger** — `git push` for a pre-push hook, `npm install` for a postinstall hook, NOT direct script invocation (`bash .git/hooks/pre-push`). Direct invocation skips the wiring layer that determines whether the hook actually fires on the user's machine. See the trigger table in "Probes for event-driven mechanisms must invoke the production trigger" above.
332
+
333
+ 3. **Observable post-condition** — assert filesystem or process state the user would observe (e.g., `test -f .findings/history.jsonl`), not just exit code. A script that exits 0 without writing the expected artifact satisfies exit-code-only probes but silently fails the user. The assertion target must be the output the user can inspect after the event fires.
334
+
335
+ **Canonical worked example (strata 3-3 pre-push hook scenario — obs_2026-05-03_023 fix #1):**
336
+
337
+ ```yaml
338
+ - name: pre-push-hook-fires-on-real-push-and-archives-findings
339
+ sandbox: twin
340
+ command: |
341
+ set -e
342
+ FIXTURE=$(mktemp -d)
343
+ cd "$FIXTURE"
344
+ npm init -y >/dev/null
345
+ git init -q -b main
346
+ git config user.email t@example.com && git config user.name test
347
+ # install via canonical user invocation (no global packages)
348
+ node <REPO_ROOT>/dist/cli.js vg install
349
+ # produce a finding-eligible change
350
+ mkdir -p src
351
+ echo "import x from 'lodash';" > src/bad.ts
352
+ git add . && git commit -qm "initial"
353
+ # trigger canonical user-facing event via git push (pre-push hook fires here)
354
+ REMOTE=$(mktemp -d)
355
+ git init --bare -q "$REMOTE"
356
+ git remote add origin "$REMOTE"
357
+ git push origin main 2>&1 || true
358
+ # assert observable post-condition
359
+ test -f .findings/history.jsonl && echo "ARCHIVE_PRESENT" || echo "ARCHIVE_MISSING"
360
+ expect_stdout_regex:
361
+ - ARCHIVE_PRESENT
362
+ description: >-
363
+ strata 3-3 canonical pre-push hook shape — fresh fixture (mktemp -d),
364
+ canonical user trigger (git push), observable post-condition assertion
365
+ (obs_2026-05-03_023 fix #1)
366
+ ```
367
+
315
368
  ## Mission
316
369
 
317
370
  Author runtime probes for the story described above. Use the AC sections provided: