@danielblomma/cortex-mcp 2.0.6 → 2.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@danielblomma/cortex-mcp",
3
3
  "mcpName": "io.github.DanielBlomma/cortex",
4
- "version": "2.0.6",
4
+ "version": "2.0.7",
5
5
  "description": "Local, repo-scoped context platform for coding assistants. Semantic search, graph relationships, and architectural rule context.",
6
6
  "type": "module",
7
7
  "author": "Daniel Blomma",
@@ -6,3 +6,4 @@ export * from "./default-workflows.js";
6
6
  export * from "./mcp-tools.js";
7
7
  export * from "./capabilities.js";
8
8
  export * from "./enforcement.js";
9
+ export * from "./synced-registry.js";
@@ -2,6 +2,7 @@ import { z } from "zod";
2
2
  import { advanceStage, createRun, getRunState } from "./run-lifecycle.js";
3
3
  import { composeStageEnvelope } from "./envelope.js";
4
4
  import { DEFAULT_WORKFLOWS } from "./default-workflows.js";
5
+ import { loadSyncedWorkflows } from "./synced-registry.js";
5
6
  import {
6
7
  stageStatusSchema,
7
8
  type StageStatus,
@@ -79,7 +80,13 @@ function resolveWorkflow(
79
80
  workflowId: string,
80
81
  registry: Record<string, WorkflowDefinition> | undefined,
81
82
  ): WorkflowDefinition {
82
- const workflows = registry ?? DEFAULT_WORKFLOWS;
83
+ // When the caller passes an explicit registry, it wins outright (used
84
+ // by tests). Otherwise we merge bundled defaults with the org-authored
85
+ // workflows the daemon has synced into ~/.cortex/workflows.local.json,
86
+ // with the synced ones taking precedence on workflow_id collisions so
87
+ // org overrides actually override.
88
+ const workflows =
89
+ registry ?? { ...DEFAULT_WORKFLOWS, ...loadSyncedWorkflows() };
83
90
  const workflow = workflows[workflowId];
84
91
  if (!workflow) {
85
92
  throw new Error(
@@ -0,0 +1,64 @@
1
+ import { existsSync, readFileSync } from "node:fs";
2
+ import { homedir } from "node:os";
3
+ import { join } from "node:path";
4
+ import { workflowDefinitionSchema, type WorkflowDefinition } from "./schemas.js";
5
+
6
+ /**
7
+ * Read side of the org-workflow sync cache. The daemon's
8
+ * workflow-sync-checker writes ~/.cortex/workflows.local.json; this
9
+ * module reads it. Kept in core/workflow/ rather than daemon/ so
10
+ * mcp-tools.ts can consult the cache without depending on daemon code.
11
+ *
12
+ * Each entry is validated against workflowDefinitionSchema before being
13
+ * surfaced — if the cache file is corrupt or contains stale shapes from
14
+ * an older daemon, those entries are silently dropped rather than
15
+ * crashing the read.
16
+ */
17
+
18
// Basename of the cache file under ~/.cortex. The daemon's
// workflow-sync-checker writes a file with the same name (see the module
// header above); this constant is exported so tests and the writer can
// agree on the path.
export const SYNCED_WORKFLOWS_FILENAME = "workflows.local.json";

// One cached workflow as the daemon persists it. `definition` is
// deliberately `unknown` on this read side: it is validated against
// workflowDefinitionSchema in loadSyncedWorkflows() before being
// surfaced, so stale or corrupt shapes are dropped rather than trusted.
type LocalWorkflowRecord = {
  workflow_id: string;
  version: number;
  updated_at: string; // ISO-8601 timestamp — presumably from the server manifest; confirm against the writer
  definition: unknown;
};

// Top-level shape of workflows.local.json. `workflows` is optional so a
// partially written or older-format file degrades to "no synced
// workflows" instead of crashing the reader.
type LocalWorkflowsState = {
  workflows?: Record<string, LocalWorkflowRecord>;
};
30
+
31
+ export function syncedWorkflowsCachePath(dir?: string): string {
32
+ return join(dir ?? join(homedir(), ".cortex"), SYNCED_WORKFLOWS_FILENAME);
33
+ }
34
+
35
+ /**
36
+ * Returns the synced org-authored workflows keyed by `workflow_id`.
37
+ * Empty object when the cache is missing, unreadable, malformed, or
38
+ * contains no valid entries. The optional `dir` argument is for tests;
39
+ * production callers leave it unset.
40
+ */
41
+ export function loadSyncedWorkflows(
42
+ dir?: string,
43
+ ): Record<string, WorkflowDefinition> {
44
+ const path = syncedWorkflowsCachePath(dir);
45
+ if (!existsSync(path)) return {};
46
+
47
+ let parsed: LocalWorkflowsState;
48
+ try {
49
+ parsed = JSON.parse(readFileSync(path, "utf8")) as LocalWorkflowsState;
50
+ } catch {
51
+ return {};
52
+ }
53
+ const records = parsed.workflows;
54
+ if (!records || typeof records !== "object") return {};
55
+
56
+ const out: Record<string, WorkflowDefinition> = {};
57
+ for (const [id, record] of Object.entries(records)) {
58
+ if (!record || typeof record !== "object") continue;
59
+ const result = workflowDefinitionSchema.safeParse(record.definition);
60
+ if (!result.success) continue;
61
+ out[id] = result.data;
62
+ }
63
+ return out;
64
+ }
@@ -28,6 +28,7 @@ import {
28
28
  } from "./heartbeat-tracker.js";
29
29
  import { startSyncTimer } from "./sync-checker.js";
30
30
  import { startSkillSyncTimer } from "./skill-sync-checker.js";
31
+ import { startWorkflowSyncTimer } from "./workflow-sync-checker.js";
31
32
  import { startHostEventsPusher } from "./host-events-pusher.js";
32
33
  import { startEgressProxy } from "./egress-proxy.js";
33
34
  import { startHeartbeatPusher } from "./heartbeat-pusher.js";
@@ -357,6 +358,20 @@ async function main(): Promise<void> {
357
358
  startSkillSyncTimer(process.cwd(), skillSyncMs);
358
359
  }
359
360
 
361
+ // Harness Phase 2: poll cortex-web for org-authored workflows, cache
362
+ // their definitions locally so cortex.workflow.start can resolve
363
+ // org-specific workflow_ids ahead of bundled defaults. Same cadence
364
+ // as the skill sync by default; independently configurable via
365
+ // CORTEX_WORKFLOW_SYNC_MS / CORTEX_DISABLE_WORKFLOW_SYNC.
366
+ const workflowSyncRaw = parseInt(process.env.CORTEX_WORKFLOW_SYNC_MS ?? "", 10);
367
+ const workflowSyncMs =
368
+ Number.isFinite(workflowSyncRaw) && workflowSyncRaw > 0
369
+ ? workflowSyncRaw
370
+ : skillSyncMs;
371
+ if (process.env.CORTEX_DISABLE_WORKFLOW_SYNC !== "1") {
372
+ startWorkflowSyncTimer(process.cwd(), workflowSyncMs);
373
+ }
374
+
360
375
  // Govern host heartbeat — fills host_enrollment on cortex-web so the
361
376
  // dashboard at /dashboard/govern actually shows this host.
362
377
  const heartbeatRaw = parseInt(process.env.CORTEX_HEARTBEAT_PUSH_MS ?? "", 10);
@@ -0,0 +1,301 @@
1
+ import {
2
+ existsSync,
3
+ readFileSync,
4
+ writeFileSync,
5
+ } from "node:fs";
6
+ import { hostname } from "node:os";
7
+ import { join } from "node:path";
8
+ import { loadEnterpriseConfig } from "../core/config.js";
9
+ import { workflowDefinitionSchema, type WorkflowDefinition } from "../core/workflow/schemas.js";
10
+ import { writeHostAuditEvent } from "./ungoverned-scanner.js";
11
+ import { daemonDir } from "./paths.js";
12
+
13
+ /**
14
+ * Org-workflow sync flow — daemon side.
15
+ *
16
+ * The daemon polls cortex-web /api/v1/govern/workflows/manifest each tick
17
+ * to learn what workflows the org has authored. It diffs against a local
18
+ * state file, fetches changed full definitions, and caches them locally.
19
+ * cortex.workflow.start (and the cortex stage CLI) read the cache via
20
+ * loadSyncedWorkflows() and merge with bundled DEFAULT_WORKFLOWS, with
21
+ * org definitions taking precedence on workflow_id collisions.
22
+ *
23
+ * Three audit outcomes per tick:
24
+ * - workflows_unchanged — manifest matches local state
25
+ * - workflows_synced — at least one workflow was added / changed /
26
+ * removed (metadata: counts)
27
+ * - workflows_sync_failed — network / auth / parse error
28
+ *
29
+ * Unlike skills, there is no on-disk artifact to write per workflow —
30
+ * the cached JSON is the only product. No "restart Claude Code"
31
+ * notification is needed because workflow lookup happens at run-start.
32
+ */
33
+
34
// State file basename — identical to SYNCED_WORKFLOWS_FILENAME on the
// core/workflow read side, so writer and reader resolve the same file.
const STATE_FILENAME = "workflows.local.json";

// One row of the server manifest: just enough (version + updated_at) to
// diff against local state without fetching full definitions.
type ManifestEntry = {
  workflow_id: string;
  version: number;
  updated_at: string;
};

// Full payload returned per-workflow by the govern API.
type FetchedWorkflow = {
  workflow_id: string;
  description: string;
  version: number;
  definition: WorkflowDefinition;
  updated_at: string;
};

// One cached workflow as persisted in the local state file.
type LocalWorkflowRecord = {
  workflow_id: string;
  version: number;
  updated_at: string;
  definition: WorkflowDefinition;
};

// On-disk shape of workflows.local.json as this module writes it.
type LocalWorkflowsState = {
  workflows: Record<string, LocalWorkflowRecord>;
  last_synced_at?: string;
};

// Result of one sync pass. The three kinds map one-to-one onto the
// audit outcomes documented in the module header.
export type WorkflowSyncOutcome =
  | { kind: "unchanged"; count: number }
  | {
      kind: "synced";
      added: string[];
      changed: string[];
      removed: string[];
    }
  | { kind: "failed"; error: string };
71
+
72
+ function stateFilePath(): string {
73
+ return join(daemonDir(), STATE_FILENAME);
74
+ }
75
+
76
+ export function readSyncedWorkflowsState(): LocalWorkflowsState {
77
+ const path = stateFilePath();
78
+ if (!existsSync(path)) return { workflows: {} };
79
+ try {
80
+ const parsed = JSON.parse(readFileSync(path, "utf8")) as LocalWorkflowsState;
81
+ return {
82
+ workflows: parsed.workflows ?? {},
83
+ last_synced_at: parsed.last_synced_at,
84
+ };
85
+ } catch {
86
+ return { workflows: {} };
87
+ }
88
+ }
89
+
90
+ function writeSyncedWorkflowsState(state: LocalWorkflowsState): void {
91
+ writeFileSync(
92
+ stateFilePath(),
93
+ JSON.stringify(state, null, 2) + "\n",
94
+ "utf8",
95
+ );
96
+ }
97
+
98
+ async function fetchManifest(
99
+ baseUrl: string,
100
+ apiKey: string,
101
+ ): Promise<ManifestEntry[]> {
102
+ const url = new URL(
103
+ baseUrl.replace(/\/$/, "") + "/api/v1/govern/workflows/manifest",
104
+ );
105
+ const res = await fetch(url, {
106
+ headers: { Authorization: `Bearer ${apiKey}` },
107
+ });
108
+ if (!res.ok) {
109
+ throw new Error(`HTTP ${res.status} ${res.statusText}`);
110
+ }
111
+ const body = (await res.json()) as { workflows?: ManifestEntry[] };
112
+ return body.workflows ?? [];
113
+ }
114
+
115
+ async function fetchWorkflow(
116
+ baseUrl: string,
117
+ apiKey: string,
118
+ workflowId: string,
119
+ ): Promise<FetchedWorkflow> {
120
+ const url = new URL(
121
+ baseUrl.replace(/\/$/, "") +
122
+ "/api/v1/govern/workflows/" +
123
+ encodeURIComponent(workflowId),
124
+ );
125
+ const res = await fetch(url, {
126
+ headers: { Authorization: `Bearer ${apiKey}` },
127
+ });
128
+ if (!res.ok) {
129
+ throw new Error(`HTTP ${res.status} ${res.statusText}`);
130
+ }
131
+ const body = (await res.json()) as { workflow?: FetchedWorkflow };
132
+ if (!body.workflow) {
133
+ throw new Error(`Response for ${workflowId} missing 'workflow' field`);
134
+ }
135
+ return body.workflow;
136
+ }
137
+
138
/**
 * Execute one full sync pass against cortex-web and return the outcome.
 *
 * Flow: load enterprise config → fetch manifest → diff against local
 * state → fetch + schema-validate each new/changed definition → prune
 * locals absent from the manifest → persist state only if something
 * changed. Every exit path (including every failure) writes exactly one
 * audit event via writeAudit() before returning.
 *
 * Important ordering property: any fetch/validate failure returns
 * immediately WITHOUT calling writeSyncedWorkflowsState(), so a partial
 * pass mutates `state` only in memory — the on-disk cache is updated
 * atomically-per-pass at the end.
 *
 * @param cwd Repo root; `.context` under it holds the enterprise config.
 */
export async function runWorkflowSyncOnce(
  cwd: string,
): Promise<WorkflowSyncOutcome> {
  const config = loadEnterpriseConfig(join(cwd, ".context"));
  const apiKey = config.enterprise.api_key.trim();
  // base_url wins when set; endpoint is the fallback.
  const baseUrl = (config.enterprise.base_url || config.enterprise.endpoint).trim();
  if (!apiKey || !baseUrl) {
    const outcome: WorkflowSyncOutcome = {
      kind: "failed",
      error: "enterprise not configured",
    };
    await writeAudit(cwd, outcome);
    return outcome;
  }

  let manifest: ManifestEntry[];
  try {
    manifest = await fetchManifest(baseUrl, apiKey);
  } catch (err) {
    const outcome: WorkflowSyncOutcome = {
      kind: "failed",
      error: err instanceof Error ? err.message : String(err),
    };
    await writeAudit(cwd, outcome);
    return outcome;
  }

  const state = readSyncedWorkflowsState();
  // Index remote ids for the prune pass below.
  const remoteByName = new Map(manifest.map((e) => [e.workflow_id, e]));

  const added: string[] = [];
  const changed: string[] = [];
  const removed: string[] = [];

  for (const entry of manifest) {
    const local = state.workflows[entry.workflow_id];
    const isNew = !local;
    // Either the server timestamp or the version bumping counts as a change.
    const isChanged =
      Boolean(local) &&
      (local.updated_at !== entry.updated_at || local.version !== entry.version);
    if (!isNew && !isChanged) continue;

    // Fetch the full definition only for new/changed entries (the
    // manifest carries metadata, not definitions).
    let fetched: FetchedWorkflow;
    try {
      fetched = await fetchWorkflow(baseUrl, apiKey, entry.workflow_id);
    } catch (err) {
      const outcome: WorkflowSyncOutcome = {
        kind: "failed",
        error:
          err instanceof Error
            ? `fetch ${entry.workflow_id}: ${err.message}`
            : `fetch ${entry.workflow_id}: ${String(err)}`,
      };
      await writeAudit(cwd, outcome);
      return outcome;
    }

    // Schema-validate before caching: a malformed server definition
    // aborts the pass rather than poisoning the local cache.
    let validated: WorkflowDefinition;
    try {
      validated = workflowDefinitionSchema.parse(fetched.definition);
    } catch (err) {
      const outcome: WorkflowSyncOutcome = {
        kind: "failed",
        error:
          err instanceof Error
            ? `validate ${entry.workflow_id}: ${err.message}`
            : `validate ${entry.workflow_id}: ${String(err)}`,
      };
      await writeAudit(cwd, outcome);
      return outcome;
    }

    state.workflows[entry.workflow_id] = {
      workflow_id: entry.workflow_id,
      version: fetched.version,
      updated_at: fetched.updated_at,
      definition: validated,
    };
    (isNew ? added : changed).push(entry.workflow_id);
  }

  // Prune cached workflows the org has deleted server-side. Iterating a
  // snapshot of the keys makes the in-loop delete safe.
  for (const name of Object.keys(state.workflows)) {
    if (remoteByName.has(name)) continue;
    delete state.workflows[name];
    removed.push(name);
  }

  const totalChanged = added.length + changed.length + removed.length;
  if (totalChanged === 0) {
    // Nothing to persist — report the unchanged manifest size.
    const outcome: WorkflowSyncOutcome = {
      kind: "unchanged",
      count: manifest.length,
    };
    await writeAudit(cwd, outcome);
    return outcome;
  }

  state.last_synced_at = new Date().toISOString();
  writeSyncedWorkflowsState(state);
  const outcome: WorkflowSyncOutcome = {
    kind: "synced",
    added,
    changed,
    removed,
  };
  await writeAudit(cwd, outcome);
  return outcome;
}
246
+
247
+ async function writeAudit(cwd: string, outcome: WorkflowSyncOutcome): Promise<void> {
248
+ const eventBase = {
249
+ timestamp: new Date().toISOString(),
250
+ host_id: hostname(),
251
+ };
252
+ if (outcome.kind === "unchanged") {
253
+ await writeHostAuditEvent(cwd, {
254
+ ...eventBase,
255
+ event_type: "workflows_unchanged",
256
+ count: outcome.count,
257
+ }).catch(() => undefined);
258
+ } else if (outcome.kind === "synced") {
259
+ await writeHostAuditEvent(cwd, {
260
+ ...eventBase,
261
+ event_type: "workflows_synced",
262
+ added: outcome.added,
263
+ changed: outcome.changed,
264
+ removed: outcome.removed,
265
+ }).catch(() => undefined);
266
+ } else {
267
+ await writeHostAuditEvent(cwd, {
268
+ ...eventBase,
269
+ event_type: "workflows_sync_failed",
270
+ error: outcome.error,
271
+ }).catch(() => undefined);
272
+ }
273
+ }
274
+
275
+ export type WorkflowSyncTimerHandle = {
276
+ stop(): void;
277
+ };
278
+
279
+ export function startWorkflowSyncTimer(
280
+ cwd: string,
281
+ intervalMs: number,
282
+ ): WorkflowSyncTimerHandle {
283
+ const tick = () => {
284
+ void runWorkflowSyncOnce(cwd).catch((err) => {
285
+ process.stderr.write(
286
+ `[cortex-daemon] workflow sync failed: ${
287
+ err instanceof Error ? err.message : String(err)
288
+ }\n`,
289
+ );
290
+ });
291
+ };
292
+
293
+ void Promise.resolve().then(tick);
294
+ const handle = setInterval(tick, intervalMs);
295
+ if (typeof handle.unref === "function") handle.unref();
296
+ return {
297
+ stop() {
298
+ clearInterval(handle);
299
+ },
300
+ };
301
+ }
@@ -0,0 +1,179 @@
1
+ import test from "node:test";
2
+ import assert from "node:assert/strict";
3
+ import fs from "node:fs";
4
+ import os from "node:os";
5
+ import path from "node:path";
6
+
7
+ import {
8
+ loadSyncedWorkflows,
9
+ syncedWorkflowsCachePath,
10
+ } from "../dist/core/workflow/synced-registry.js";
11
+ import { runWorkflowStart } from "../dist/core/workflow/mcp-tools.js";
12
+ import { SECURE_BUILD_WORKFLOW } from "../dist/core/workflow/default-workflows.js";
13
+
14
// Fresh throwaway directory per test; mkdtemp appends a random suffix so
// parallel tests never collide.
function makeWorkspace() {
  const prefix = path.join(os.tmpdir(), "cortex-synced-registry-");
  return fs.mkdtempSync(prefix);
}
17
+
18
// Minimal two-stage workflow used as the cache payload throughout these
// tests. It is expected to satisfy workflowDefinitionSchema (the
// "valid-one"/"tiny" tests below rely on that). The two stages exercise
// both empty and non-empty `reads` / `required_fields` lists.
const TINY_WORKFLOW = {
  id: "tiny",
  description: "Two-stage tests workflow",
  version: 1,
  stages: [
    {
      name: "plan",
      artifact: "plan.md",
      reads: [],
      required_fields: [],
      capability: "planner",
      description: "Produce a plan.",
    },
    {
      name: "review",
      artifact: "review.md",
      reads: ["plan"],
      required_fields: ["approved"],
      capability: "reviewer",
      description: "Review the plan.",
    },
  ],
};
41
+
42
// Write `payload` as pretty-printed JSON at the exact path the loader
// reads, creating the directory first.
function writeCache(dir, payload) {
  fs.mkdirSync(dir, { recursive: true });
  const serialized = JSON.stringify(payload, null, 2);
  fs.writeFileSync(syncedWorkflowsCachePath(dir), serialized, "utf8");
}
50
+
51
test("syncedWorkflowsCachePath: defaults to ~/.cortex/workflows.local.json", () => {
  // Production read path is always under the user's home directory.
  assert.equal(
    syncedWorkflowsCachePath(),
    path.join(os.homedir(), ".cortex", "workflows.local.json"),
  );
});

test("loadSyncedWorkflows: returns {} when cache is missing", () => {
  assert.deepEqual(loadSyncedWorkflows(makeWorkspace()), {});
});

test("loadSyncedWorkflows: returns {} when cache is unreadable JSON", () => {
  const workspace = makeWorkspace();
  fs.writeFileSync(syncedWorkflowsCachePath(workspace), "not valid json", "utf8");
  assert.deepEqual(loadSyncedWorkflows(workspace), {});
});

test("loadSyncedWorkflows: returns {} when 'workflows' key is missing", () => {
  const workspace = makeWorkspace();
  writeCache(workspace, { last_synced_at: "x" });
  assert.deepEqual(loadSyncedWorkflows(workspace), {});
});

test("loadSyncedWorkflows: drops entries whose definition fails schema", () => {
  const workspace = makeWorkspace();
  const validRecord = {
    workflow_id: "valid-one",
    version: 1,
    updated_at: "2026-05-06T12:00:00.000Z",
    definition: TINY_WORKFLOW,
  };
  const brokenRecord = {
    workflow_id: "broken-one",
    version: 1,
    updated_at: "2026-05-06T12:00:00.000Z",
    definition: { id: "broken-one" /* missing stages */ },
  };
  writeCache(workspace, {
    workflows: { "valid-one": validRecord, "broken-one": brokenRecord },
  });
  const keys = Object.keys(loadSyncedWorkflows(workspace)).sort();
  assert.deepEqual(keys, ["valid-one"]);
});

test("loadSyncedWorkflows: returns valid workflow definitions keyed by workflow_id", () => {
  const workspace = makeWorkspace();
  writeCache(workspace, {
    workflows: {
      tiny: {
        workflow_id: "tiny",
        version: 1,
        updated_at: "2026-05-06T12:00:00.000Z",
        definition: TINY_WORKFLOW,
      },
    },
  });
  const synced = loadSyncedWorkflows(workspace);
  assert.equal(synced.tiny.id, "tiny");
  assert.equal(synced.tiny.stages.length, 2);
});
111
+
112
test("resolveWorkflow integration: synced workflow takes precedence over bundled default", () => {
  // We can't easily intercept loadSyncedWorkflows() from inside
  // mcp-tools.ts (it reads from a fixed home-dir path). Instead, exercise
  // the explicit-registry path which mirrors what the merge would do
  // and confirm the contract: passing a registry that includes the same
  // id as a bundled default uses the registry version.
  const cwd = makeWorkspace();
  // Save HOME so the finally block can restore it exactly. The previous
  // cleanup (`delete process.env.HOME`) wiped the real HOME for every
  // test that ran later in this process.
  const prevHome = process.env.HOME;
  process.env.HOME = cwd; // sandbox the cache lookup
  try {
    const overridden = {
      ...SECURE_BUILD_WORKFLOW,
      description: "Org-overridden secure-build",
    };
    const registry = { "secure-build": overridden };
    const result = runWorkflowStart(
      {
        task_id: "task-1",
        task_description: "test",
        workflow_id: "secure-build",
      },
      { cwd, workflows: registry },
    );
    assert.equal(result.state.workflow_id, "secure-build");
    // The envelope renders the workflow description verbatim — confirms
    // we got the org-overridden version and not the bundled one.
    assert.match(result.envelope.prompt, /Org-overridden secure-build/);
  } finally {
    if (prevHome === undefined) delete process.env.HOME;
    else process.env.HOME = prevHome;
  }
});

test("resolveWorkflow integration: synced cache adds new workflow_ids beyond defaults", () => {
  const cwd = makeWorkspace();
  // Build a cache under a tmp HOME and point HOME at it, so that the
  // home-dir-based loader picks it up.
  const fakeHome = makeWorkspace();
  // Same HOME save/restore fix as above: restore the original value
  // instead of deleting the variable outright.
  const prevHome = process.env.HOME;
  process.env.HOME = fakeHome;
  try {
    fs.mkdirSync(path.join(fakeHome, ".cortex"), { recursive: true });
    fs.writeFileSync(
      path.join(fakeHome, ".cortex", "workflows.local.json"),
      JSON.stringify({
        workflows: {
          tiny: {
            workflow_id: "tiny",
            version: 1,
            updated_at: "2026-05-06T12:00:00.000Z",
            definition: TINY_WORKFLOW,
          },
        },
      }),
      "utf8",
    );

    const result = runWorkflowStart(
      {
        task_id: "task-2",
        task_description: "Use synced workflow",
        workflow_id: "tiny",
      },
      { cwd },
    );
    assert.equal(result.state.workflow_id, "tiny");
    assert.equal(result.state.current_stage, "plan");
  } finally {
    if (prevHome === undefined) delete process.env.HOME;
    else process.env.HOME = prevHome;
  }
});