dtu-github-actions 0.7.0 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -188,6 +188,29 @@ describe("Action Tarball Cache", () => {
188
188
  const data = await res.json();
189
189
  expect(data.actions).toEqual({});
190
190
  });
191
+ it("should skip local actions without crashing", async () => {
192
+ const baseUrl = `http://localhost:${PORT}`;
193
+ const res = await fetch(`${baseUrl}/_apis/distributedtask/hubs/Hub/plans/Plan/actiondownloadinfo`, {
194
+ method: "POST",
195
+ headers: { "Content-Type": "application/json" },
196
+ body: JSON.stringify({
197
+ actions: [
198
+ { nameWithOwner: "./.github/actions/shared-node-cache", ref: "" },
199
+ { nameWithOwner: "", ref: "" },
200
+ { nameWithOwner: "actions/checkout", ref: "v4" },
201
+ ],
202
+ }),
203
+ });
204
+ expect(res.status).toBe(200);
205
+ const data = await res.json();
206
+ // Local and empty actions should be skipped
207
+ expect(data.actions["./.github/actions/shared-node-cache@"]).toBeUndefined();
208
+ expect(data.actions["@"]).toBeUndefined();
209
+ // Remote action should still be present
210
+ const checkoutInfo = data.actions["actions/checkout@v4"];
211
+ expect(checkoutInfo).toBeDefined();
212
+ expect(checkoutInfo.tarballUrl).toBe(`${baseUrl}/_dtu/action-tarball/actions/checkout/v4`);
213
+ });
191
214
  });
192
215
  // ── writeStepOutputLines group filtering ─────────────────────────────────────
193
216
  // The writeStepOutputLines function is internal to registerActionRoutes, so we
@@ -38,6 +38,19 @@ export function toContextData(obj) {
38
38
  // TemplateTokenJsonConverter uses "type" key (integer) NOT the contextData "t" key.
39
39
  // TokenType.Mapping = 2. Items are serialized as {Key: scalarToken, Value: templateToken}.
40
40
  // Strings without file/line/col are serialized as bare string values.
41
+ /**
42
+ * Convert a string value to the appropriate TemplateToken.
43
+ * If the value is a pure `${{ expr }}` expression, encode it as a
44
+ * BasicExpressionToken (type 3) so the runner evaluates it at execution time.
45
+ * Otherwise, return a bare string (StringToken).
46
+ */
47
+ function toTemplateTokenValue(v) {
48
+ const exprMatch = v.match(/^\$\{\{\s*([\s\S]+?)\s*\}\}$/);
49
+ if (exprMatch) {
50
+ return { type: 3, expr: exprMatch[1] };
51
+ }
52
+ return v;
53
+ }
41
54
  export function toTemplateTokenMapping(obj) {
42
55
  const entries = Object.entries(obj);
43
56
  if (entries.length === 0) {
@@ -45,7 +58,7 @@ export function toTemplateTokenMapping(obj) {
45
58
  }
46
59
  return {
47
60
  type: 2,
48
- map: entries.map(([k, v]) => ({ Key: k, Value: v })),
61
+ map: entries.map(([k, v]) => ({ Key: k, Value: toTemplateTokenValue(v) })),
49
62
  };
50
63
  }
51
64
  /**
@@ -96,6 +109,7 @@ export function createJobResponse(jobId, payload, baseUrl, planId) {
96
109
  id: step.Id || step.id || crypto.randomUUID(),
97
110
  name: step.Name || step.name || `step-${index}`,
98
111
  displayName: step.DisplayName || step.Name || step.name || `step-${index}`,
112
+ contextName: step.ContextName || step.contextName || undefined,
99
113
  type: (step.Type || "Action").toLowerCase(),
100
114
  reference: (() => {
101
115
  const refTypeSource = step.Reference?.Type || "Script";
@@ -128,6 +142,12 @@ export function createJobResponse(jobId, payload, baseUrl, planId) {
128
142
  const ownerName = payload.repository?.owner?.login || "redwoodjs";
129
143
  const repoName = payload.repository?.name || repoFullName.split("/")[1] || "";
130
144
  const workspacePath = `/home/runner/_work/${repoName}/${repoName}`;
145
+ const headSha = payload.headSha && payload.headSha !== "HEAD"
146
+ ? payload.headSha
147
+ : "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
148
+ // realHeadSha is the actual HEAD commit SHA, even when headSha is unset
149
+ // (dirty workspace mode). Used for push event context (before/after).
150
+ const realHeadSha = payload.realHeadSha || headSha;
131
151
  const Variables = {
132
152
  // Standard GitHub Actions environment variables — always set by real runners.
133
153
  // CI=true is required by many scripts that branch on CI vs local (e.g. default DB_HOST).
@@ -145,6 +165,8 @@ export function createJobResponse(jobId, payload, baseUrl, planId) {
145
165
  GITHUB_RUN_NUMBER: { Value: "1", IsSecret: false },
146
166
  GITHUB_JOB: { Value: payload.name || "local-job", IsSecret: false },
147
167
  GITHUB_EVENT_NAME: { Value: "push", IsSecret: false },
168
+ GITHUB_API_URL: { Value: baseUrl, IsSecret: false },
169
+ GITHUB_SERVER_URL: { Value: "https://github.com", IsSecret: false },
148
170
  GITHUB_REF_NAME: { Value: "main", IsSecret: false },
149
171
  GITHUB_WORKFLOW: { Value: payload.workflowName || "local-workflow", IsSecret: false },
150
172
  GITHUB_WORKSPACE: { Value: workspacePath, IsSecret: false },
@@ -154,30 +176,24 @@ export function createJobResponse(jobId, payload, baseUrl, planId) {
154
176
  "system.github.repository": { Value: repoFullName, IsSecret: false },
155
177
  "github.repository": { Value: repoFullName, IsSecret: false },
156
178
  "github.actor": { Value: ownerName, IsSecret: false },
157
- "github.sha": {
158
- Value: payload.headSha && payload.headSha !== "HEAD"
159
- ? payload.headSha
160
- : "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
161
- IsSecret: false,
162
- },
179
+ "github.sha": { Value: realHeadSha, IsSecret: false },
163
180
  "github.ref": { Value: "refs/heads/main", IsSecret: false },
164
181
  repository: { Value: repoFullName, IsSecret: false },
165
182
  GITHUB_REPOSITORY: { Value: repoFullName, IsSecret: false },
166
183
  GITHUB_ACTOR: { Value: ownerName, IsSecret: false },
167
- GITHUB_SHA: {
168
- Value: payload.headSha && payload.headSha !== "HEAD"
169
- ? payload.headSha
170
- : "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
171
- IsSecret: false,
172
- },
184
+ GITHUB_SHA: { Value: realHeadSha, IsSecret: false },
173
185
  "build.repository.name": { Value: repoFullName, IsSecret: false },
174
186
  "build.repository.uri": { Value: `https://github.com/${repoFullName}`, IsSecret: false },
175
187
  };
176
- // Merge all step-level env: vars into the job Variables.
188
+ // Merge job-level env: into Variables first, then step-level env: (step wins on conflict).
177
189
  // The runner exports every Variable as a process env var for all steps, so this is the
178
- // reliable mechanism to get DB_HOST=mysql, DB_PORT=3306 etc. into the step subprocess.
179
- // (Step-scoped env would require full template compilation; job-level Variables are sufficient
180
- // for DB credentials / CI flags that are consistent across steps.)
190
+ // reliable mechanism to get AGENT_CI_LOCAL, DB_HOST, DB_PORT etc. into the step subprocess
191
+ // and into the runner's expression engine (${{ env.AGENT_CI_LOCAL }}).
192
+ if (payload.env && typeof payload.env === "object") {
193
+ for (const [key, val] of Object.entries(payload.env)) {
194
+ Variables[key] = { Value: String(val), IsSecret: false };
195
+ }
196
+ }
181
197
  for (const step of payload.steps || []) {
182
198
  if (step.Env && typeof step.Env === "object") {
183
199
  for (const [key, val] of Object.entries(step.Env)) {
@@ -188,12 +204,11 @@ export function createJobResponse(jobId, payload, baseUrl, planId) {
188
204
  const githubContext = {
189
205
  repository: repoFullName,
190
206
  actor: ownerName,
191
- sha: payload.headSha && payload.headSha !== "HEAD"
192
- ? payload.headSha
193
- : "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
207
+ sha: realHeadSha,
194
208
  ref: "refs/heads/main",
209
+ event_name: "push",
195
210
  server_url: "https://github.com",
196
- api_url: `${baseUrl}/_apis`,
211
+ api_url: `${baseUrl}`,
197
212
  graphql_url: `${baseUrl}/_graphql`,
198
213
  workspace: workspacePath,
199
214
  action: "__run",
@@ -212,16 +227,19 @@ export function createJobResponse(jobId, payload, baseUrl, planId) {
212
227
  name: repoName,
213
228
  owner: { login: ownerName },
214
229
  },
230
+ before: payload.baseSha || "0000000000000000000000000000000000000000",
231
+ after: realHeadSha,
215
232
  };
216
233
  }
217
- // Collect env vars from all steps (job-level env context seen by the runner's expression engine).
218
- // Step-level `env:` blocks in the workflow YAML need to be exposed via ContextData.env so the
219
- // runner can evaluate them. We merge all step envs — slightly broader than per-step scoping but
220
- // correct for typical use (DB_HOST, DB_PORT, CI flags etc.).
221
- const mergedStepEnv = {};
234
+ // Collect env vars from job-level and all steps (seen by the runner's expression engine).
235
+ // Job-level env is applied first, then step-level env wins on conflict.
236
+ const mergedEnv = {};
237
+ if (payload.env && typeof payload.env === "object") {
238
+ Object.assign(mergedEnv, payload.env);
239
+ }
222
240
  for (const step of payload.steps || []) {
223
241
  if (step.Env) {
224
- Object.assign(mergedStepEnv, step.Env);
242
+ Object.assign(mergedEnv, step.Env);
225
243
  }
226
244
  }
227
245
  const ContextData = {
@@ -230,9 +248,9 @@ export function createJobResponse(jobId, payload, baseUrl, planId) {
230
248
  needs: { t: 2, d: [] }, // Empty needs context
231
249
  strategy: { t: 2, d: [] }, // Empty strategy context
232
250
  matrix: { t: 2, d: [] }, // Empty matrix context
233
- // env context: merged from all step-level env: blocks so the runner's expression engine
234
- // can substitute ${{ env.DB_HOST }} etc. during step execution.
235
- ...(Object.keys(mergedStepEnv).length > 0 ? { env: toContextData(mergedStepEnv) } : {}),
251
+ // env context: merged from job-level + step-level env: blocks so the runner's expression
252
+ // engine can substitute ${{ env.AGENT_CI_LOCAL }}, ${{ env.DB_HOST }} etc.
253
+ ...(Object.keys(mergedEnv).length > 0 ? { env: toContextData(mergedEnv) } : {}),
236
254
  };
237
255
  const generatedJobId = crypto.randomUUID();
238
256
  const mockToken = createMockJwt(planId, generatedJobId);
@@ -303,7 +321,7 @@ export function createJobResponse(jobId, payload, baseUrl, planId) {
303
321
  // EnvironmentVariables is IList<TemplateToken> in the runner — each element is a MappingToken.
304
322
  // The runner evaluates each MappingToken and merges into Global.EnvironmentVariables (last wins),
305
323
  // which then populates ExpressionValues["env"] → subprocess env vars.
306
- EnvironmentVariables: Object.keys(mergedStepEnv).length > 0 ? [toTemplateTokenMapping(mergedStepEnv)] : [],
324
+ EnvironmentVariables: Object.keys(mergedEnv).length > 0 ? [toTemplateTokenMapping(mergedEnv)] : [],
307
325
  };
308
326
  return {
309
327
  MessageId: 1,
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,71 @@
1
+ import { describe, it, expect } from "vitest";
2
+ import { createJobResponse } from "./generators.js";
3
+ describe("createJobResponse", () => {
4
+ const basePayload = {
5
+ id: "1",
6
+ name: "test-job",
7
+ githubRepo: "owner/repo",
8
+ steps: [],
9
+ };
10
+ it("propagates job-level env into Variables", () => {
11
+ const payload = {
12
+ ...basePayload,
13
+ env: { AGENT_CI_LOCAL: "true", MY_VAR: "hello" },
14
+ };
15
+ const response = createJobResponse("1", payload, "http://localhost:3000", "plan-1");
16
+ const body = JSON.parse(response.Body);
17
+ const vars = body.Variables;
18
+ expect(vars.AGENT_CI_LOCAL).toEqual({ Value: "true", IsSecret: false });
19
+ expect(vars.MY_VAR).toEqual({ Value: "hello", IsSecret: false });
20
+ });
21
+ it("propagates job-level env into ContextData.env", () => {
22
+ const payload = {
23
+ ...basePayload,
24
+ env: { AGENT_CI_LOCAL: "true" },
25
+ };
26
+ const response = createJobResponse("1", payload, "http://localhost:3000", "plan-1");
27
+ const body = JSON.parse(response.Body);
28
+ // ContextData.env should be a ContextData object with type 2 (mapping)
29
+ expect(body.ContextData.env).toBeDefined();
30
+ expect(body.ContextData.env.t).toBe(2);
31
+ const entries = body.ContextData.env.d;
32
+ const localEntry = entries.find((e) => e.k === "AGENT_CI_LOCAL");
33
+ expect(localEntry).toBeDefined();
34
+ expect(localEntry.v).toEqual({ t: 0, s: "true" });
35
+ });
36
+ it("propagates job-level env into EnvironmentVariables", () => {
37
+ const payload = {
38
+ ...basePayload,
39
+ env: { AGENT_CI_LOCAL: "true" },
40
+ };
41
+ const response = createJobResponse("1", payload, "http://localhost:3000", "plan-1");
42
+ const body = JSON.parse(response.Body);
43
+ expect(body.EnvironmentVariables).toHaveLength(1);
44
+ const mapping = body.EnvironmentVariables[0];
45
+ expect(mapping.type).toBe(2);
46
+ const entry = mapping.map.find((e) => e.Key === "AGENT_CI_LOCAL");
47
+ expect(entry).toBeDefined();
48
+ expect(entry.Value).toBe("true");
49
+ });
50
+ it("step-level env overrides job-level env on conflict", () => {
51
+ const payload = {
52
+ ...basePayload,
53
+ env: { SHARED: "from-job" },
54
+ steps: [{ name: "step1", run: "echo hi", Env: { SHARED: "from-step" } }],
55
+ };
56
+ const response = createJobResponse("1", payload, "http://localhost:3000", "plan-1");
57
+ const body = JSON.parse(response.Body);
58
+ // Variables should have the step-level value (last-write wins)
59
+ expect(body.Variables.SHARED).toEqual({ Value: "from-step", IsSecret: false });
60
+ // ContextData.env should also have the step-level value
61
+ const entries = body.ContextData.env.d;
62
+ const entry = entries.find((e) => e.k === "SHARED");
63
+ expect(entry.v).toEqual({ t: 0, s: "from-step" });
64
+ });
65
+ it("omits env from ContextData when no env is provided", () => {
66
+ const response = createJobResponse("1", basePayload, "http://localhost:3000", "plan-1");
67
+ const body = JSON.parse(response.Body);
68
+ expect(body.ContextData.env).toBeUndefined();
69
+ expect(body.EnvironmentVariables).toEqual([]);
70
+ });
71
+ });
@@ -573,6 +573,11 @@ export function registerActionRoutes(app) {
573
573
  const result = { actions: {} };
574
574
  const baseUrl = getBaseUrl(req);
575
575
  for (const action of actions) {
576
+ // Local actions (RepositoryType: "self") are resolved from the workspace by the
577
+ // runner — they never need a tarball download. Skip them to avoid parsing errors.
578
+ if (!action.nameWithOwner || action.nameWithOwner.startsWith("./")) {
579
+ continue;
580
+ }
576
581
  const key = `${action.nameWithOwner}@${action.ref}`;
577
582
  // Strip sub-path from nameWithOwner (e.g. "actions/cache/save" → "actions/cache")
578
583
  // Sub-path actions share the same repo tarball as the parent action.
@@ -23,6 +23,10 @@ export function registerDtuRoutes(app) {
23
23
  Id: crypto.randomUUID(),
24
24
  }));
25
25
  const jobPayload = { ...payload, steps: mappedSteps };
26
+ // Track the original repo root for git operations (e.g. compare commits)
27
+ if (payload.repoRoot) {
28
+ state.repoRoot = payload.repoRoot;
29
+ }
26
30
  // Store the job for dispatch. Runner-targeted jobs go ONLY into runnerJobs
27
31
  // to prevent other runners from stealing them via the generic pool fallback.
28
32
  // Jobs without a runnerName go into the generic pool for any runner to pick up.
@@ -94,7 +94,116 @@ export function registerGithubRoutes(app) {
94
94
  };
95
95
  app.post("/actions/runner-registration", globalRunnerRegistrationHandler);
96
96
  app.post("/api/v3/actions/runner-registration", globalRunnerRegistrationHandler);
97
- // 7. Tarball route — actions/checkout downloads repos via this endpoint.
97
+ // 7. Compare commits — used by actions that detect changed files (e.g. Khan/actions@get-changed-files).
98
+ // Runs `git diff` on the original repo root and returns a GitHub-compatible response.
99
+ const compareHandler = (req, res) => {
100
+ const basehead = req.params.basehead;
101
+ // GitHub format: "base...head" (three dots) or "base..head" (two dots)
102
+ const parts = basehead.split(/\.{2,3}/);
103
+ const [base, head] = parts;
104
+ if (!base || !head) {
105
+ res.writeHead(422, { "Content-Type": "application/json" });
106
+ res.end(JSON.stringify({ message: "Invalid basehead format" }));
107
+ return;
108
+ }
109
+ const repoRoot = state.repoRoot;
110
+ if (!repoRoot) {
111
+ // No repo root available — return empty comparison
112
+ console.warn("[DTU] Compare: no repoRoot available, returning empty file list");
113
+ res.writeHead(200, { "Content-Type": "application/json" });
114
+ res.end(JSON.stringify({ status: "identical", files: [], total_commits: 0, commits: [] }));
115
+ return;
116
+ }
117
+ try {
118
+ const output = execSync(`git diff --name-status ${base} ${head}`, {
119
+ cwd: repoRoot,
120
+ stdio: "pipe",
121
+ timeout: 10000,
122
+ }).toString();
123
+ const statusMap = {
124
+ A: "added",
125
+ M: "modified",
126
+ D: "removed",
127
+ R: "renamed",
128
+ C: "copied",
129
+ T: "changed",
130
+ };
131
+ const files = output
132
+ .trim()
133
+ .split("\n")
134
+ .filter((line) => line.length > 0)
135
+ .map((line) => {
136
+ // Format: "M\tfilename" or "R100\told\tnew"
137
+ const parts = line.split("\t");
138
+ const rawStatus = parts[0];
139
+ const statusChar = rawStatus[0];
140
+ const filename = rawStatus.startsWith("R") ? parts[2] : parts[1];
141
+ const previousFilename = rawStatus.startsWith("R") ? parts[1] : undefined;
142
+ return {
143
+ sha: "0000000000000000000000000000000000000000",
144
+ filename,
145
+ status: statusMap[statusChar] || "modified",
146
+ ...(previousFilename ? { previous_filename: previousFilename } : {}),
147
+ additions: 0,
148
+ deletions: 0,
149
+ changes: 0,
150
+ };
151
+ });
152
+ res.writeHead(200, { "Content-Type": "application/json" });
153
+ res.end(JSON.stringify({
154
+ status: files.length > 0 ? "ahead" : "identical",
155
+ total_commits: 1,
156
+ commits: [],
157
+ files,
158
+ }));
159
+ }
160
+ catch (err) {
161
+ console.warn(`[DTU] Compare failed (${base}...${head}):`, err.message);
162
+ // Fall back to listing all tracked files as "added"
163
+ try {
164
+ const allFiles = execSync("git ls-files", {
165
+ cwd: repoRoot,
166
+ stdio: "pipe",
167
+ timeout: 10000,
168
+ }).toString();
169
+ const files = allFiles
170
+ .trim()
171
+ .split("\n")
172
+ .filter((f) => f.length > 0)
173
+ .map((filename) => ({
174
+ sha: "0000000000000000000000000000000000000000",
175
+ filename,
176
+ status: "added",
177
+ additions: 0,
178
+ deletions: 0,
179
+ changes: 0,
180
+ }));
181
+ res.writeHead(200, { "Content-Type": "application/json" });
182
+ res.end(JSON.stringify({
183
+ status: "ahead",
184
+ total_commits: 1,
185
+ commits: [],
186
+ files,
187
+ }));
188
+ }
189
+ catch {
190
+ res.writeHead(500, { "Content-Type": "application/json" });
191
+ res.end(JSON.stringify({ message: "Failed to compute diff" }));
192
+ }
193
+ }
194
+ };
195
+ app.get("/repos/:owner/:repo/compare/:basehead", compareHandler);
196
+ app.get("/_apis/repos/:owner/:repo/compare/:basehead", compareHandler);
197
+ // 8. List pull requests associated with a commit — used by some changed-files actions
198
+ // when the push event has an all-zeros `before` (new branch push).
199
+ const listPrsForCommitHandler = (req, res) => {
200
+ console.log(`[DTU] List PRs for commit ${req.params.sha} (mock: returning empty)`);
201
+ res.writeHead(200, { "Content-Type": "application/json" });
202
+ res.end(JSON.stringify([]));
203
+ };
204
+ app.get("/repos/:owner/:repo/commits/:sha/pulls", listPrsForCommitHandler);
205
+ app.get("/_apis/repos/:owner/:repo/commits/:sha/pulls", listPrsForCommitHandler);
206
+ // 9. Tarball route — actions/checkout downloads repos via this endpoint.
98
207
  // Return an empty tar.gz since the workspace is already bind-mounted.
99
208
  const tarballHandler = (req, res) => {
100
209
  console.log(`[DTU] Serving empty tarball for ${req.url}`);
@@ -19,6 +19,7 @@ export declare const state: {
19
19
  planToLogDir: Map<string, string>;
20
20
  timelineToLogDir: Map<string, string>;
21
21
  currentInProgressStep: Map<string, string>;
22
+ repoRoot: string | undefined;
22
23
  virtualCachePatterns: Set<string>;
23
24
  caches: Map<string, {
24
25
  version: string;
@@ -37,6 +37,8 @@ export const state = {
37
37
  // timelineId → sanitized name of the currently in-progress step
38
38
  // (used as fallback when the feed recordId is a Job-level ID)
39
39
  currentInProgressStep: new Map(),
40
+ // Original repo root on the host (for git operations like compare)
41
+ repoRoot: undefined,
40
42
  // Substring patterns for cache keys that should always return a synthetic hit
41
43
  // with an empty archive (e.g. "pnpm" for bind-mounted pnpm stores).
42
44
  virtualCachePatterns: new Set(),
@@ -94,6 +96,7 @@ export const state = {
94
96
  this.planToLogDir.clear();
95
97
  this.timelineToLogDir.clear();
96
98
  this.currentInProgressStep.clear();
99
+ this.repoRoot = undefined;
97
100
  this.virtualCachePatterns.clear();
98
101
  this.caches.clear();
99
102
  this.pendingCaches.clear();
@@ -439,6 +439,51 @@ describe("Artifact v4 upload/download", () => {
439
439
  expect(res.body.lockedUntil).toBeDefined();
440
440
  expect(new Date(res.body.lockedUntil).getTime()).toBeGreaterThan(Date.now());
441
441
  });
442
+ it("should let runner B steal runner A's job when seeded WITHOUT runnerName (generic pool bug)", async () => {
443
+ // REPRODUCTION for issue #103:
444
+ // When local-job.ts seeds a job without setting runnerName, the job lands
445
+ // in the generic state.jobs pool. If another runner (from a different
446
+ // concurrent workflow) polls before runner A, it steals the job — causing
447
+ // runner A to hang forever waiting for a job that will never arrive.
448
+ const runnerA = "agent-ci-repro-A";
449
+ const runnerB = "agent-ci-repro-B";
450
+ // Register both runners
451
+ await request("POST", "/_dtu/start-runner", {
452
+ runnerName: runnerA,
453
+ logDir: "/tmp/agent-ci-repro-A-logs",
454
+ timelineDir: "/tmp/agent-ci-repro-A-logs",
455
+ });
456
+ await request("POST", "/_dtu/start-runner", {
457
+ runnerName: runnerB,
458
+ logDir: "/tmp/agent-ci-repro-B-logs",
459
+ timelineDir: "/tmp/agent-ci-repro-B-logs",
460
+ });
461
+ // Seed a job intended for runner A, but WITHOUT runnerName (the bug).
462
+ // This goes into the generic state.jobs pool.
463
+ await request("POST", "/_dtu/seed", {
464
+ id: 4001,
465
+ name: "job-intended-for-A",
466
+ // BUG: no runnerName — job lands in generic pool
467
+ });
468
+ // Runner B creates a session and polls — it shouldn't get A's job,
469
+ // but because the job is in the generic pool, B steals it.
470
+ const sessionB = await request("POST", "/_apis/distributedtask/pools/1/sessions", {
471
+ agent: { name: runnerB },
472
+ });
473
+ const pollB = await request("GET", `/_apis/distributedtask/pools/1/messages?sessionId=${sessionB.body.sessionId}`);
474
+ // BUG CONFIRMED: runner B stole the job from the generic pool
475
+ expect(pollB.status).toBe(200);
476
+ const bodyB = JSON.parse(pollB.body.Body);
477
+ expect(bodyB.JobDisplayName).toBe("job-intended-for-A");
478
+ // Now runner A creates a session and polls — the job is gone
479
+ await request("POST", "/_apis/distributedtask/pools/1/sessions", {
480
+ agent: { name: runnerA },
481
+ });
482
+ // Runner A's poll will hang (long-poll timeout) because its job was stolen.
483
+ // We verify by checking state: no jobs remain for A.
484
+ const aHasJob = state.runnerJobs.has(runnerA) || state.jobs.size > 0;
485
+ expect(aHasJob).toBe(false); // A's job is gone — it will hang forever
486
+ }, 10_000);
442
487
  it("should handle job request finish (PATCH with result + finishTime)", async () => {
443
488
  const finishTime = new Date().toISOString();
444
489
  const res = await request("PATCH", "/_apis/distributedtask/jobrequests", {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "dtu-github-actions",
3
- "version": "0.7.0",
3
+ "version": "0.8.0",
4
4
  "description": "Digital Twin Universe - GitHub Actions Mock and Simulation",
5
5
  "keywords": [
6
6
  "ci",