dtu-github-actions 0.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,575 @@
1
+ import crypto from "node:crypto";
2
+ import fs from "node:fs";
3
+ import path from "node:path";
4
+ import { state } from "../../store.js";
5
+ import { getBaseUrl } from "../dtu.js";
6
+ import { createJobResponse } from "./generators.js";
7
+ // Helper to reliably find log Id from URLs like /_apis/distributedtask/hubs/Hub/plans/Plan/logs/123
8
+ export function registerActionRoutes(app) {
9
+ // 7. Pipeline Service Discovery Mock
10
+ const serviceDiscoveryHandler = (req, res) => {
11
+ console.log(`[DTU] Handling service discovery: ${req.url}`);
12
+ const baseUrl = getBaseUrl(req);
13
+ res.writeHead(200, { "Content-Type": "application/json" });
14
+ res.end(JSON.stringify({
15
+ value: [],
16
+ locationId: crypto.randomUUID(),
17
+ instanceId: crypto.randomUUID(),
18
+ locationServiceData: {
19
+ serviceOwner: "A85B8835-C1A1-4AAC-AE97-1C3D0BA72DBD",
20
+ defaultAccessMappingMoniker: "PublicAccessMapping",
21
+ accessMappings: [
22
+ { moniker: "PublicAccessMapping", displayName: "Public Access", accessPoint: baseUrl },
23
+ ],
24
+ serviceDefinitions: [
25
+ {
26
+ serviceType: "distributedtask",
27
+ identifier: "A85B8835-C1A1-4AAC-AE97-1C3D0BA72DBD",
28
+ displayName: "distributedtask",
29
+ relativeToSetting: 3,
30
+ relativePath: "",
31
+ description: "Distributed Task Service",
32
+ serviceOwner: "A85B8835-C1A1-4AAC-AE97-1C3D0BA72DBD",
33
+ status: 1, // Online
34
+ locationMappings: [
35
+ { accessMappingMoniker: "PublicAccessMapping", location: baseUrl },
36
+ ],
37
+ },
38
+ {
39
+ serviceType: "distributedtask",
40
+ identifier: "A8C47E17-4D56-4A56-92BB-DE7EA7DC65BE", // Pools
41
+ displayName: "Pools",
42
+ relativeToSetting: 3,
43
+ relativePath: "/_apis/distributedtask/pools",
44
+ description: "Pools Service",
45
+ serviceOwner: "A85B8835-C1A1-4AAC-AE97-1C3D0BA72DBD",
46
+ status: 1,
47
+ locationMappings: [
48
+ {
49
+ accessMappingMoniker: "PublicAccessMapping",
50
+ location: `${baseUrl}/_apis/distributedtask/pools`,
51
+ },
52
+ ],
53
+ },
54
+ {
55
+ serviceType: "distributedtask",
56
+ identifier: "27d7f831-88c1-4719-8ca1-6a061dad90eb", // ActionDownloadInfo
57
+ displayName: "ActionDownloadInfo",
58
+ relativeToSetting: 3,
59
+ relativePath: "/_apis/distributedtask/hubs/{hubName}/plans/{planId}/actiondownloadinfo",
60
+ description: "Action Download Info Service",
61
+ serviceOwner: "A85B8835-C1A1-4AAC-AE97-1C3D0BA72DBD",
62
+ status: 1,
63
+ locationMappings: [
64
+ { accessMappingMoniker: "PublicAccessMapping", location: `${baseUrl}` },
65
+ ],
66
+ },
67
+ {
68
+ serviceType: "distributedtask",
69
+ identifier: "858983e4-19bd-4c5e-864c-507b59b58b12", // AppendTimelineRecordFeedAsync
70
+ displayName: "AppendTimelineRecordFeed",
71
+ relativeToSetting: 3,
72
+ relativePath: "/_apis/distributedtask/hubs/{hubName}/plans/{planId}/timelines/{timelineId}/records/{recordId}/feed",
73
+ description: "Timeline Feed Service",
74
+ serviceOwner: "A85B8835-C1A1-4AAC-AE97-1C3D0BA72DBD",
75
+ status: 1,
76
+ locationMappings: [
77
+ { accessMappingMoniker: "PublicAccessMapping", location: `${baseUrl}` },
78
+ ],
79
+ },
80
+ {
81
+ serviceType: "distributedtask",
82
+ identifier: "46f5667d-263a-4684-91b1-dff7fdcf64e2", // AppendLogContent
83
+ displayName: "TaskLog",
84
+ relativeToSetting: 3,
85
+ relativePath: "/_apis/distributedtask/hubs/{hubName}/plans/{planId}/logs/{logId}",
86
+ description: "Task Log Service",
87
+ serviceOwner: "A85B8835-C1A1-4AAC-AE97-1C3D0BA72DBD",
88
+ status: 1,
89
+ locationMappings: [
90
+ { accessMappingMoniker: "PublicAccessMapping", location: `${baseUrl}` },
91
+ ],
92
+ },
93
+ ],
94
+ },
95
+ }));
96
+ };
97
+ app.get("/_apis/pipelines", serviceDiscoveryHandler);
98
+ app.get("/_apis/connectionData", serviceDiscoveryHandler);
99
+ // 10. Pools Handler
100
+ app.get("/_apis/distributedtask/pools", (req, res) => {
101
+ console.log(`[DTU] Handling pools request`);
102
+ res.writeHead(200, { "Content-Type": "application/json" });
103
+ res.end(JSON.stringify({
104
+ count: 1,
105
+ value: [{ id: 1, name: "Default", isHosted: false, autoProvision: true }],
106
+ }));
107
+ });
108
+ // 11. Agents Handler
109
+ app.get("/_apis/distributedtask/pools/:poolId/agents", (req, res) => {
110
+ console.log(`[DTU] Handling get agents request`);
111
+ res.writeHead(200, { "Content-Type": "application/json" });
112
+ res.end(JSON.stringify({ count: 0, value: [] }));
113
+ });
114
+ app.post("/_apis/distributedtask/pools/:poolId/agents", (req, res) => {
115
+ console.log(`[DTU] Handling register agent request`);
116
+ const payload = req.body;
117
+ const agentId = Math.floor(Math.random() * 10000);
118
+ const baseUrl = getBaseUrl(req);
119
+ const response = {
120
+ id: agentId,
121
+ name: payload?.name || "agent-ci-runner",
122
+ version: payload?.version || "2.331.0",
123
+ osDescription: payload?.osDescription || "Linux",
124
+ ephemeral: payload?.ephemeral || true,
125
+ disableUpdate: payload?.disableUpdate || true,
126
+ enabled: true,
127
+ status: "online",
128
+ provisioningState: "Provisioned",
129
+ authorization: {
130
+ clientId: crypto.randomUUID(),
131
+ authorizationUrl: `${baseUrl}/auth/authorize`,
132
+ },
133
+ accessPoint: `${baseUrl}/_apis/distributedtask/pools/${req.params.poolId}/agents/${agentId}`,
134
+ };
135
+ res.writeHead(200, { "Content-Type": "application/json" });
136
+ res.end(JSON.stringify(response));
137
+ });
138
+ // 12. Sessions Handler
139
+ app.post("/_apis/distributedtask/pools/:poolId/sessions", (req, res) => {
140
+ console.log(`[DTU] Creating session for pool ${req.params.poolId}`);
141
+ const newSessionId = crypto.randomUUID();
142
+ const ownerName = req.body?.agent?.name || "agent-ci-runner";
143
+ // Map this session to the runner name, allowing concurrent jobs to find their logs
144
+ state.sessionToRunner.set(newSessionId, ownerName);
145
+ const response = {
146
+ sessionId: newSessionId,
147
+ ownerName: ownerName,
148
+ agent: {
149
+ id: 1,
150
+ name: ownerName,
151
+ version: "2.331.0",
152
+ osDescription: "Linux",
153
+ enabled: true,
154
+ status: "online",
155
+ },
156
+ encryptionKey: {
157
+ value: Buffer.from(crypto.randomBytes(32)).toString("base64"),
158
+ k: "encryptionKey",
159
+ },
160
+ };
161
+ state.sessions.set(newSessionId, response);
162
+ state.messageQueues.set(newSessionId, []);
163
+ res.writeHead(200, { "Content-Type": "application/json" });
164
+ res.end(JSON.stringify(response));
165
+ });
166
+ app.delete("/_apis/distributedtask/pools/:poolId/sessions/:sessionId", (req, res) => {
167
+ const sessionId = req.params.sessionId;
168
+ console.log(`[DTU] Deleting session ${sessionId}`);
169
+ const pending = state.pendingPolls.get(sessionId);
170
+ if (pending && !pending.res.writableEnded) {
171
+ pending.res.writeHead(204);
172
+ pending.res.end();
173
+ }
174
+ state.pendingPolls.delete(sessionId);
175
+ state.sessions.delete(sessionId);
176
+ state.messageQueues.delete(sessionId);
177
+ state.sessionToRunner.delete(sessionId);
178
+ res.writeHead(204);
179
+ res.end();
180
+ });
181
+ // 13. Messages Long Polling
182
+ app.get("/_apis/distributedtask/pools/:poolId/messages", (req, res) => {
183
+ const sessionId = req.query.sessionId;
184
+ const baseUrl = getBaseUrl(req);
185
+ if (!sessionId || !state.sessions.has(sessionId)) {
186
+ res.writeHead(404);
187
+ res.end("Session not found");
188
+ return;
189
+ }
190
+ const existing = state.pendingPolls.get(sessionId);
191
+ if (existing) {
192
+ existing.res.writeHead(204);
193
+ existing.res.end();
194
+ }
195
+ state.pendingPolls.set(sessionId, { res, baseUrl });
196
+ const runnerName = state.sessionToRunner.get(sessionId);
197
+ // First check for a job seeded specifically for this runner, then fall back to the generic pool.
198
+ const runnerSpecificJob = runnerName ? state.runnerJobs.get(runnerName) : undefined;
199
+ const genericJobEntry = !runnerSpecificJob && state.jobs.size > 0 ? Array.from(state.jobs.entries())[0] : undefined;
200
+ const jobId = runnerSpecificJob
201
+ ? runnerName // use runnerName as synthetic key for runner-specific jobs
202
+ : genericJobEntry?.[0];
203
+ const jobData = runnerSpecificJob ?? genericJobEntry?.[1];
204
+ if (jobId && jobData) {
205
+ try {
206
+ const planId = crypto.randomUUID();
207
+ // Concurrency mapping
208
+ if (runnerName) {
209
+ const logDir = state.runnerLogs.get(runnerName);
210
+ if (logDir) {
211
+ state.planToLogDir.set(planId, logDir);
212
+ }
213
+ }
214
+ const response = createJobResponse(jobId, jobData, baseUrl, planId);
215
+ // Map timelineId → runner's timeline dir (CLI's _/logs/<runnerName>/)
216
+ try {
217
+ const jobBody = JSON.parse(response.Body);
218
+ const timelineId = jobBody?.Timeline?.Id;
219
+ const tDir = runnerName ? state.runnerTimelineDirs.get(runnerName) : undefined;
220
+ if (timelineId && tDir) {
221
+ state.timelineToLogDir.set(timelineId, tDir);
222
+ }
223
+ }
224
+ catch {
225
+ /* best-effort */
226
+ }
227
+ res.writeHead(200, { "Content-Type": "application/json" });
228
+ res.end(JSON.stringify(response));
229
+ // Clean up whichever job store we used
230
+ if (runnerSpecificJob && runnerName) {
231
+ state.runnerJobs.delete(runnerName);
232
+ }
233
+ else if (genericJobEntry) {
234
+ state.jobs.delete(genericJobEntry[0]);
235
+ }
236
+ state.pendingPolls.delete(sessionId);
237
+ return;
238
+ }
239
+ catch (e) {
240
+ console.error(`[DTU] Error creating job response:`, e);
241
+ res.writeHead(500);
242
+ res.end("Internal Server Error generating job");
243
+ return;
244
+ }
245
+ }
246
+ // Long poll: Wait up to 20 seconds before returning empty
247
+ const timeout = setTimeout(() => {
248
+ const pending = state.pendingPolls.get(sessionId);
249
+ if (pending && pending.res === res) {
250
+ state.pendingPolls.delete(sessionId);
251
+ if (!res.writableEnded) {
252
+ res.writeHead(204);
253
+ res.end();
254
+ }
255
+ }
256
+ }, 20000);
257
+ res.on("close", () => {
258
+ clearTimeout(timeout);
259
+ const pending = state.pendingPolls.get(sessionId);
260
+ if (pending && pending.res === res) {
261
+ state.pendingPolls.delete(sessionId);
262
+ }
263
+ });
264
+ });
265
+ app.delete("/_apis/distributedtask/pools/:poolId/messages", (req, res) => {
266
+ console.log(`[DTU] Acknowledging/Deleting message ${req.query?.messageId} for session ${req.query?.sessionId}`);
267
+ res.writeHead(204);
268
+ res.end();
269
+ });
270
+ // 14. Job Request Update / Renewal / Finish Mock
271
+ // The runner's VssClient resolves the route template "_apis/distributedtask/jobrequests/{jobId}"
272
+ // but passes { poolId, requestId } as routeValues — since none match "{jobId}", the placeholder
273
+ // is dropped and the runner sends PATCH /_apis/distributedtask/jobrequests (bare path).
274
+ // We register both patterns for safety.
275
+ const jobrequestHandler = (req, res) => {
276
+ let payload = req.body || {};
277
+ // If the request is a renewal (no result/finishTime), set lockedUntil
278
+ if (!payload.result && !payload.finishTime) {
279
+ payload.lockedUntil = new Date(Date.now() + 60000).toISOString();
280
+ }
281
+ res.writeHead(200, { "Content-Type": "application/json" });
282
+ res.end(JSON.stringify(payload));
283
+ };
284
+ app.patch("/_apis/distributedtask/jobrequests", jobrequestHandler);
285
+ app.patch("/_apis/distributedtask/jobrequests/:requestId", jobrequestHandler);
286
+ // 15. Timeline Records Handler — disk-only, no in-memory storage
287
+ const timelineHandler = (req, res) => {
288
+ const timelineId = req.params.timelineId;
289
+ const payload = req.body || {};
290
+ const newRecords = payload.value || [];
291
+ // Resolve the file to write to
292
+ const logDir = state.timelineToLogDir.get(timelineId);
293
+ const filePath = logDir ? path.join(logDir, "timeline.json") : null;
294
+ // Read existing records from disk (if any)
295
+ let existing = [];
296
+ if (filePath) {
297
+ try {
298
+ existing = JSON.parse(fs.readFileSync(filePath, "utf-8"));
299
+ }
300
+ catch {
301
+ /* file doesn't exist yet or is empty */
302
+ }
303
+ }
304
+ // Merge: update existing record by id, or by order for pre-populated records.
305
+ // Pre-populated records have friendly names from the YAML (e.g., "Build SDK")
306
+ // while DTU records have runner names (e.g., "Run pnpm build"). We want to
307
+ // preserve the friendly name when merging.
308
+ // The runner sends updates with name: null (uses refName instead), so we must
309
+ // strip null values to avoid overwriting existing data.
310
+ for (const record of newRecords) {
311
+ // Strip null values so they don't overwrite existing data
312
+ const nonNull = {};
313
+ for (const [k, v] of Object.entries(record)) {
314
+ if (v != null) {
315
+ nonNull[k] = v;
316
+ }
317
+ }
318
+ let mergedIdx = -1;
319
+ const idxById = existing.findIndex((r) => r.id === record.id);
320
+ if (idxById >= 0) {
321
+ existing[idxById] = { ...existing[idxById], ...nonNull };
322
+ mergedIdx = idxById;
323
+ }
324
+ else if (record.order != null) {
325
+ // Try to match by order against pre-populated pending records
326
+ const idxByOrder = existing.findIndex((r) => r.order === record.order && r.type === "Task" && r.state === "pending");
327
+ if (idxByOrder >= 0) {
328
+ // Preserve the friendly name from the pre-populated record
329
+ const friendlyName = existing[idxByOrder].name;
330
+ existing[idxByOrder] = { ...existing[idxByOrder], ...nonNull, name: friendlyName };
331
+ mergedIdx = idxByOrder;
332
+ }
333
+ else {
334
+ existing.push(record);
335
+ mergedIdx = existing.length - 1;
336
+ }
337
+ }
338
+ else {
339
+ existing.push(record);
340
+ mergedIdx = existing.length - 1;
341
+ }
342
+ // Ensure name is populated: fall back to refName if name is still null
343
+ if (mergedIdx >= 0 &&
344
+ existing[mergedIdx] &&
345
+ !existing[mergedIdx].name &&
346
+ existing[mergedIdx].refName) {
347
+ existing[mergedIdx].name = existing[mergedIdx].refName;
348
+ }
349
+ }
350
+ // Persist to disk
351
+ if (filePath) {
352
+ try {
353
+ fs.mkdirSync(path.dirname(filePath), { recursive: true });
354
+ fs.writeFileSync(filePath, JSON.stringify(existing, null, 2));
355
+ }
356
+ catch {
357
+ /* best-effort */
358
+ }
359
+ }
360
+ // Build recordId/logId → sanitized step name mappings for per-step log files.
361
+ // Also rename any existing files that were written before the mapping was available.
362
+ // logDir is already resolved above from state.timelineToLogDir — reuse it here.
363
+ const stepsDir = logDir ? path.join(logDir, "steps") : undefined;
364
+ for (const record of existing) {
365
+ if (record.name && record.type === "Task") {
366
+ const sanitized = record.name
367
+ .replace(/[^a-zA-Z0-9_.-]/g, "-")
368
+ .replace(/-+/g, "-")
369
+ .replace(/^-|-$/g, "")
370
+ .substring(0, 80);
371
+ const ids = [];
372
+ if (record.id) {
373
+ ids.push(record.id);
374
+ }
375
+ if (record.log?.id) {
376
+ ids.push(String(record.log.id));
377
+ }
378
+ for (const id of ids) {
379
+ state.recordToStepName.set(id, sanitized);
380
+ // Rename existing file from {id}.log to {stepName}.log if needed
381
+ if (stepsDir && id !== sanitized) {
382
+ const oldPath = path.join(stepsDir, `${id}.log`);
383
+ const newPath = path.join(stepsDir, `${sanitized}.log`);
384
+ try {
385
+ if (fs.existsSync(oldPath) && !fs.existsSync(newPath)) {
386
+ fs.renameSync(oldPath, newPath);
387
+ }
388
+ }
389
+ catch {
390
+ /* best-effort */
391
+ }
392
+ }
393
+ }
394
+ // Track the currently in-progress step so the Job-level feed
395
+ // can assign output to the correct per-step log file.
396
+ if (record.state === "inProgress") {
397
+ state.currentInProgressStep.set(timelineId, sanitized);
398
+ }
399
+ }
400
+ }
401
+ res.writeHead(200, { "Content-Type": "application/json" });
402
+ res.end(JSON.stringify({ count: existing.length, value: existing }));
403
+ };
404
+ // The runner will hit this depending on the route provided in discovery
405
+ app.patch("/_apis/distributedtask/timelines/:timelineId/records", timelineHandler);
406
+ app.post("/_apis/distributedtask/timelines/:timelineId/records", timelineHandler); // fallback
407
+ // 15b. Timeline GET — runner calls this during FinalizeJob to compute aggregate result.
408
+ // Without it, the runner gets 404 and defaults the job result to Failed.
409
+ app.get("/_apis/distributedtask/timelines/:timelineId", (req, res) => {
410
+ const timelineId = req.params.timelineId;
411
+ const logDir = state.timelineToLogDir.get(timelineId);
412
+ const filePath = logDir ? path.join(logDir, "timeline.json") : null;
413
+ let records = [];
414
+ if (filePath) {
415
+ try {
416
+ records = JSON.parse(fs.readFileSync(filePath, "utf-8"));
417
+ }
418
+ catch {
419
+ /* file doesn't exist yet */
420
+ }
421
+ }
422
+ res.writeHead(200, { "Content-Type": "application/json" });
423
+ res.end(JSON.stringify({
424
+ lastChangedBy: "00000000-0000-0000-0000-000000000000",
425
+ lastChangedOn: new Date().toISOString(),
426
+ id: timelineId,
427
+ changeId: 1,
428
+ location: null,
429
+ // includeRecords=True → runner expects a "records" array
430
+ ...(req.query?.includeRecords ? { records } : {}),
431
+ }));
432
+ });
433
+ // 18. Generic Step Outputs Handler
434
+ app.post("/_apis/distributedtask/hubs/:hub/plans/:planId/outputs", (req, res) => {
435
+ res.writeHead(200);
436
+ res.end(JSON.stringify({ value: {} }));
437
+ });
438
+ // 18. Resolve Action Download Info Mock
439
+ app.post("/_apis/distributedtask/hubs/:hub/plans/:planId/actiondownloadinfo", (req, res) => {
440
+ const payload = req.body || {};
441
+ const actions = payload.actions || [];
442
+ const result = { actions: {} };
443
+ for (const action of actions) {
444
+ const key = `${action.nameWithOwner}@${action.ref}`;
445
+ const downloadUrl = `https://api.github.com/repos/${action.nameWithOwner}/tarball/${action.ref}`;
446
+ result.actions[key] = {
447
+ nameWithOwner: action.nameWithOwner,
448
+ resolvedNameWithOwner: action.nameWithOwner,
449
+ ref: action.ref,
450
+ resolvedSha: crypto
451
+ .createHash("sha1")
452
+ .update(`${action.nameWithOwner}@${action.ref}`)
453
+ .digest("hex"),
454
+ tarballUrl: downloadUrl,
455
+ zipballUrl: downloadUrl.replace("tarball", "zipball"),
456
+ authentication: null,
457
+ };
458
+ }
459
+ res.writeHead(200, { "Content-Type": "application/json" });
460
+ res.end(JSON.stringify(result));
461
+ });
462
+ // 19. Generic Job Retrieval Handler
463
+ app.get("/_apis/distributedtask/pools/:poolId/jobs/:jobId", (req, res) => {
464
+ res.writeHead(200);
465
+ res.end(JSON.stringify({ id: "1", name: "job", status: "completed" }));
466
+ });
467
+ // 16. Log Creation Handler (POST .../logs)
468
+ app.post("/_apis/distributedtask/hubs/:hub/plans/:planId/logs", (req, res) => {
469
+ const logId = Math.floor(Math.random() * 10000).toString();
470
+ state.logs.set(logId, []);
471
+ res.writeHead(201, { "Content-Type": "application/json" });
472
+ // The runner's TaskLog class requires 'path' — null causes ArgumentNullException
473
+ res.end(JSON.stringify({
474
+ id: parseInt(logId),
475
+ path: `logs/${logId}`,
476
+ createdOn: new Date().toISOString(),
477
+ }));
478
+ });
479
+ // 17. Log Line Appending Handler (POST .../logs/:logId/lines)
480
+ app.post("/_apis/distributedtask/hubs/:hub/plans/:planId/logs/:logId/lines", (req, res) => {
481
+ const logId = req.params.logId;
482
+ const payload = req.body || {};
483
+ const lines = (payload.value || []).map((l) => l.message || l);
484
+ const existing = state.logs.get(logId) || [];
485
+ existing.push(...lines);
486
+ state.logs.set(logId, existing);
487
+ res.writeHead(200, { "Content-Type": "application/json" });
488
+ res.end(JSON.stringify({ count: 0, value: [] }));
489
+ });
490
+ // Helper to append filtered lines to the per-step log file
491
+ const writeStepOutputLines = (planId, recordId, lines) => {
492
+ const logDir = state.planToLogDir.get(planId);
493
+ if (!logDir) {
494
+ return;
495
+ }
496
+ const RUNNER_INTERNAL_RE = /^\[(?:RUNNER|WORKER) \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}Z (?:INFO|WARN|ERR)\s/;
497
+ let content = "";
498
+ for (const rawLine of lines) {
499
+ const line = rawLine.trimEnd();
500
+ if (!line) {
501
+ content += "\n";
502
+ continue;
503
+ }
504
+ // Strip BOM + timestamp prefix before filtering
505
+ const stripped = line
506
+ .replace(/^\uFEFF?\d{4}-\d{2}-\d{2}T[\d:.]+Z\s*/, "")
507
+ .replace(/^\uFEFF/, "");
508
+ if (!stripped ||
509
+ stripped.startsWith("##[") ||
510
+ stripped.startsWith("[command]") ||
511
+ RUNNER_INTERNAL_RE.test(stripped)) {
512
+ continue;
513
+ }
514
+ content += stripped + "\n";
515
+ }
516
+ if (content) {
517
+ try {
518
+ let stepName = state.recordToStepName.get(recordId);
519
+ // Fallback: if the recordId is a Job-level record (no mapping),
520
+ // use the currently in-progress step from the timeline.
521
+ if (!stepName) {
522
+ // Find timelineId for this plan — check all timelines mapped to the same logDir
523
+ const logDirForPlan = state.planToLogDir.get(planId);
524
+ if (logDirForPlan) {
525
+ for (const [tid, tdir] of state.timelineToLogDir) {
526
+ if (tdir === logDirForPlan) {
527
+ const current = state.currentInProgressStep.get(tid);
528
+ if (current) {
529
+ stepName = current;
530
+ }
531
+ break;
532
+ }
533
+ }
534
+ }
535
+ }
536
+ stepName = stepName || recordId;
537
+ const stepsDir = path.join(logDir, "steps");
538
+ fs.mkdirSync(stepsDir, { recursive: true });
539
+ fs.appendFileSync(path.join(stepsDir, `${stepName}.log`), content);
540
+ }
541
+ catch {
542
+ /* best-effort */
543
+ }
544
+ }
545
+ };
546
+ // 19. Append Timeline Record Feed (JSON feed items)
547
+ app.post("/_apis/distributedtask/hubs/:hub/plans/:planId/timelines/:timelineId/records/:recordId/feed", (req, res) => {
548
+ const payload = req.body || {};
549
+ const planId = req.params.planId;
550
+ const extractedLines = [];
551
+ if (payload.value && Array.isArray(payload.value)) {
552
+ for (const l of payload.value) {
553
+ extractedLines.push(typeof l === "string" ? l : (l.message ?? ""));
554
+ }
555
+ }
556
+ else if (Array.isArray(payload)) {
557
+ for (const l of payload) {
558
+ extractedLines.push(typeof l === "string" ? l : JSON.stringify(l));
559
+ }
560
+ }
561
+ if (extractedLines.length > 0) {
562
+ writeStepOutputLines(planId, req.params.recordId, extractedLines);
563
+ }
564
+ res.writeHead(200, { "Content-Type": "application/json" });
565
+ res.end(JSON.stringify({ count: 0, value: [] }));
566
+ });
567
+ // Catch-all: log unhandled requests for debugging
568
+ app.all("(.*)", (req, res) => {
569
+ console.log(`[DTU] ⚠ Unhandled ${req.method} ${req.url}`);
570
+ if (!res.writableEnded) {
571
+ res.writeHead(404);
572
+ res.end("Not Found");
573
+ }
574
+ });
575
+ }
@@ -0,0 +1,2 @@
1
import { Polka } from "polka";
/**
 * Registers artifact-related HTTP routes on the given Polka application.
 * Declaration only — the implementation lives in the corresponding .js file.
 * NOTE(review): this chunk declares `registerArtifactRoutes` while the
 * preceding chunk implements `registerActionRoutes`; presumably it belongs
 * to a sibling artifacts module in the same package diff — verify.
 */
export declare function registerArtifactRoutes(app: Polka): void;