dtu-github-actions 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/config.d.ts +39 -0
- package/dist/config.js +29 -0
- package/dist/ephemeral.d.ts +16 -0
- package/dist/ephemeral.js +48 -0
- package/dist/server/index.d.ts +4 -0
- package/dist/server/index.js +326 -0
- package/dist/server/logger.d.ts +4 -0
- package/dist/server/logger.js +56 -0
- package/dist/server/routes/actions/generators.d.ts +25 -0
- package/dist/server/routes/actions/generators.js +313 -0
- package/dist/server/routes/actions/index.d.ts +2 -0
- package/dist/server/routes/actions/index.js +575 -0
- package/dist/server/routes/artifacts.d.ts +2 -0
- package/dist/server/routes/artifacts.js +332 -0
- package/dist/server/routes/cache.d.ts +2 -0
- package/dist/server/routes/cache.js +230 -0
- package/dist/server/routes/cache.test.d.ts +1 -0
- package/dist/server/routes/cache.test.js +229 -0
- package/dist/server/routes/dtu.d.ts +3 -0
- package/dist/server/routes/dtu.js +141 -0
- package/dist/server/routes/github.d.ts +2 -0
- package/dist/server/routes/github.js +109 -0
- package/dist/server/start.d.ts +1 -0
- package/dist/server/start.js +22 -0
- package/dist/server/store.d.ts +44 -0
- package/dist/server/store.js +100 -0
- package/dist/server.js +1179 -0
- package/dist/server.test.d.ts +1 -0
- package/dist/server.test.js +322 -0
- package/dist/simulate.d.ts +1 -0
- package/dist/simulate.js +47 -0
- package/dist/types.d.ts +111 -0
- package/dist/types.js +1 -0
- package/package.json +43 -0
package/dist/server.js
ADDED
|
@@ -0,0 +1,1179 @@
|
|
|
1
|
+
import http from "node:http";
|
|
2
|
+
import crypto from "node:crypto";
|
|
3
|
+
import fs from "node:fs";
|
|
4
|
+
import path from "node:path";
|
|
5
|
+
import { fileURLToPath } from "node:url";
|
|
6
|
+
import { execa } from "execa";
|
|
7
|
+
import { config } from "./config.js";
|
|
8
|
+
// Set up file logging
const DTU_ROOT = path.resolve(fileURLToPath(import.meta.url), "..", "..");
const DTU_LOGS_DIR = path.join(DTU_ROOT, "_", "logs");
fs.mkdirSync(DTU_LOGS_DIR, { recursive: true });
const DTU_LOG_PATH = path.join(DTU_LOGS_DIR, "dtu-server.log");
const logStream = fs.createWriteStream(DTU_LOG_PATH, { flags: "a" });
const _origLog = console.log.bind(console);
const _origWarn = console.warn.bind(console);
const _origError = console.error.bind(console);
/**
 * Render a single console argument as text for the log file.
 * Plain JSON.stringify throws on circular structures and serializes Error
 * instances to "{}", so errors use their stack and anything unstringifiable
 * falls back to String().
 * @param {unknown} a - Any value passed to console.log/warn/error.
 * @returns {string}
 */
function formatLogArg(a) {
  if (typeof a === "string") {
    return a;
  }
  if (a instanceof Error) {
    return a.stack ?? String(a);
  }
  try {
    return JSON.stringify(a);
  } catch {
    // Circular reference, BigInt, etc. — best-effort conversion.
    return String(a);
  }
}
/**
 * Append one timestamped line (built from the given args) to the log file.
 * @param {...unknown} args
 */
function writeToLog(...args) {
  const line = args.map(formatLogArg).join(" ");
  logStream.write(`${new Date().toISOString()} ${line}\n`);
}
// Mirror all console output into the log file while preserving the
// original console behavior.
console.log = (...args) => {
  _origLog(...args);
  writeToLog(...args);
};
console.warn = (...args) => {
  _origWarn(...args);
  writeToLog("[WARN]", ...args);
};
console.error = (...args) => {
  _origError(...args);
  writeToLog("[ERROR]", ...args);
};
|
|
33
|
+
// Kill any process already listening on the DTU port so we can bind it.
// Uses the configured port (config.DTU_PORT) rather than a hardcoded 8910,
// and passes a single command string so the $(lsof ...) substitution is
// evaluated by the shell regardless of how execa joins arguments.
// `reject: false` plus the catch makes this strictly best-effort: if lsof
// finds nothing (or isn't installed), startup continues normally.
try {
  await execa(`kill -9 $(lsof -t -i:${config.DTU_PORT})`, { shell: true, reject: false });
}
catch {
  // Ignore error if no process found
}
|
|
40
|
+
/**
 * Digital Twin Universe (DTU) - GitHub API Mock Server
 *
 * This server mirrors the GitHub REST API for Actions.
 * It maintains an in-memory store of job metadata seeded by simulation scripts.
 *
 * All state below is in-memory only and is cleared on every server start.
 */
// Seeded jobs, keyed by the job id string from POST /_dtu/seed payloads.
export const jobs = new Map();
// Runner sessions created via POST .../pools/{id}/sessions, keyed by session UUID.
export const sessions = new Map();
// Per-session message queues (initialized empty when a session is created).
export const messageQueues = new Map();
// In-flight long-poll requests, keyed by session id -> { res, baseUrl },
// so a newly seeded job can be delivered to a waiting runner immediately.
export const pendingPolls = new Map();
// Timeline records store. NOTE(review): not populated in this chunk —
// presumably filled by handlers further down the file; confirm.
export const timelines = new Map();
// Uploaded task logs store. NOTE(review): not populated in this chunk — confirm.
export const logs = new Map();
// Active runner: set by POST /_dtu/start-runner from localJob.ts
// The DTU writes feed lines to this file so the client can tail it.
let activeStepOutputPath = null;
|
|
55
|
+
/**
 * Matches runner-internal log prefixes (e.g. "[RUNNER 2024-01-02 03:04:05Z INFO ...")
 * that must never leak into user-facing step output.
 */
const RUNNER_INTERNAL_RE = /^\[(?:RUNNER|WORKER) \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}Z (?:INFO|WARN|ERR)\s/;
/**
 * Append one line of step output to the active runner's output file.
 * No-op when no runner is active or when the line is runner-internal chatter.
 * @param {string} line - A single line of output (without trailing newline).
 */
function writeStepOutputLine(line) {
  // Skip entirely when no runner is active; filter out runner-internal
  // log lines so only actual step output reaches the file.
  if (!activeStepOutputPath || RUNNER_INTERNAL_RE.test(line)) {
    return;
  }
  try {
    fs.appendFileSync(activeStepOutputPath, `${line}\n`);
  }
  catch {
    // best-effort
  }
}
|
|
72
|
+
// Reset every in-memory store so a restarted server never sees stale state.
for (const store of [jobs, sessions, messageQueues, pendingPolls, timelines, logs]) {
  store.clear();
}
|
|
79
|
+
/**
 * Recursively encode a plain JS value into the PipelinesContextData wire
 * shape used by the runner; "t" is the type discriminator:
 * 0 = string, 1 = array, 2 = dictionary, 3 = boolean, 4 = number.
 * Anything unsupported (null, undefined, functions, ...) becomes an
 * empty string node.
 * @param {unknown} obj - Value to encode.
 * @returns {object} ContextData node.
 */
function toContextData(obj) {
  switch (typeof obj) {
    case "string":
      return { t: 0, s: obj };
    case "boolean":
      return { t: 3, b: obj };
    case "number":
      return { t: 4, n: obj };
    default:
      break;
  }
  if (Array.isArray(obj)) {
    const a = obj.map((item) => toContextData(item));
    return { t: 1, a };
  }
  if (obj !== null && typeof obj === "object") {
    const d = Object.entries(obj).map(([key, value]) => ({ k: key, v: toContextData(value) }));
    return { t: 2, d };
  }
  // null / undefined (and other unsupported types) collapse to an empty string.
  return { t: 0, s: "" };
}
|
|
105
|
+
// Build a TemplateToken MappingToken in the format ActionStep.Inputs expects.
// TemplateTokenJsonConverter uses the "type" key (integer), NOT the
// contextData "t" key. TokenType.Mapping = 2; items serialize as
// {Key: scalarToken, Value: templateToken}, and strings without
// file/line/col are serialized as bare string values.
function toTemplateTokenMapping(obj) {
  const map = Object.entries(obj).map(([Key, Value]) => ({ Key, Value }));
  // An empty mapping omits the "map" property entirely.
  return map.length === 0 ? { type: 2 } : { type: 2, map };
}
|
|
119
|
+
/**
 * Build the long-poll message a runner receives for a seeded job.
 *
 * The returned envelope is a TaskAgentMessage whose Body is the
 * JSON-serialized PipelineAgentJobRequest: steps as TemplateTokens,
 * Variables, ContextData, and repository/endpoint resources that the
 * runner deserializes and executes.
 *
 * Fixes vs. the previous version:
 * - The "Generating Job Response" debug line now logs the actual JobId
 *   (it previously printed a freshly generated, unrelated UUID).
 * - githubContext.sha now honors payload.headSha, consistent with the
 *   "github.sha" Variable (it was hardcoded to all zeros).
 *
 * @param {string} jobId - Seeded job id; parsed (base 10) into RequestId.
 * @param {object} payload - Seed payload (steps, name, repository, headSha, pull_request).
 * @param {string} baseUrl - This server's base URL, used for all service endpoints.
 * @returns {{MessageId: number, MessageType: string, Body: string}}
 */
function createJobResponse(jobId, payload, baseUrl) {
  const mappedSteps = (payload.steps || []).map((step, index) => {
    // A bare `run` step is modeled as a script action with a `script` input.
    const inputsObj = step.Inputs || (step.run ? { script: step.run } : {});
    const s = {
      id: step.Id || step.id || crypto.randomUUID(),
      name: step.Name || step.DisplayName || step.name || `step-${index}`,
      type: (step.Type || "Action").toLowerCase(),
      // ActionSourceType.Script = 3
      reference: { type: 3 },
      // inputs is TemplateToken (MappingToken). Must use {"type": 2, "map": [...]} format.
      inputs: toTemplateTokenMapping(inputsObj),
      contextData: step.ContextData || toContextData({}),
      // condition must be explicit — null Condition causes NullReferenceException in EvaluateStepIf
      condition: step.condition || "success()",
    };
    return s;
  });
  const repoFullName = payload.repository?.full_name || "redwoodjs/opposite-actions";
  const ownerName = payload.repository?.owner?.login || "redwoodjs";
  // Single source of truth for the commit SHA used in Variables and ContextData.
  const headSha = payload.headSha || "0000000000000000000000000000000000000000";
  // Mock OAuth access token shared by both endpoint declarations below.
  const MOCK_ACCESS_TOKEN = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJvcmNoaWQiOiIxMjMifQ.c2lnbmF0dXJl";
  const Variables = {
    "system.github.token": { Value: "fake-token", IsSecret: true },
    "system.github.job": { Value: "local-job", IsSecret: false },
    "system.github.repository": { Value: repoFullName, IsSecret: false },
    "github.repository": { Value: repoFullName, IsSecret: false },
    "github.actor": { Value: ownerName, IsSecret: false },
    "github.sha": {
      Value: headSha,
      IsSecret: false,
    },
    "github.ref": { Value: "refs/heads/main", IsSecret: false },
    repository: { Value: repoFullName, IsSecret: false },
    GITHUB_REPOSITORY: { Value: repoFullName, IsSecret: false },
    GITHUB_ACTOR: { Value: ownerName, IsSecret: false },
    "build.repository.name": { Value: repoFullName, IsSecret: false },
    "build.repository.uri": { Value: `https://github.com/${repoFullName}`, IsSecret: false },
  };
  // ... ContextData ...
  const githubContext = {
    repository: repoFullName,
    actor: ownerName,
    // Kept consistent with the "github.sha" Variable above.
    sha: headSha,
    ref: "refs/heads/main",
    server_url: baseUrl,
    api_url: `${baseUrl}/_apis`,
    graphql_url: `${baseUrl}/_graphql`,
    workspace: "/home/runner/work/opposite-actions/opposite-actions",
    action: "__run",
    token: "fake-token",
    job: "local-job",
  };
  if (payload.pull_request) {
    githubContext.event = {
      pull_request: payload.pull_request,
    };
  }
  else {
    githubContext.event = {
      repository: {
        full_name: repoFullName,
        name: payload.repository?.name || "opposite-actions",
        owner: { login: ownerName },
      },
    };
  }
  const ContextData = {
    github: toContextData(githubContext),
    steps: { t: 2, d: [] }, // Empty steps context (required by EvaluateStepIf)
    needs: { t: 2, d: [] }, // Empty needs context
    strategy: { t: 2, d: [] }, // Empty strategy context
    matrix: { t: 2, d: [] }, // Empty matrix context
  };
  const jobRequest = {
    MessageType: "PipelineAgentJobRequest",
    Plan: {
      PlanId: crypto.randomUUID(),
      PlanType: "Action",
      ScopeId: crypto.randomUUID(),
    },
    Timeline: {
      Id: crypto.randomUUID(),
      ChangeId: 1,
    },
    JobId: crypto.randomUUID(),
    RequestId: Number.parseInt(jobId, 10) || 1,
    JobDisplayName: payload.name || "local-job",
    JobName: payload.name || "local-job",
    Steps: mappedSteps,
    Variables: Variables,
    ContextData: ContextData,
    Resources: {
      Repositories: [
        {
          Alias: "self",
          Id: "repo-1",
          Type: "git",
          Version: payload.headSha || "HEAD",
          Url: `https://github.com/${repoFullName}`,
          Properties: {
            id: "repo-1",
            name: payload.repository?.name || "opposite-actions",
            fullName: repoFullName, // Required by types
            repoFullName: repoFullName, // camelCase
            owner: ownerName,
            defaultBranch: payload.repository?.default_branch || "main",
            cloneUrl: `https://github.com/${repoFullName}.git`,
          },
        },
      ],
      Endpoints: [
        {
          Name: "SystemVssConnection",
          Url: baseUrl,
          Authorization: {
            Parameters: {
              AccessToken: MOCK_ACCESS_TOKEN,
            },
            Scheme: "OAuth",
          },
        },
      ],
    },
    Workspace: {
      Path: "/home/runner/work/opposite-actions/opposite-actions",
    },
    SystemVssConnection: {
      Url: baseUrl,
      Authorization: {
        Parameters: {
          AccessToken: MOCK_ACCESS_TOKEN,
        },
        Scheme: "OAuth",
      },
    },
    Actions: [],
    MaskHints: [],
    EnvironmentVariables: [],
  };
  console.log(`[DTU] DEBUG: Generating Job Response for JobId: ${jobRequest.JobId}`);
  console.log(`[DTU] DEBUG: repoFullName in Resources: ${jobRequest.Resources.Repositories[0].Properties["repoFullName"]}`);
  console.log(`[DTU] DEBUG: ContextData Payload:`, JSON.stringify(jobRequest.ContextData, null, 2));
  return {
    MessageId: 1,
    MessageType: "PipelineAgentJobRequest",
    Body: JSON.stringify(jobRequest),
  };
}
|
|
265
|
+
export const server = http.createServer((req, res) => {
|
|
266
|
+
const { method, headers } = req;
|
|
267
|
+
let { url } = req;
|
|
268
|
+
if (method !== "GET" && method !== "OPTIONS" && url !== "/_dtu/seed") {
|
|
269
|
+
console.log(`[DTU] API-DEBUG: ${method} ${url}`);
|
|
270
|
+
}
|
|
271
|
+
if (!url) {
|
|
272
|
+
res.statusCode = 400;
|
|
273
|
+
res.end("Missing URL");
|
|
274
|
+
return;
|
|
275
|
+
}
|
|
276
|
+
// Handle absolute URIs (proxy requests)
|
|
277
|
+
if (url.startsWith("http")) {
|
|
278
|
+
const parsedUrl = new URL(url);
|
|
279
|
+
url = parsedUrl.pathname + parsedUrl.search;
|
|
280
|
+
}
|
|
281
|
+
let host = headers.host || `localhost:${config.DTU_PORT}`;
|
|
282
|
+
const protocol = headers["x-forwarded-proto"] || "http";
|
|
283
|
+
// If host doesn't have a port, append it
|
|
284
|
+
if (!host.includes(":")) {
|
|
285
|
+
host = `${host}:${config.DTU_PORT}`;
|
|
286
|
+
}
|
|
287
|
+
const baseUrl = `${protocol}://${host}`;
|
|
288
|
+
// 1. Internal Seeding Endpoint
|
|
289
|
+
if (method === "POST" && url === "/_dtu/seed") {
|
|
290
|
+
let body = "";
|
|
291
|
+
req.on("data", (chunk) => {
|
|
292
|
+
body += chunk;
|
|
293
|
+
});
|
|
294
|
+
req.on("end", () => {
|
|
295
|
+
try {
|
|
296
|
+
const payload = JSON.parse(body);
|
|
297
|
+
const jobId = payload.id?.toString();
|
|
298
|
+
if (jobId) {
|
|
299
|
+
const mappedSteps = (payload.steps || []).map((step) => ({
|
|
300
|
+
...step,
|
|
301
|
+
Id: crypto.randomUUID(), // Always use a UUID for Step ID
|
|
302
|
+
}));
|
|
303
|
+
jobs.set(jobId, { ...payload, steps: mappedSteps });
|
|
304
|
+
console.log(`[DTU] Seeded job: ${jobId}`);
|
|
305
|
+
console.log(`[DTU] Seed Payload Repository:`, JSON.stringify(payload.repository, null, 2));
|
|
306
|
+
// Notify any pending polls
|
|
307
|
+
for (const [sessionId, { res, baseUrl: runnerBaseUrl }] of pendingPolls) {
|
|
308
|
+
console.log(`[DTU] Notifying session ${sessionId} of new job ${jobId} (Wait URL: ${runnerBaseUrl})`);
|
|
309
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
310
|
+
res.end(JSON.stringify(createJobResponse(jobId, payload, runnerBaseUrl)));
|
|
311
|
+
pendingPolls.delete(sessionId);
|
|
312
|
+
}
|
|
313
|
+
res.writeHead(201, { "Content-Type": "application/json" });
|
|
314
|
+
res.end(JSON.stringify({ status: "ok", jobId }));
|
|
315
|
+
}
|
|
316
|
+
else {
|
|
317
|
+
res.writeHead(400);
|
|
318
|
+
res.end("Missing job ID");
|
|
319
|
+
}
|
|
320
|
+
}
|
|
321
|
+
catch {
|
|
322
|
+
res.writeHead(400);
|
|
323
|
+
res.end("Invalid JSON");
|
|
324
|
+
}
|
|
325
|
+
});
|
|
326
|
+
return;
|
|
327
|
+
}
|
|
328
|
+
// 2. GitHub REST API Mirror
|
|
329
|
+
const jobMatch = url?.match(/\/repos\/[^/]+\/[^/]+\/actions\/jobs\/(\d+)/);
|
|
330
|
+
if (method === "GET" && jobMatch) {
|
|
331
|
+
const jobId = jobMatch[1];
|
|
332
|
+
const job = jobs.get(jobId);
|
|
333
|
+
if (job) {
|
|
334
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
335
|
+
res.end(JSON.stringify(job));
|
|
336
|
+
}
|
|
337
|
+
else {
|
|
338
|
+
console.warn(`[DTU] Job not found: ${jobId}`);
|
|
339
|
+
res.writeHead(404, { "Content-Type": "application/json" });
|
|
340
|
+
res.end(JSON.stringify({ message: "Not Found (DTU Mock)" }));
|
|
341
|
+
}
|
|
342
|
+
return;
|
|
343
|
+
}
|
|
344
|
+
// 3. GitHub App Token Exchange Mock (App Level)
|
|
345
|
+
const tokenMatch = url?.match(/\/app\/installations\/(\d+)\/access_tokens/);
|
|
346
|
+
if (method === "POST" && tokenMatch) {
|
|
347
|
+
const installationId = tokenMatch[1];
|
|
348
|
+
const authHeader = req.headers["authorization"];
|
|
349
|
+
console.log(`[DTU] Token exchange for installation: ${installationId}`);
|
|
350
|
+
if (authHeader) {
|
|
351
|
+
console.log(`[DTU] Received JWT: ${authHeader.substring(0, 20)}...`);
|
|
352
|
+
}
|
|
353
|
+
// Return a mock installation token
|
|
354
|
+
const response = {
|
|
355
|
+
token: `ghs_mock_token_${installationId}_${Math.random().toString(36).substring(7)}`,
|
|
356
|
+
expires_at: new Date(Date.now() + 3600 * 1000).toISOString(),
|
|
357
|
+
permissions: {
|
|
358
|
+
actions: "read",
|
|
359
|
+
metadata: "read",
|
|
360
|
+
},
|
|
361
|
+
repository_selection: "selected",
|
|
362
|
+
};
|
|
363
|
+
res.writeHead(201, { "Content-Type": "application/json" });
|
|
364
|
+
res.end(JSON.stringify(response));
|
|
365
|
+
return;
|
|
366
|
+
}
|
|
367
|
+
// 4. GitHub Installation Lookup Mock (Repo Level)
|
|
368
|
+
const repoInstallationMatch = url?.match(/\/repos\/([^/]+)\/([^/]+)\/installation/);
|
|
369
|
+
if (method === "GET" && repoInstallationMatch) {
|
|
370
|
+
const owner = repoInstallationMatch[1];
|
|
371
|
+
const repo = repoInstallationMatch[2];
|
|
372
|
+
console.log(`[DTU] Fetching installation for ${owner}/${repo}`);
|
|
373
|
+
const response = {
|
|
374
|
+
id: 12345678,
|
|
375
|
+
account: {
|
|
376
|
+
login: owner,
|
|
377
|
+
type: "User",
|
|
378
|
+
},
|
|
379
|
+
repository_selection: "all",
|
|
380
|
+
access_tokens_url: `${baseUrl}/app/installations/12345678/access_tokens`,
|
|
381
|
+
};
|
|
382
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
383
|
+
res.end(JSON.stringify(response));
|
|
384
|
+
return;
|
|
385
|
+
}
|
|
386
|
+
// 5. GitHub Runner Registration Token Mock
|
|
387
|
+
const registrationTokenMatch = url?.match(/(?:\/api\/v3)?\/repos\/([^/]+)\/([^/]+)\/actions\/runners\/registration-token/);
|
|
388
|
+
if (method === "POST" && registrationTokenMatch) {
|
|
389
|
+
const owner = registrationTokenMatch[1];
|
|
390
|
+
const repo = registrationTokenMatch[2];
|
|
391
|
+
console.log(`[DTU] Generating registration token for ${owner}/${repo}`);
|
|
392
|
+
const response = {
|
|
393
|
+
token: `ghr_mock_registration_token_${Math.random().toString(36).substring(7)}`,
|
|
394
|
+
expires_at: new Date(Date.now() + 3600 * 1000).toISOString(),
|
|
395
|
+
};
|
|
396
|
+
res.writeHead(201, { "Content-Type": "application/json" });
|
|
397
|
+
res.end(JSON.stringify(response));
|
|
398
|
+
return;
|
|
399
|
+
}
|
|
400
|
+
// 6. Global Runner Registration Mock (Discovery/Handshake)
|
|
401
|
+
if (method === "POST" &&
|
|
402
|
+
(url === "/actions/runner-registration" || url === "/api/v3/actions/runner-registration")) {
|
|
403
|
+
console.log(`[DTU] Handling global runner registration: ${url}`);
|
|
404
|
+
const token = `ghr_mock_tenant_token_${Math.random().toString(36).substring(7)}`;
|
|
405
|
+
const expiresAt = new Date(Date.now() + 3600 * 1000).toISOString();
|
|
406
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
407
|
+
res.end(JSON.stringify({
|
|
408
|
+
token: token,
|
|
409
|
+
token_schema: "OAuthAccessToken",
|
|
410
|
+
authorization_url: `${baseUrl}/auth/authorize`,
|
|
411
|
+
client_id: "mock-client-id",
|
|
412
|
+
tenant_id: "mock-tenant-id",
|
|
413
|
+
expiration: expiresAt,
|
|
414
|
+
url: baseUrl, // Attempt to populate TenantUrl as well if needed
|
|
415
|
+
}));
|
|
416
|
+
return;
|
|
417
|
+
}
|
|
418
|
+
// 12. Sessions Handler (Mock)
|
|
419
|
+
if (url?.includes("/sessions")) {
|
|
420
|
+
const sessionMatch = url?.match(/\/distributedtask\/pools\/(\d+)\/sessions(?:\/([^/?]+))?/);
|
|
421
|
+
if (sessionMatch) {
|
|
422
|
+
const poolId = sessionMatch[1];
|
|
423
|
+
const sessionId = sessionMatch[2];
|
|
424
|
+
if (method === "POST" && !sessionId) {
|
|
425
|
+
console.log(`[DTU] Creating session for pool ${poolId}`);
|
|
426
|
+
const newSessionId = crypto.randomUUID();
|
|
427
|
+
const response = {
|
|
428
|
+
sessionId: newSessionId,
|
|
429
|
+
ownerName: "oa-runner",
|
|
430
|
+
agent: {
|
|
431
|
+
id: 1,
|
|
432
|
+
name: "oa-runner",
|
|
433
|
+
version: "2.331.0",
|
|
434
|
+
osDescription: "Linux",
|
|
435
|
+
enabled: true,
|
|
436
|
+
status: "online",
|
|
437
|
+
},
|
|
438
|
+
encryptionKey: {
|
|
439
|
+
value: Buffer.from(crypto.randomBytes(32)).toString("base64"),
|
|
440
|
+
k: "encryptionKey",
|
|
441
|
+
},
|
|
442
|
+
};
|
|
443
|
+
sessions.set(newSessionId, response);
|
|
444
|
+
messageQueues.set(newSessionId, []);
|
|
445
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
446
|
+
res.end(JSON.stringify(response));
|
|
447
|
+
return;
|
|
448
|
+
}
|
|
449
|
+
if (method === "DELETE" && sessionId) {
|
|
450
|
+
console.log(`[DTU] Deleting session ${sessionId} for pool ${poolId}`);
|
|
451
|
+
// Close any pending long-poll so the runner doesn't have to cancel it
|
|
452
|
+
const pending = pendingPolls.get(sessionId);
|
|
453
|
+
if (pending && !pending.res.writableEnded) {
|
|
454
|
+
pending.res.writeHead(204);
|
|
455
|
+
pending.res.end();
|
|
456
|
+
}
|
|
457
|
+
pendingPolls.delete(sessionId);
|
|
458
|
+
sessions.delete(sessionId);
|
|
459
|
+
messageQueues.delete(sessionId);
|
|
460
|
+
res.writeHead(204);
|
|
461
|
+
res.end();
|
|
462
|
+
return;
|
|
463
|
+
}
|
|
464
|
+
}
|
|
465
|
+
}
|
|
466
|
+
// 13. Messages Handler (Mock) - Long Polling
|
|
467
|
+
if (url?.includes("/messages")) {
|
|
468
|
+
const urlParts = new URL(url, baseUrl);
|
|
469
|
+
const sessionId = urlParts.searchParams.get("sessionId");
|
|
470
|
+
if (method === "GET") {
|
|
471
|
+
const lastMessageId = urlParts.searchParams.get("lastMessageId");
|
|
472
|
+
console.log(`[DTU] Polling messages for session ${sessionId} (lastMessageId: ${lastMessageId})`);
|
|
473
|
+
if (!sessionId || !sessions.has(sessionId)) {
|
|
474
|
+
res.writeHead(404);
|
|
475
|
+
res.end("Session not found");
|
|
476
|
+
return;
|
|
477
|
+
}
|
|
478
|
+
// If there's already a pending poll for this session, close it
|
|
479
|
+
const existing = pendingPolls.get(sessionId);
|
|
480
|
+
if (existing) {
|
|
481
|
+
existing.res.writeHead(204);
|
|
482
|
+
existing.res.end();
|
|
483
|
+
}
|
|
484
|
+
pendingPolls.set(sessionId, { res, baseUrl });
|
|
485
|
+
console.log(`[DTU] TRACE-DELIVERY: Entering poll handler for session ${sessionId}. jobs.size=${jobs.size}`);
|
|
486
|
+
if (jobs.size > 0) {
|
|
487
|
+
const [[jobId, jobData]] = Array.from(jobs.entries());
|
|
488
|
+
console.log(`[DTU] TRACE-DELIVERY: Job found. Sending immediate job ${jobId} to session ${sessionId}`);
|
|
489
|
+
try {
|
|
490
|
+
const response = createJobResponse(jobId, jobData, baseUrl);
|
|
491
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
492
|
+
res.end(JSON.stringify(response));
|
|
493
|
+
jobs.delete(jobId);
|
|
494
|
+
pendingPolls.delete(sessionId);
|
|
495
|
+
return;
|
|
496
|
+
}
|
|
497
|
+
catch (e) {
|
|
498
|
+
console.error(`[DTU] Error creating job response:`, e);
|
|
499
|
+
// Don't delete job, let it retry? Or delete to avoid loop?
|
|
500
|
+
// Better to not delete so we can debug, but might infinite loop.
|
|
501
|
+
// For now, allow retry.
|
|
502
|
+
res.writeHead(500);
|
|
503
|
+
res.end("Internal Server Error generating job");
|
|
504
|
+
return;
|
|
505
|
+
}
|
|
506
|
+
}
|
|
507
|
+
// Long poll: Wait up to 20 seconds before returning empty
|
|
508
|
+
const timeout = setTimeout(() => {
|
|
509
|
+
const pending = pendingPolls.get(sessionId);
|
|
510
|
+
if (pending && pending.res === res) {
|
|
511
|
+
pendingPolls.delete(sessionId);
|
|
512
|
+
if (!res.writableEnded) {
|
|
513
|
+
// Returning 204 No Content for timeout is often better for mocks
|
|
514
|
+
res.writeHead(204);
|
|
515
|
+
res.end();
|
|
516
|
+
}
|
|
517
|
+
}
|
|
518
|
+
}, 20000);
|
|
519
|
+
res.on("close", () => {
|
|
520
|
+
clearTimeout(timeout);
|
|
521
|
+
const pending = pendingPolls.get(sessionId);
|
|
522
|
+
if (pending && pending.res === res) {
|
|
523
|
+
pendingPolls.delete(sessionId);
|
|
524
|
+
}
|
|
525
|
+
});
|
|
526
|
+
return;
|
|
527
|
+
}
|
|
528
|
+
if (method === "DELETE") {
|
|
529
|
+
const messageId = urlParts.searchParams.get("messageId");
|
|
530
|
+
console.log(`[DTU] Acknowledging/Deleting message ${messageId} for session ${sessionId}`);
|
|
531
|
+
res.writeHead(204);
|
|
532
|
+
res.end();
|
|
533
|
+
return;
|
|
534
|
+
}
|
|
535
|
+
}
|
|
536
|
+
// 7. Pipeline Service Discovery Mock
|
|
537
|
+
if (method === "GET" &&
|
|
538
|
+
(url?.includes("/_apis/pipelines") || url?.includes("/_apis/connectionData"))) {
|
|
539
|
+
console.log(`[DTU] Handling service discovery: ${url}`);
|
|
540
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
541
|
+
res.end(JSON.stringify({
|
|
542
|
+
value: [],
|
|
543
|
+
locationId: crypto.randomUUID(),
|
|
544
|
+
instanceId: crypto.randomUUID(),
|
|
545
|
+
locationServiceData: {
|
|
546
|
+
serviceOwner: "A85B8835-C1A1-4AAC-AE97-1C3D0BA72DBD",
|
|
547
|
+
defaultAccessMappingMoniker: "PublicAccessMapping",
|
|
548
|
+
accessMappings: [
|
|
549
|
+
{
|
|
550
|
+
moniker: "PublicAccessMapping",
|
|
551
|
+
displayName: "Public Access",
|
|
552
|
+
accessPoint: baseUrl,
|
|
553
|
+
},
|
|
554
|
+
],
|
|
555
|
+
serviceDefinitions: [
|
|
556
|
+
{
|
|
557
|
+
serviceType: "distributedtask",
|
|
558
|
+
identifier: "A85B8835-C1A1-4AAC-AE97-1C3D0BA72DBD",
|
|
559
|
+
displayName: "distributedtask",
|
|
560
|
+
relativeToSetting: 3,
|
|
561
|
+
relativePath: "",
|
|
562
|
+
description: "Distributed Task Service",
|
|
563
|
+
serviceOwner: "A85B8835-C1A1-4AAC-AE97-1C3D0BA72DBD",
|
|
564
|
+
status: 1, // ServiceStatus.Online — REQUIRED or VssLocationService skips it
|
|
565
|
+
locationMappings: [
|
|
566
|
+
{
|
|
567
|
+
accessMappingMoniker: "PublicAccessMapping",
|
|
568
|
+
location: baseUrl,
|
|
569
|
+
},
|
|
570
|
+
],
|
|
571
|
+
},
|
|
572
|
+
{
|
|
573
|
+
serviceType: "distributedtask",
|
|
574
|
+
identifier: "A8C47E17-4D56-4A56-92BB-DE7EA7DC65BE", // Pools
|
|
575
|
+
displayName: "Pools",
|
|
576
|
+
relativeToSetting: 3,
|
|
577
|
+
relativePath: "/_apis/distributedtask/pools",
|
|
578
|
+
description: "Pools Service",
|
|
579
|
+
serviceOwner: "A85B8835-C1A1-4AAC-AE97-1C3D0BA72DBD",
|
|
580
|
+
status: 1,
|
|
581
|
+
locationMappings: [
|
|
582
|
+
{
|
|
583
|
+
accessMappingMoniker: "PublicAccessMapping",
|
|
584
|
+
location: `${baseUrl}/_apis/distributedtask/pools`,
|
|
585
|
+
},
|
|
586
|
+
],
|
|
587
|
+
},
|
|
588
|
+
{
|
|
589
|
+
serviceType: "distributedtask",
|
|
590
|
+
identifier: "27d7f831-88c1-4719-8ca1-6a061dad90eb", // ActionDownloadInfo
|
|
591
|
+
displayName: "ActionDownloadInfo",
|
|
592
|
+
relativeToSetting: 3,
|
|
593
|
+
relativePath: "/_apis/distributedtask/hubs/{hubName}/plans/{planId}/actiondownloadinfo",
|
|
594
|
+
description: "Action Download Info Service",
|
|
595
|
+
serviceOwner: "A85B8835-C1A1-4AAC-AE97-1C3D0BA72DBD",
|
|
596
|
+
status: 1,
|
|
597
|
+
locationMappings: [
|
|
598
|
+
{
|
|
599
|
+
accessMappingMoniker: "PublicAccessMapping",
|
|
600
|
+
location: `${baseUrl}`,
|
|
601
|
+
},
|
|
602
|
+
],
|
|
603
|
+
},
|
|
604
|
+
{
|
|
605
|
+
serviceType: "distributedtask",
|
|
606
|
+
identifier: "858983e4-19bd-4c5e-864c-507b59b58b12", // AppendTimelineRecordFeedAsync
|
|
607
|
+
displayName: "AppendTimelineRecordFeed",
|
|
608
|
+
relativeToSetting: 3,
|
|
609
|
+
relativePath: "/_apis/distributedtask/hubs/{hubName}/plans/{planId}/timelines/{timelineId}/records/{recordId}/feed",
|
|
610
|
+
description: "Timeline Feed Service",
|
|
611
|
+
serviceOwner: "A85B8835-C1A1-4AAC-AE97-1C3D0BA72DBD",
|
|
612
|
+
status: 1,
|
|
613
|
+
locationMappings: [
|
|
614
|
+
{
|
|
615
|
+
accessMappingMoniker: "PublicAccessMapping",
|
|
616
|
+
location: `${baseUrl}`,
|
|
617
|
+
},
|
|
618
|
+
],
|
|
619
|
+
},
|
|
620
|
+
{
|
|
621
|
+
serviceType: "distributedtask",
|
|
622
|
+
identifier: "46f5667d-263a-4684-91b1-dff7fdcf64e2", // AppendLogContent / log file upload
|
|
623
|
+
displayName: "TaskLog",
|
|
624
|
+
relativeToSetting: 3,
|
|
625
|
+
relativePath: "/_apis/distributedtask/hubs/{hubName}/plans/{planId}/logs/{logId}",
|
|
626
|
+
description: "Task Log Service",
|
|
627
|
+
serviceOwner: "A85B8835-C1A1-4AAC-AE97-1C3D0BA72DBD",
|
|
628
|
+
status: 1,
|
|
629
|
+
locationMappings: [
|
|
630
|
+
{
|
|
631
|
+
accessMappingMoniker: "PublicAccessMapping",
|
|
632
|
+
location: `${baseUrl}`,
|
|
633
|
+
},
|
|
634
|
+
],
|
|
635
|
+
},
|
|
636
|
+
],
|
|
637
|
+
},
|
|
638
|
+
}));
|
|
639
|
+
return;
|
|
640
|
+
}
|
|
641
|
+
// POST /_dtu/start-runner — called by localJob.ts when spawning a runner container
|
|
642
|
+
// Body: { runnerName: string, logDir: string }
|
|
643
|
+
if (method === "POST" && url === "/_dtu/start-runner") {
|
|
644
|
+
let body = "";
|
|
645
|
+
req.on("data", (chunk) => {
|
|
646
|
+
body += chunk;
|
|
647
|
+
});
|
|
648
|
+
req.on("end", () => {
|
|
649
|
+
try {
|
|
650
|
+
const { logDir } = JSON.parse(body);
|
|
651
|
+
fs.mkdirSync(logDir, { recursive: true });
|
|
652
|
+
activeStepOutputPath = path.join(logDir, "step-output.log");
|
|
653
|
+
// Truncate/create fresh for this run
|
|
654
|
+
fs.writeFileSync(activeStepOutputPath, "");
|
|
655
|
+
}
|
|
656
|
+
catch (e) {
|
|
657
|
+
console.warn("[DTU] start-runner parse error:", e);
|
|
658
|
+
}
|
|
659
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
660
|
+
res.end(JSON.stringify({ ok: true }));
|
|
661
|
+
});
|
|
662
|
+
return;
|
|
663
|
+
}
|
|
664
|
+
// 19. Append Timeline Record Feed — write lines to active runner's step-output.log
|
|
665
|
+
if (method === "POST" && url?.includes("/feed")) {
|
|
666
|
+
let body = "";
|
|
667
|
+
req.on("data", (chunk) => {
|
|
668
|
+
body += chunk;
|
|
669
|
+
});
|
|
670
|
+
req.on("end", () => {
|
|
671
|
+
try {
|
|
672
|
+
const payload = JSON.parse(body || "{}");
|
|
673
|
+
let count = 0;
|
|
674
|
+
if (payload.value && Array.isArray(payload.value)) {
|
|
675
|
+
for (const l of payload.value) {
|
|
676
|
+
const msg = typeof l === "string" ? l : (l.message ?? "");
|
|
677
|
+
if (msg) {
|
|
678
|
+
writeStepOutputLine(msg);
|
|
679
|
+
count++;
|
|
680
|
+
}
|
|
681
|
+
}
|
|
682
|
+
}
|
|
683
|
+
else if (Array.isArray(payload)) {
|
|
684
|
+
for (const l of payload) {
|
|
685
|
+
writeStepOutputLine(typeof l === "string" ? l : JSON.stringify(l));
|
|
686
|
+
count++;
|
|
687
|
+
}
|
|
688
|
+
}
|
|
689
|
+
if (count > 0) {
|
|
690
|
+
console.log(`[DTU] Feed: wrote ${count} lines to ${activeStepOutputPath ?? "(no active runner)"}`);
|
|
691
|
+
}
|
|
692
|
+
}
|
|
693
|
+
catch {
|
|
694
|
+
// best-effort
|
|
695
|
+
}
|
|
696
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
697
|
+
res.end(JSON.stringify({ count: 0, value: [] }));
|
|
698
|
+
});
|
|
699
|
+
return;
|
|
700
|
+
}
|
|
701
|
+
// 10. Pools Handler (Mock)
|
|
702
|
+
if (method === "GET" &&
|
|
703
|
+
url?.includes("/_apis/distributedtask/pools") &&
|
|
704
|
+
!url?.includes("/agents")) {
|
|
705
|
+
console.log(`[DTU] Handling pools request: ${url}`);
|
|
706
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
707
|
+
res.end(JSON.stringify({
|
|
708
|
+
count: 1,
|
|
709
|
+
value: [
|
|
710
|
+
{
|
|
711
|
+
id: 1,
|
|
712
|
+
name: "Default",
|
|
713
|
+
isHosted: false,
|
|
714
|
+
autoProvision: true,
|
|
715
|
+
},
|
|
716
|
+
],
|
|
717
|
+
}));
|
|
718
|
+
return;
|
|
719
|
+
}
|
|
720
|
+
// 11. Agents Handler (Mock)
|
|
721
|
+
// GET: Check if agent exists
|
|
722
|
+
if (method === "GET" &&
|
|
723
|
+
url?.includes("/_apis/distributedtask/pools") &&
|
|
724
|
+
url?.includes("/agents")) {
|
|
725
|
+
console.log(`[DTU] Handling get agents request: ${url}`);
|
|
726
|
+
const _agentName = new URLSearchParams(url.split("?")[1]).get("agentName");
|
|
727
|
+
// If querying by name, return empty list to simulate "not found" so runner registers
|
|
728
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
729
|
+
res.end(JSON.stringify({
|
|
730
|
+
count: 0,
|
|
731
|
+
value: [],
|
|
732
|
+
}));
|
|
733
|
+
return;
|
|
734
|
+
}
|
|
735
|
+
// POST: Register new agent
|
|
736
|
+
if (method === "POST" &&
|
|
737
|
+
url?.includes("/_apis/distributedtask/pools") &&
|
|
738
|
+
url?.includes("/agents")) {
|
|
739
|
+
console.log(`[DTU] Handling register agent request: ${url}`);
|
|
740
|
+
let body = "";
|
|
741
|
+
req.on("data", (chunk) => {
|
|
742
|
+
body += chunk;
|
|
743
|
+
});
|
|
744
|
+
req.on("end", () => {
|
|
745
|
+
const payload = JSON.parse(body);
|
|
746
|
+
const agentId = Math.floor(Math.random() * 10000);
|
|
747
|
+
const response = {
|
|
748
|
+
id: agentId,
|
|
749
|
+
name: payload.name,
|
|
750
|
+
version: payload.version,
|
|
751
|
+
osDescription: payload.osDescription,
|
|
752
|
+
ephemeral: payload.ephemeral,
|
|
753
|
+
disableUpdate: payload.disableUpdate,
|
|
754
|
+
enabled: true,
|
|
755
|
+
status: "online",
|
|
756
|
+
provisioningState: "Provisioned",
|
|
757
|
+
authorization: {
|
|
758
|
+
clientId: crypto.randomUUID(),
|
|
759
|
+
authorizationUrl: `${baseUrl}/auth/authorize`,
|
|
760
|
+
},
|
|
761
|
+
accessPoint: `${baseUrl}/_apis/distributedtask/pools/${payload.poolId}/agents/${agentId}`,
|
|
762
|
+
};
|
|
763
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
764
|
+
res.end(JSON.stringify(response));
|
|
765
|
+
});
|
|
766
|
+
return;
|
|
767
|
+
}
|
|
768
|
+
// 14. Job Request Update / Renewal Mock
|
|
769
|
+
if (method === "PATCH" && url?.includes("/_apis/distributedtask/jobrequests")) {
|
|
770
|
+
console.log(`[DTU] Handling job request update/renewal: ${url}`);
|
|
771
|
+
let body = "";
|
|
772
|
+
req.on("data", (chunk) => {
|
|
773
|
+
body += chunk;
|
|
774
|
+
});
|
|
775
|
+
req.on("end", () => {
|
|
776
|
+
try {
|
|
777
|
+
const payload = JSON.parse(body || "{}");
|
|
778
|
+
// Update LockedUntil to keep the runner happy
|
|
779
|
+
if (!payload.lockedUntil) {
|
|
780
|
+
// If it's just a query param lock renewal, we might need to construct a response.
|
|
781
|
+
// But usually the runner sends the job request object.
|
|
782
|
+
}
|
|
783
|
+
// Always return a valid future date for lock
|
|
784
|
+
payload.lockedUntil = new Date(Date.now() + 60000).toISOString();
|
|
785
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
786
|
+
res.end(JSON.stringify(payload));
|
|
787
|
+
}
|
|
788
|
+
catch (e) {
|
|
789
|
+
console.error("[DTU] Error parsing job request update body", e);
|
|
790
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
791
|
+
res.end(JSON.stringify({ lockedUntil: new Date(Date.now() + 60000).toISOString() }));
|
|
792
|
+
}
|
|
793
|
+
});
|
|
794
|
+
return;
|
|
795
|
+
}
|
|
796
|
+
// 15. Timeline Records Handler (Status Updates & Log Links)
|
|
797
|
+
if ((method === "PATCH" || method === "POST") &&
|
|
798
|
+
url?.includes("/_apis/distributedtask/timelines/") &&
|
|
799
|
+
url?.includes("/records") &&
|
|
800
|
+
!url?.includes("/feed")) {
|
|
801
|
+
if (method === "POST") {
|
|
802
|
+
console.log(`[DTU] Creating timeline records: ${url}`);
|
|
803
|
+
}
|
|
804
|
+
else {
|
|
805
|
+
console.log(`[DTU] Updating timeline records: ${url}`);
|
|
806
|
+
}
|
|
807
|
+
const timelineId = url.split("/timelines/")[1].split("/")[0];
|
|
808
|
+
let body = "";
|
|
809
|
+
req.on("data", (chunk) => {
|
|
810
|
+
body += chunk;
|
|
811
|
+
});
|
|
812
|
+
req.on("end", () => {
|
|
813
|
+
try {
|
|
814
|
+
const payload = JSON.parse(body || "{}");
|
|
815
|
+
const newRecords = payload.value || [];
|
|
816
|
+
let existing = timelines.get(timelineId) || [];
|
|
817
|
+
// Merge/Add records
|
|
818
|
+
for (const record of newRecords) {
|
|
819
|
+
const idx = existing.findIndex((r) => r.id === record.id);
|
|
820
|
+
if (idx >= 0) {
|
|
821
|
+
existing[idx] = { ...existing[idx], ...record };
|
|
822
|
+
}
|
|
823
|
+
else {
|
|
824
|
+
existing.push(record);
|
|
825
|
+
}
|
|
826
|
+
}
|
|
827
|
+
timelines.set(timelineId, existing);
|
|
828
|
+
console.log(`[DTU] Processed timeline ${timelineId} with ${newRecords.length} records`);
|
|
829
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
830
|
+
res.end(JSON.stringify({ count: existing.length, value: existing }));
|
|
831
|
+
}
|
|
832
|
+
catch (e) {
|
|
833
|
+
console.error("[DTU] Error parsing timeline body", e);
|
|
834
|
+
res.writeHead(400);
|
|
835
|
+
res.end();
|
|
836
|
+
}
|
|
837
|
+
});
|
|
838
|
+
return;
|
|
839
|
+
}
|
|
840
|
+
// 16. Log Creation Handler
|
|
841
|
+
if (method === "POST" &&
|
|
842
|
+
url?.includes("/_apis/distributedtask/") &&
|
|
843
|
+
url?.includes("/logs") &&
|
|
844
|
+
!url?.includes("/lines")) {
|
|
845
|
+
console.log(`[DTU] Creating log: ${url}`);
|
|
846
|
+
let logId = "";
|
|
847
|
+
const match = url.match(/\/logs\/([^/?]+)/);
|
|
848
|
+
if (match) {
|
|
849
|
+
logId = match[1];
|
|
850
|
+
}
|
|
851
|
+
let body = "";
|
|
852
|
+
req.on("data", (chunk) => {
|
|
853
|
+
body += chunk;
|
|
854
|
+
});
|
|
855
|
+
req.on("end", () => {
|
|
856
|
+
// Ensure map entry exists
|
|
857
|
+
if (!logs.has(logId)) {
|
|
858
|
+
logs.set(logId, []);
|
|
859
|
+
}
|
|
860
|
+
res.writeHead(201, { "Content-Type": "application/json" });
|
|
861
|
+
res.end(JSON.stringify({ id: logId, state: "Created" })); // Mock response
|
|
862
|
+
});
|
|
863
|
+
return;
|
|
864
|
+
}
|
|
865
|
+
// 17. Log Line Appending Handler
|
|
866
|
+
if (method === "POST" && url?.includes("/_apis/distributedtask/") && url?.includes("/lines")) {
|
|
867
|
+
let logId = "1";
|
|
868
|
+
const match = url.match(/\/logs\/([^/?]+)/);
|
|
869
|
+
if (match) {
|
|
870
|
+
logId = match[1];
|
|
871
|
+
}
|
|
872
|
+
let body = "";
|
|
873
|
+
req.on("data", (chunk) => {
|
|
874
|
+
body += chunk;
|
|
875
|
+
});
|
|
876
|
+
req.on("end", () => {
|
|
877
|
+
try {
|
|
878
|
+
const payload = JSON.parse(body || "{}");
|
|
879
|
+
const lines = (payload.value || []).map((l) => l.message || l);
|
|
880
|
+
const existing = logs.get(logId) || [];
|
|
881
|
+
existing.push(...lines);
|
|
882
|
+
logs.set(logId, existing);
|
|
883
|
+
console.log(`[DTU] Appened ${lines.length} lines to log ${logId}`);
|
|
884
|
+
}
|
|
885
|
+
catch (e) {
|
|
886
|
+
console.warn("[DTU] Failed to parse log lines", e);
|
|
887
|
+
}
|
|
888
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
889
|
+
res.end(JSON.stringify({ count: 0, value: [] }));
|
|
890
|
+
});
|
|
891
|
+
return;
|
|
892
|
+
}
|
|
893
|
+
// 18. Generic Step Outputs Handler
|
|
894
|
+
if (method === "POST" && url?.includes("/outputs")) {
|
|
895
|
+
res.writeHead(200);
|
|
896
|
+
res.end(JSON.stringify({ value: {} }));
|
|
897
|
+
return;
|
|
898
|
+
}
|
|
899
|
+
// Log file upload (resource 46f5667d) — AppendLogContentAsync sends UTF-8 step log text.
|
|
900
|
+
// POST /_apis/distributedtask/hubs/{hub}/plans/{planId}/logs/{logId}?startLine=...
|
|
901
|
+
// We write each non-annotation line to step-output.log.
|
|
902
|
+
if ((method === "POST" || method === "PUT") &&
|
|
903
|
+
url?.includes("/_apis/") &&
|
|
904
|
+
url?.includes("/logs/")) {
|
|
905
|
+
const chunks = [];
|
|
906
|
+
req.on("data", (chunk) => {
|
|
907
|
+
chunks.push(chunk);
|
|
908
|
+
});
|
|
909
|
+
req.on("end", () => {
|
|
910
|
+
const text = Buffer.concat(chunks).toString("utf8");
|
|
911
|
+
try {
|
|
912
|
+
// Each line of text is a step log line. Filter runner-internal annotations.
|
|
913
|
+
for (const rawLine of text.split("\n")) {
|
|
914
|
+
const line = rawLine.trimEnd();
|
|
915
|
+
// Skip empty lines and ##[...] annotation markers
|
|
916
|
+
if (!line || line.startsWith("##[") || line.startsWith("[command]")) {
|
|
917
|
+
continue;
|
|
918
|
+
}
|
|
919
|
+
writeStepOutputLine(line);
|
|
920
|
+
}
|
|
921
|
+
if (text.trim()) {
|
|
922
|
+
console.log(`[DTU] Log upload: wrote ${text.split("\n").length} lines to step-output`);
|
|
923
|
+
}
|
|
924
|
+
}
|
|
925
|
+
catch {
|
|
926
|
+
/* best-effort */
|
|
927
|
+
}
|
|
928
|
+
// Extract logId from URL: .../logs/{logId}?...
|
|
929
|
+
const logIdMatch = url?.match(/\/logs\/(\d+)/);
|
|
930
|
+
const logId = logIdMatch ? parseInt(logIdMatch[1]) : 1;
|
|
931
|
+
const lineCount = text.split("\n").filter((l) => l.trim()).length;
|
|
932
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
933
|
+
res.end(JSON.stringify({ id: logId, lineCount, createdOn: new Date().toISOString() }));
|
|
934
|
+
});
|
|
935
|
+
return;
|
|
936
|
+
}
|
|
937
|
+
// Create log entry stub (returns a log ID)
|
|
938
|
+
if (method === "POST" &&
|
|
939
|
+
url?.includes("/_apis/") &&
|
|
940
|
+
url?.includes("/logs") &&
|
|
941
|
+
!url?.includes("/logs/")) {
|
|
942
|
+
const logId = Math.floor(Math.random() * 10000);
|
|
943
|
+
req.on("data", () => { });
|
|
944
|
+
req.on("end", () => {
|
|
945
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
946
|
+
res.end(JSON.stringify({ id: logId, lineCount: 0, createdOn: new Date().toISOString() }));
|
|
947
|
+
});
|
|
948
|
+
return;
|
|
949
|
+
}
|
|
950
|
+
// Attachments / results stub — accept and discard
|
|
951
|
+
if ((method === "POST" || method === "PUT" || method === "PATCH") &&
|
|
952
|
+
url?.includes("/_apis/") &&
|
|
953
|
+
(url?.includes("/attachments") || url?.includes("/results"))) {
|
|
954
|
+
req.on("data", () => { });
|
|
955
|
+
req.on("end", () => {
|
|
956
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
957
|
+
res.end(JSON.stringify({ value: [] }));
|
|
958
|
+
});
|
|
959
|
+
return;
|
|
960
|
+
}
|
|
961
|
+
// 19. Generic Job Retrieval Handler
|
|
962
|
+
if (method === "GET" &&
|
|
963
|
+
url?.includes("/_apis/distributedtask/pools/") &&
|
|
964
|
+
url?.includes("/jobs/")) {
|
|
965
|
+
console.log(`[DTU] Acknowledging job retrieval: ${url}`);
|
|
966
|
+
res.writeHead(200);
|
|
967
|
+
res.end(JSON.stringify({
|
|
968
|
+
id: "1",
|
|
969
|
+
name: "job",
|
|
970
|
+
status: "completed",
|
|
971
|
+
}));
|
|
972
|
+
return;
|
|
973
|
+
}
|
|
974
|
+
// Debug: Dump State
|
|
975
|
+
if (method === "GET" && url === "/_dtu/dump") {
|
|
976
|
+
const dump = {
|
|
977
|
+
jobs: Object.fromEntries(jobs),
|
|
978
|
+
timelines: Object.fromEntries(timelines),
|
|
979
|
+
logs: Object.fromEntries(logs),
|
|
980
|
+
};
|
|
981
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
982
|
+
res.end(JSON.stringify(dump));
|
|
983
|
+
return;
|
|
984
|
+
}
|
|
985
|
+
// 18. Resolve Action Download Info Mock
|
|
986
|
+
if (method === "POST" && url?.includes("/actiondownloadinfo")) {
|
|
987
|
+
console.log(`[DTU] resolving action download info: ${url}`);
|
|
988
|
+
let body = "";
|
|
989
|
+
req.on("data", (chunk) => {
|
|
990
|
+
body += chunk;
|
|
991
|
+
});
|
|
992
|
+
req.on("end", () => {
|
|
993
|
+
try {
|
|
994
|
+
const payload = JSON.parse(body || "{}");
|
|
995
|
+
const actions = payload.actions || [];
|
|
996
|
+
const result = { actions: {} };
|
|
997
|
+
for (const action of actions) {
|
|
998
|
+
const key = `${action.nameWithOwner}@${action.ref}`;
|
|
999
|
+
// Construct a public GitHub URL for the action
|
|
1000
|
+
// e.g. https://api.github.com/repos/actions/checkout/tarball/v4
|
|
1001
|
+
// or https://codeload.github.com/actions/checkout/legacy.tar.gz/refs/tags/v4 ?
|
|
1002
|
+
// The runner seems to support standard GitHub API tarball URLs.
|
|
1003
|
+
// We'll use the API URL format which redirects to codeload.
|
|
1004
|
+
const downloadUrl = `https://api.github.com/repos/${action.nameWithOwner}/tarball/${action.ref}`;
|
|
1005
|
+
result.actions[key] = {
|
|
1006
|
+
nameWithOwner: action.nameWithOwner,
|
|
1007
|
+
resolvedNameWithOwner: action.nameWithOwner,
|
|
1008
|
+
ref: action.ref,
|
|
1009
|
+
resolvedSha: "fake-sha",
|
|
1010
|
+
tarballUrl: downloadUrl,
|
|
1011
|
+
zipballUrl: downloadUrl.replace("tarball", "zipball"),
|
|
1012
|
+
authentication: null, // No token for public actions
|
|
1013
|
+
};
|
|
1014
|
+
}
|
|
1015
|
+
console.log(`[DTU] Resolved ${actions.length} actions.`);
|
|
1016
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
1017
|
+
res.end(JSON.stringify(result));
|
|
1018
|
+
}
|
|
1019
|
+
catch (e) {
|
|
1020
|
+
console.error("[DTU] Error resolving actions", e);
|
|
1021
|
+
res.writeHead(400);
|
|
1022
|
+
res.end();
|
|
1023
|
+
}
|
|
1024
|
+
});
|
|
1025
|
+
return;
|
|
1026
|
+
}
|
|
1027
|
+
// 8. Global OPTIONS Handler (for CORS/Capabilities + Resource Discovery)
// Answers the VSS client's location-discovery OPTIONS probe with CORS headers
// and the list of mocked API resources. Each entry's `id` GUID and
// `routeTemplate` must match what the runner's client library expects for
// that resource, so the data below is kept verbatim.
if (method === "OPTIONS") {
  res.writeHead(200, {
    "Access-Control-Allow-Origin": "*",
    "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
    "Access-Control-Allow-Headers": "Content-Type, Authorization, X-TFS-FedAuthRedirect, X-VSS-E2EID, X-TFS-Session",
    "Content-Type": "application/json",
  });
  // Return the list of available API resources.
  // This allows VssHttpClientBase to discover the "Pools" resource.
  const responseValue = [
    {
      id: "A8C47E17-4D56-4A56-92BB-DE7EA7DC65BE",
      area: "distributedtask",
      resourceName: "pools",
      routeTemplate: "_apis/distributedtask/pools/{poolId}",
      resourceVersion: 1,
      minVersion: "1.0",
      maxVersion: "9.0",
      releasedVersion: "9.0",
    },
    {
      id: "E298EF32-5878-4CAB-993C-043836571F42",
      area: "distributedtask",
      resourceName: "agents",
      routeTemplate: "_apis/distributedtask/pools/{poolId}/agents/{agentId}",
      resourceVersion: 1,
      minVersion: "1.0",
      maxVersion: "9.0",
      releasedVersion: "9.0",
    },
    {
      id: "C3A054F6-7A8A-49C0-944E-3A8E5D7ADFD7",
      area: "distributedtask",
      resourceName: "messages",
      routeTemplate: "_apis/distributedtask/pools/{poolId}/messages",
      resourceVersion: 1,
      minVersion: "1.0",
      maxVersion: "9.0",
      releasedVersion: "9.0",
    },
    {
      id: "134E239E-2DF3-4794-A6F6-24F1F19EC8DC",
      area: "distributedtask",
      resourceName: "sessions",
      routeTemplate: "_apis/distributedtask/pools/{poolId}/sessions/{sessionId}",
      resourceVersion: 1,
      minVersion: "1.0",
      maxVersion: "9.0",
      // NOTE(review): unlike every other entry, this one has no
      // `releasedVersion` — confirm whether that omission is intentional.
    },
    {
      id: "83597576-CC2C-453C-BEA6-2882AE6A1653",
      area: "distributedtask",
      resourceName: "timelines",
      routeTemplate: "_apis/distributedtask/timelines/{timelineId}",
      resourceVersion: 1,
      minVersion: "1.0",
      maxVersion: "9.0",
      releasedVersion: "9.0",
    },
    {
      id: "27d7f831-88c1-4719-8ca1-6a061dad90eb",
      area: "distributedtask",
      resourceName: "actiondownloadinfo",
      routeTemplate: "_apis/distributedtask/hubs/{hubName}/plans/{planId}/actiondownloadinfo",
      resourceVersion: 1,
      minVersion: "1.0",
      maxVersion: "6.0",
      releasedVersion: "6.0",
    },
    {
      id: "858983e4-19bd-4c5e-864c-507b59b58b12",
      area: "distributedtask",
      resourceName: "feed",
      routeTemplate: "_apis/distributedtask/hubs/{hubName}/plans/{planId}/timelines/{timelineId}/records/{recordId}/feed",
      resourceVersion: 1,
      minVersion: "1.0",
      maxVersion: "9.0",
      releasedVersion: "9.0",
    },
    {
      id: "46f5667d-263a-4684-91b1-dff7fdcf64e2",
      area: "distributedtask",
      resourceName: "logs",
      routeTemplate: "_apis/distributedtask/hubs/{hubName}/plans/{planId}/logs/{logId}",
      resourceVersion: 1,
      minVersion: "1.0",
      maxVersion: "9.0",
      releasedVersion: "9.0",
    },
    {
      id: "8893BC5B-35B2-4BE7-83CB-99E683551DB4",
      area: "distributedtask",
      resourceName: "records",
      routeTemplate: "_apis/distributedtask/timelines/{timelineId}/records/{recordId}",
      resourceVersion: 1,
      minVersion: "1.0",
      maxVersion: "9.0",
      releasedVersion: "9.0",
    },
    {
      id: "FC825784-C92A-4299-9221-998A02D1B54F",
      area: "distributedtask",
      resourceName: "jobrequests",
      routeTemplate: "_apis/distributedtask/jobrequests/{jobId}",
      resourceVersion: 1,
      minVersion: "1.0",
      maxVersion: "9.0",
      releasedVersion: "9.0",
    },
    {
      id: "0A1EFD25-ABDA-43BD-9629-6C7BDD2E0D60",
      area: "distributedtask",
      resourceName: "jobinstances",
      routeTemplate: "_apis/distributedtask/jobinstances/{jobId}",
      resourceVersion: 1,
      minVersion: "1.0",
      maxVersion: "9.0",
      releasedVersion: "9.0",
    },
  ];
  res.end(JSON.stringify({
    count: responseValue.length,
    value: responseValue,
  }));
  return;
}
|
|
1154
|
+
// 9. Generic API Root Handler (to prevent 404s on discovery)
// Any GET under /_apis not matched above returns an empty collection so the
// client's resource discovery never fails outright.
if (method === "GET" && url?.startsWith("/_apis")) {
  console.log(`[DTU] Catch-all for _apis: ${url}`);
  res.writeHead(200, { "Content-Type": "application/json" });
  res.end(JSON.stringify({ value: [] }));
  return;
}
|
|
1161
|
+
// health check
|
|
1162
|
+
if ((method === "GET" || method === "HEAD") && url === "/") {
|
|
1163
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
1164
|
+
res.end(method === "GET" ? JSON.stringify({ status: "online", seededJobs: jobs.size }) : undefined);
|
|
1165
|
+
return;
|
|
1166
|
+
}
|
|
1167
|
+
// 16. Fallback (404)
// Log unhandled requests to see if we satisfy all runner demands; the header
// dump helps identify which client call produced the miss.
console.log(`[DTU] 404 Not Found: ${req.method} ${url}`);
console.log(`[DTU] Unhandled Headers:`, JSON.stringify(req.headers, null, 2));
res.writeHead(404);
res.end("Not Found (DTU Mock)");
|
|
1173
|
+
});
|
|
1174
|
+
// Start the mock server when executed directly, or whenever not under tests.
// NOTE(review): the second clause means this listens whenever NODE_ENV is not
// "test" — even when the module is merely imported; confirm that is intended.
if (import.meta.url === `file://${process.argv[1]}` || process.env.NODE_ENV !== "test") {
  server.listen(config.DTU_PORT, "0.0.0.0", () => {
    console.log(`[DTU] OA-RUN-1 Mock GitHub API server running at http://0.0.0.0:${config.DTU_PORT}`);
    console.log(`[DTU] Logging to ${DTU_LOG_PATH}`);
  });
}
|