dtu-github-actions 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/config.d.ts +39 -0
- package/dist/config.js +29 -0
- package/dist/ephemeral.d.ts +16 -0
- package/dist/ephemeral.js +48 -0
- package/dist/server/index.d.ts +4 -0
- package/dist/server/index.js +326 -0
- package/dist/server/logger.d.ts +4 -0
- package/dist/server/logger.js +56 -0
- package/dist/server/routes/actions/generators.d.ts +25 -0
- package/dist/server/routes/actions/generators.js +313 -0
- package/dist/server/routes/actions/index.d.ts +2 -0
- package/dist/server/routes/actions/index.js +575 -0
- package/dist/server/routes/artifacts.d.ts +2 -0
- package/dist/server/routes/artifacts.js +332 -0
- package/dist/server/routes/cache.d.ts +2 -0
- package/dist/server/routes/cache.js +230 -0
- package/dist/server/routes/cache.test.d.ts +1 -0
- package/dist/server/routes/cache.test.js +229 -0
- package/dist/server/routes/dtu.d.ts +3 -0
- package/dist/server/routes/dtu.js +141 -0
- package/dist/server/routes/github.d.ts +2 -0
- package/dist/server/routes/github.js +109 -0
- package/dist/server/start.d.ts +1 -0
- package/dist/server/start.js +22 -0
- package/dist/server/store.d.ts +44 -0
- package/dist/server/store.js +100 -0
- package/dist/server.js +1179 -0
- package/dist/server.test.d.ts +1 -0
- package/dist/server.test.js +322 -0
- package/dist/simulate.d.ts +1 -0
- package/dist/simulate.js +47 -0
- package/dist/types.d.ts +111 -0
- package/dist/types.js +1 -0
- package/package.json +43 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Intentionally empty: emitted by the TypeScript compiler so this file is treated as an ES module.
export {};
|
|
@@ -0,0 +1,322 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, beforeAll, afterAll } from "vitest";
|
|
2
|
+
import { state } from "./server/store.js";
|
|
3
|
+
import { bootstrapAndReturnApp } from "./server/index.js";
|
|
4
|
+
import { getBaseUrl } from "./server/routes/dtu.js";
|
|
5
|
+
import http from "node:http";
|
|
6
|
+
import fs from "node:fs";
|
|
7
|
+
import path from "node:path";
|
|
8
|
+
import { getDtuLogPath } from "./server/logger.js";
|
|
9
|
+
let PORT;
/**
 * Minimal HTTP client helper for exercising the mock server in tests.
 * Sends `method` `path` to localhost:PORT; when `body` is truthy it is
 * JSON-serialized and sent with a JSON Content-Type header.
 *
 * Resolves with `{ status, body }` where `body` is the parsed JSON response,
 * `null` for an empty response, or the raw text if parsing fails.
 * Rejects only on transport-level errors.
 */
async function request(method, path, body) {
    return new Promise((resolve, reject) => {
        const payload = body ? JSON.stringify(body) : undefined;
        const options = {
            hostname: "localhost",
            port: PORT,
            path: path,
            method: method,
            headers: payload ? { "Content-Type": "application/json" } : {},
        };
        const req = http.request(options, (res) => {
            const chunks = [];
            res.on("data", (chunk) => chunks.push(chunk));
            res.on("end", () => {
                const raw = chunks.join("");
                const status = res.statusCode || 0;
                try {
                    // Empty responses become `null`; anything else must be JSON.
                    resolve({ status: status, body: raw ? JSON.parse(raw) : null });
                }
                catch {
                    // Non-JSON payloads (e.g. plain-text errors) come back verbatim.
                    resolve({ status: status, body: raw });
                }
            });
        });
        req.on("error", reject);
        if (payload !== undefined) {
            req.write(payload);
        }
        req.end();
    });
}
|
|
42
|
+
// Integration tests for the DTU mock GitHub Actions server: one server
// instance is booted for the whole suite on an ephemeral port, and the
// module-level PORT is updated so the request() helper targets it.
describe("DTU Server", () => {
    let server;
    beforeAll(async () => {
        state.reset();
        const app = await bootstrapAndReturnApp();
        // Listen on port 0 so the OS assigns a free ephemeral port.
        return new Promise((resolve) => {
            server = app.listen(0, () => {
                // The framework wraps the raw Node server; the handle lives on `.server`.
                const address = server.server?.address();
                PORT = address.port;
                resolve();
            });
        });
    });
    beforeEach(() => {
        // Keep tests independent of each other's seeded jobs.
        state.jobs.clear();
    });
    afterAll(async () => {
        // Close the underlying Node server if it was ever started.
        await new Promise((resolve) => {
            if (server && server.server) {
                server.server.close(() => resolve());
            }
            else {
                resolve();
            }
        });
    });
    it("should handle health check", async () => {
        const res = await request("GET", "/");
        expect(res.status).toBe(200);
        expect(res.body.status).toBe("online");
    });
    it("should seed a job", async () => {
        const job = { id: 123, name: "test-job" };
        const res = await request("POST", "/_dtu/seed", job);
        expect(res.status).toBe(201);
        // Job ids are stored as strings even when seeded as numbers.
        expect(res.body.jobId).toBe("123");
        const storedJob = state.jobs.get("123");
        expect(storedJob.id).toBe(job.id);
        expect(storedJob.name).toBe(job.name);
    });
    it("should retrieve a seeded job", async () => {
        await request("POST", "/_dtu/seed", { id: 123, name: "test-job" });
        const res = await request("GET", "/repos/owner/repo/actions/jobs/123");
        expect(res.status).toBe(200);
        expect(res.body.id).toBe(123);
    });
    it("should handle missing job", async () => {
        const res = await request("GET", "/repos/owner/repo/actions/jobs/999");
        expect(res.status).toBe(404);
    });
    it("should handle installation lookup", async () => {
        const res = await request("GET", "/repos/owner/repo/installation");
        expect(res.status).toBe(200);
        expect(res.body.id).toBe(12345678);
        expect(res.body.access_tokens_url).toContain("/app/installations/12345678/access_tokens");
    });
    it("should handle access token exchange", async () => {
        const res = await request("POST", "/app/installations/12345678/access_tokens");
        expect(res.status).toBe(201);
        expect(res.body.token).toContain("ghs_mock_token_12345678");
    });
    it("should handle registration token generation", async () => {
        const res = await request("POST", "/repos/owner/repo/actions/runners/registration-token");
        expect(res.status).toBe(201);
        expect(res.body.token).toContain("ghr_mock_registration_token");
    });
    it("should handle pipeline service discovery", async () => {
        const res = await request("GET", "/_apis/pipelines");
        expect(res.status).toBe(200);
        expect(res.body.locationServiceData).toBeDefined();
        expect(res.body.locationServiceData.serviceDefinitions).toBeDefined();
    });
    it("should handle global runner registration", async () => {
        const res = await request("POST", "/actions/runner-registration");
        expect(res.status).toBe(200);
        expect(res.body.token).toBeDefined();
        expect(res.body.token_schema).toBe("OAuthAccessToken");
    });
    it("should handle session creation", async () => {
        const res = await request("POST", "/_apis/distributedtask/pools/1/sessions");
        expect(res.status).toBe(200);
        expect(res.body.sessionId).toBeDefined();
        expect(res.body.agent.name).toBe("agent-ci-runner");
    });
    it("should handle long polling for messages", async () => {
        // 1. Create a session first
        const sessionRes = await request("POST", "/_apis/distributedtask/pools/1/sessions");
        const sessionId = sessionRes.body.sessionId;
        // 2. Poll for messages (expecting 204 or 200 if job seeded)
        // We'll seed a job first to ensure 200
        await request("POST", "/_dtu/seed", { id: 456, name: "poll-job" });
        const pollRes = await request("GET", `/_apis/distributedtask/pools/1/messages?sessionId=${sessionId}`);
        expect(pollRes.status).toBe(200);
        expect(pollRes.body.MessageType).toBe("PipelineAgentJobRequest");
        // The message Body is a JSON string nested inside the JSON envelope.
        const body = JSON.parse(pollRes.body.Body);
        expect(body.JobDisplayName).toBe("poll-job");
    });
    it("should log unhandled requests to 404.log", async () => {
        const logDir = path.dirname(getDtuLogPath());
        const logFile = path.join(logDir, "404.log");
        // Clean up any existing 404.log
        if (fs.existsSync(logFile)) {
            fs.unlinkSync(logFile);
        }
        const res = await request("POST", "/some/unhandled/route", { test: "payload" });
        expect(res.status).toBe(404);
        // Give the file writing a tiny bit of time to complete if needed,
        // though appendFileSync is synchronous.
        expect(fs.existsSync(logFile)).toBe(true);
        const logContent = fs.readFileSync(logFile, "utf-8");
        expect(logContent).toContain("404 Not Found: POST /some/unhandled/route");
        expect(logContent).toContain("Body (parsed JSON)");
        expect(logContent).toContain('"test": "payload"');
    });
});
|
|
157
|
+
// ── Artifact v4 upload / download (Twirp + Azure Block Blob protocol) ──────────
// Exercises the mock implementation of the GitHub Actions artifact v4 flow:
// Twirp RPCs for create/finalize/list plus Azure-style signed blob URLs.
describe("Artifact v4 upload/download", () => {
    let server;
    beforeAll(async () => {
        state.reset();
        const app = await bootstrapAndReturnApp();
        // Ephemeral port; PORT is module-level so request()/fetch URLs work.
        return new Promise((resolve) => {
            server = app.listen(0, () => {
                const address = server.server?.address();
                PORT = address.port;
                resolve();
            });
        });
    });
    beforeEach(() => {
        // Full reset (not just jobs): artifact containers must not leak between tests.
        state.reset();
    });
    afterAll(async () => {
        await new Promise((resolve) => {
            if (server?.server) {
                server.server.close(() => resolve());
            }
            else {
                resolve();
            }
        });
    });
    it("getBaseUrl strips \\r and \\n from Host header (direct unit test)", () => {
        // Node.js's own http client validates headers and won't let us send \r via HTTP.
        // So we test getBaseUrl() directly with a mock request object — this is the
        // exact same path the runner hits when it sends a dirty Host header.
        // Root cause: HTTP/1.1 runners can include a trailing \r in the Host header
        // (part of the \r\n line terminator). If we embed it in the signed URL, the
        // @actions/artifact toolkit triggers "Parse Error: Invalid header value char".
        const mockReq = { headers: { host: `localhost:${PORT}\r`, "x-forwarded-proto": undefined } };
        const url = getBaseUrl(mockReq);
        expect(url).not.toMatch(/[\r\n]/);
        expect(url).toBe(`http://localhost:${PORT}`);
        // Also test \n and mixed whitespace variants
        const mockReq2 = { headers: { host: ` 127.0.0.1:8910\r\n `, "x-forwarded-proto": "http" } };
        const url2 = getBaseUrl(mockReq2);
        expect(url2).not.toMatch(/[\r\n]/);
        expect(url2).toBe("http://127.0.0.1:8910");
    });
    it("full v4 artifact lifecycle: create → block upload → commit → finalize → download", async () => {
        const baseUrl = `http://localhost:${PORT}`;
        const artifactName = "my-v4-artifact";
        const fileContent = Buffer.from("hello from artifact v4!");
        // 1. CreateArtifact (Twirp)
        let res = await fetch(`${baseUrl}/twirp/github.actions.results.api.v1.ArtifactService/CreateArtifact`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ name: artifactName, version: 4 }),
        });
        expect(res.status).toBe(200);
        const { signedUploadUrl } = (await res.json());
        expect(signedUploadUrl).toMatch(/\/_apis\/artifactblob\//);
        // Crucially, no rogue characters
        expect(signedUploadUrl).not.toMatch(/[\r\n]/);
        // Extract containerId from the URL
        const containerIdMatch = signedUploadUrl.match(/\/artifactblob\/(\d+)\//);
        expect(containerIdMatch).toBeTruthy();
        const _containerId = containerIdMatch[1];
        // 2. Upload a block (Azure Block Blob protocol: PUT ?comp=block&blockid=X)
        const blockId = Buffer.from("block-0001").toString("base64url");
        res = await fetch(`${signedUploadUrl}?comp=block&blockid=${blockId}`, {
            method: "PUT",
            headers: { "Content-Type": "application/octet-stream" },
            body: fileContent,
        });
        expect(res.status).toBe(201);
        // 3. Commit the block list (PUT ?comp=blocklist with XML body)
        const blockListXml = `<?xml version="1.0" encoding="utf-8"?><BlockList><Latest>${blockId}</Latest></BlockList>`;
        res = await fetch(`${signedUploadUrl}?comp=blocklist`, {
            method: "PUT",
            headers: { "Content-Type": "application/xml" },
            body: blockListXml,
        });
        expect(res.status).toBe(201);
        // 4. FinalizeArtifact (Twirp)
        res = await fetch(`${baseUrl}/twirp/github.actions.results.api.v1.ArtifactService/FinalizeArtifact`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ name: artifactName }),
        });
        expect(res.status).toBe(200);
        const finalizeBody = (await res.json());
        expect(finalizeBody.ok).toBe(true);
        // 5. GetSignedArtifactURL (Twirp)
        res = await fetch(`${baseUrl}/twirp/github.actions.results.api.v1.ArtifactService/GetSignedArtifactURL`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ name: artifactName }),
        });
        expect(res.status).toBe(200);
        const { signedUrl } = (await res.json());
        expect(signedUrl).toMatch(/\/_apis\/artifactblob\//);
        expect(signedUrl).not.toMatch(/[\r\n]/);
        // 6. Download the blob and verify content roundtrips
        res = await fetch(signedUrl);
        expect(res.status).toBe(200);
        const downloaded = Buffer.from(await res.arrayBuffer());
        expect(downloaded).toEqual(fileContent);
    });
    it("ListArtifacts returns uploaded artifacts", async () => {
        const baseUrl = `http://localhost:${PORT}`;
        // Create + finalize an artifact
        let res = await fetch(`${baseUrl}/twirp/github.actions.results.api.v1.ArtifactService/CreateArtifact`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ name: "list-test-artifact", version: 4 }),
        });
        const { signedUploadUrl } = (await res.json());
        // Single-block (no comp param) upload
        await fetch(signedUploadUrl, {
            method: "PUT",
            headers: { "Content-Type": "application/octet-stream" },
            body: Buffer.from("data"),
        });
        res = await fetch(`${baseUrl}/twirp/github.actions.results.api.v1.ArtifactService/FinalizeArtifact`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ name: "list-test-artifact" }),
        });
        expect(res.status).toBe(200);
        // ListArtifacts with name filter
        res = await fetch(`${baseUrl}/twirp/github.actions.results.api.v1.ArtifactService/ListArtifacts`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ nameFilter: "list-test-artifact" }),
        });
        expect(res.status).toBe(200);
        const { artifacts } = (await res.json());
        expect(artifacts).toHaveLength(1);
        expect(artifacts[0].name).toBe("list-test-artifact");
    });
    it("should handle job request renewal on bare path (actual runner behavior)", async () => {
        const res = await request("PATCH", "/_apis/distributedtask/jobrequests", {
            requestId: 1,
        });
        expect(res.status).toBe(200);
        expect(res.body.lockedUntil).toBeDefined();
        // Renewal must push the lock into the future.
        expect(new Date(res.body.lockedUntil).getTime()).toBeGreaterThan(Date.now());
    });
    it("should handle job request renewal on parameterized path", async () => {
        const res = await request("PATCH", "/_apis/distributedtask/jobrequests/1", {
            requestId: 1,
        });
        expect(res.status).toBe(200);
        expect(res.body.lockedUntil).toBeDefined();
        expect(new Date(res.body.lockedUntil).getTime()).toBeGreaterThan(Date.now());
    });
    it("should handle job request finish (PATCH with result + finishTime)", async () => {
        const finishTime = new Date().toISOString();
        const res = await request("PATCH", "/_apis/distributedtask/jobrequests", {
            requestId: 1,
            result: "succeeded",
            finishTime,
        });
        expect(res.status).toBe(200);
        expect(res.body.result).toBe("succeeded");
        expect(res.body.finishTime).toBe(finishTime);
        // Finish requests should NOT get lockedUntil injected
        expect(res.body.lockedUntil).toBeUndefined();
    });
});
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Intentionally empty: emitted by the TypeScript compiler so this file is treated as an ES module.
export {};
|
package/dist/simulate.js
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { fileURLToPath } from "node:url";
|
|
4
|
+
import crypto from "node:crypto";
|
|
5
|
+
import { config } from "./config.js";
|
|
6
|
+
// ESM has no __filename/__dirname; reconstruct them from import.meta.url.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
/**
 * Simulates delivery of a GitHub webhook event to the DTU mock server.
 *
 * The event name comes from argv[2] (default "workflow_job") and its payload
 * is read from `../events/<event>.json`. For workflow_job events the payload's
 * `workflow_job` object is POSTed to the mock server's /_dtu/seed endpoint
 * (best-effort: seeding failures only warn). Exits with code 1 when the
 * payload file does not exist, listing the available event names.
 */
async function main() {
    const event = process.argv[2] || "workflow_job";
    // Events are in ../events relative to src/
    const payloadPath = path.join(__dirname, "..", "events", `${event}.json`);
    if (!fs.existsSync(payloadPath)) {
        const availableEvents = fs
            .readdirSync(path.join(__dirname, "..", "events"))
            .map((f) => f.replace(".json", ""));
        console.error(`Payload not found: ${payloadPath}`);
        console.error(`Available events: ${availableEvents.join(", ")}`);
        process.exit(1);
    }
    const rawPayload = fs.readFileSync(payloadPath, "utf-8");
    const payload = JSON.parse(rawPayload);
    const deliveryId = crypto.randomUUID();
    // 1. Seeding Logic for DTU
    if (event === "workflow_job" && payload.workflow_job) {
        console.log(`[DTU] Seeding mock server at ${config.DTU_URL}...`);
        try {
            const seedResponse = await fetch(`${config.DTU_URL}/_dtu/seed`, {
                method: "POST",
                headers: { "Content-Type": "application/json" },
                body: JSON.stringify(payload.workflow_job),
            });
            if (!seedResponse.ok) {
                console.warn(`[DTU] Warning: Failed to seed mock server: ${seedResponse.status} ${seedResponse.statusText}`);
            }
            else {
                console.log("[DTU] Mock server seeded successfully.");
            }
        }
        catch {
            // Best-effort seeding: an unreachable mock server should not abort the simulation.
            console.warn(`[DTU] Warning: Could not connect to mock server at ${config.DTU_URL}. Is it running?`);
        }
    }
    console.log(`[DTU] Simulating "${event}" event...`);
    console.log(`[DTU] Delivery ID: ${deliveryId}`);
    console.log(`[DTU] Simulation complete.`);
}
// Don't leave the promise floating: an unexpected failure (e.g. malformed
// JSON in the payload file) must be reported and fail the process.
main().catch((err) => {
    console.error(err);
    process.exit(1);
});
|
package/dist/types.d.ts
ADDED
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
/** A single step inside a job message (action reference plus its inputs). */
export interface JobStep {
    Id: string;
    Name: string;
    Type: string;
    // What the step executes (e.g. a repository action reference); shape varies by Type.
    Reference: {
        Type: string;
        [key: string]: any;
    };
    // `with:` inputs for the step, when present.
    Inputs?: {
        [key: string]: string;
    };
    ContextData?: any;
    [key: string]: any;
}
/** A job-scoped variable; secrets are masked by the runner when IsSecret is true. */
export interface JobVariable {
    Value: string;
    IsSecret: boolean;
}
/**
 * Tagged context value in the runner's wire encoding: `t` is the type tag and
 * exactly one of the payload fields (s/b/n/a/d/v) carries the value.
 */
export interface ContextDataValue {
    t: number;
    s?: string;
    b?: boolean;
    n?: number;
    a?: ContextDataValue[];
    d?: ContextDataItem[];
    v?: any;
}
/** A key/value pair inside a dictionary-typed ContextDataValue. */
export interface ContextDataItem {
    k: string;
    v: ContextDataValue;
}
/** Top-level context map (e.g. `github`, `needs`) keyed by context name. */
export interface ContextData {
    [key: string]: ContextDataValue;
}
/** A service endpoint the runner can call, with its bearer credentials. */
export interface Endpoint {
    Name: string;
    Url: string;
    Authorization: {
        Parameters: {
            AccessToken: string;
        };
        Scheme: string;
    };
}
/** String-valued repository metadata attached to a JobRepository. */
export interface RepositoryProperties {
    id: string;
    name: string;
    fullName: string;
    repoFullName?: string;
    owner?: string;
    defaultBranch?: string;
    cloneUrl?: string;
    [key: string]: string | undefined;
}
/** A repository resource referenced by the job (what the runner checks out). */
export interface JobRepository {
    Alias: string;
    Id: string;
    Type: string;
    Version: string;
    Url: string;
    Properties: RepositoryProperties;
    [key: string]: any;
}
/** All external resources the job may use. */
export interface JobResources {
    Repositories: JobRepository[];
    Endpoints: Endpoint[];
}
/** The working directory layout for the job. */
export interface JobWorkspace {
    Path: string;
}
/**
 * The full job message delivered to a runner via the long-poll message queue
 * (the Body of a "PipelineAgentJobRequest" MessageResponse, once parsed).
 */
export interface PipelineAgentJobRequest {
    MessageType: "PipelineAgentJobRequest";
    Plan: {
        PlanId: string;
        PlanType: string;
        ScopeId: string;
    };
    Timeline: {
        Id: string;
        ChangeId: number;
    };
    JobId: string;
    RequestId: number;
    JobDisplayName: string;
    JobName: string;
    Steps: JobStep[];
    Variables: {
        [key: string]: JobVariable;
    };
    ContextData: ContextData;
    Resources: JobResources;
    Workspace: JobWorkspace;
    // Connection back to the orchestration service, with its access token.
    SystemVssConnection: {
        Url: string;
        Authorization: {
            Parameters: {
                AccessToken: string;
            };
            Scheme: string;
        };
    };
    Actions: any[];
    MaskHints: any[];
    EnvironmentVariables: any[];
    JobContainer?: object;
}
/** Envelope returned by the message long-poll; Body is a JSON-encoded string. */
export interface MessageResponse {
    MessageId: number;
    MessageType: string;
    Body: string;
}
|
package/dist/types.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Intentionally empty: emitted by the TypeScript compiler so this file is treated as an ES module.
export {};
|
package/package.json
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "dtu-github-actions",
|
|
3
|
+
"version": "0.0.0",
|
|
4
|
+
"description": "Digital Twin Universe - GitHub Actions Mock and Simulation",
|
|
5
|
+
"keywords": [],
|
|
6
|
+
"license": "FSL-1.1-MIT",
|
|
7
|
+
"author": "",
|
|
8
|
+
"files": [
|
|
9
|
+
"dist",
|
|
10
|
+
"README.md"
|
|
11
|
+
],
|
|
12
|
+
"type": "module",
|
|
13
|
+
"main": "./dist/server/start.js",
|
|
14
|
+
"exports": {
|
|
15
|
+
".": "./dist/server/start.js",
|
|
16
|
+
"./ephemeral": "./dist/ephemeral.js"
|
|
17
|
+
},
|
|
18
|
+
"publishConfig": {
|
|
19
|
+
"access": "public"
|
|
20
|
+
},
|
|
21
|
+
"scripts": {
|
|
22
|
+
"dev": "(pnpm dlx kill-port 8910 || true) && tsx src/server/start.ts",
|
|
23
|
+
"simulate": "tsx src/simulate.ts",
|
|
24
|
+
"build": "tsgo",
|
|
25
|
+
"typecheck": "tsgo",
|
|
26
|
+
"test": "vitest run"
|
|
27
|
+
},
|
|
28
|
+
"dependencies": {
|
|
29
|
+
"body-parser": "^2.2.2",
|
|
30
|
+
"jsonc-parser": "^3.3.1",
|
|
31
|
+
"minimatch": "^10.2.4",
|
|
32
|
+
"polka": "^0.5.2",
|
|
33
|
+
"zod": "^3.24.1"
|
|
34
|
+
},
|
|
35
|
+
"devDependencies": {
|
|
36
|
+
"@types/body-parser": "^1.19.6",
|
|
37
|
+
"@types/node": "^22.10.2",
|
|
38
|
+
"@types/polka": "^0.5.8",
|
|
39
|
+
"execa": "^9.6.1",
|
|
40
|
+
"tsx": "^4.19.2",
|
|
41
|
+
"vitest": "^4.0.18"
|
|
42
|
+
}
|
|
43
|
+
}
|