dtu-github-actions 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/config.d.ts +39 -0
- package/dist/config.js +29 -0
- package/dist/ephemeral.d.ts +16 -0
- package/dist/ephemeral.js +48 -0
- package/dist/server/index.d.ts +4 -0
- package/dist/server/index.js +326 -0
- package/dist/server/logger.d.ts +4 -0
- package/dist/server/logger.js +56 -0
- package/dist/server/routes/actions/generators.d.ts +25 -0
- package/dist/server/routes/actions/generators.js +313 -0
- package/dist/server/routes/actions/index.d.ts +2 -0
- package/dist/server/routes/actions/index.js +575 -0
- package/dist/server/routes/artifacts.d.ts +2 -0
- package/dist/server/routes/artifacts.js +332 -0
- package/dist/server/routes/cache.d.ts +2 -0
- package/dist/server/routes/cache.js +230 -0
- package/dist/server/routes/cache.test.d.ts +1 -0
- package/dist/server/routes/cache.test.js +229 -0
- package/dist/server/routes/dtu.d.ts +3 -0
- package/dist/server/routes/dtu.js +141 -0
- package/dist/server/routes/github.d.ts +2 -0
- package/dist/server/routes/github.js +109 -0
- package/dist/server/start.d.ts +1 -0
- package/dist/server/start.js +22 -0
- package/dist/server/store.d.ts +44 -0
- package/dist/server/store.js +100 -0
- package/dist/server.js +1179 -0
- package/dist/server.test.d.ts +1 -0
- package/dist/server.test.js +322 -0
- package/dist/simulate.d.ts +1 -0
- package/dist/simulate.js +47 -0
- package/dist/types.d.ts +111 -0
- package/dist/types.js +1 -0
- package/package.json +43 -0
|
@@ -0,0 +1,332 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { state } from "../store.js";
|
|
4
|
+
import { getBaseUrl } from "./dtu.js";
|
|
5
|
+
import { config } from "../../config.js";
|
|
6
|
+
// On-disk directory where uploaded artifact blobs are persisted, nested
// under the configured DTU cache directory.
const ARTIFACT_DIR = path.join(config.DTU_CACHE_DIR, "artifacts");
// Create it eagerly at module load so route handlers can write without checks.
if (!fs.existsSync(ARTIFACT_DIR)) {
    fs.mkdirSync(ARTIFACT_DIR, { recursive: true });
}
// Twirp service prefix used by @actions/upload-artifact@v4 /
// @actions/download-artifact@v4 clients (ArtifactService RPCs).
const TWIRP_PREFIX = "/twirp/github.actions.results.api.v1.ArtifactService";
|
|
11
|
+
/**
 * Registers all artifact-related HTTP routes on the given app.
 *
 * Two API surfaces are exposed:
 *  - Twirp endpoints consumed by actions/upload-artifact@v4 and
 *    actions/download-artifact@v4 (CreateArtifact / FinalizeArtifact /
 *    ListArtifacts / GetSignedArtifactURL), plus the Azure-block-blob-style
 *    signed-URL upload/download targets those RPCs hand out.
 *  - Simple REST endpoints under /_apis/artifacts used by curl-based
 *    smoke/e2e tests.
 *
 * In-flight uploads live in state.pendingArtifacts (keyed by numeric
 * containerId); finalized artifacts live in state.artifacts (keyed by name).
 *
 * Fix vs. previous revision: ListArtifacts no longer crashes with an uncaught
 * ENOENT when an artifact's backing file has been removed from disk — it now
 * reports size 0 for such entries.
 *
 * @param {object} app - Express-like app exposing get/post/put/patch.
 */
export function registerArtifactRoutes(app) {
    // ── Twirp endpoints (used by actions/upload-artifact@v4 & actions/download-artifact@v4) ──
    // CreateArtifact — returns a signed upload URL
    app.post(`${TWIRP_PREFIX}/CreateArtifact`, (req, res) => {
        const { name, version } = req.body || {};
        if (!name) {
            res.writeHead(400, { "Content-Type": "application/json" });
            return res.end(JSON.stringify({ msg: "Missing artifact name" }));
        }
        const containerId = Math.floor(Math.random() * 1000000);
        const baseUrl = getBaseUrl(req);
        state.pendingArtifacts.set(containerId, { name, files: new Map() });
        console.log(`[DTU] CreateArtifact "${name}" (v${version}) → container ${containerId}`);
        res.writeHead(200, { "Content-Type": "application/json" });
        res.end(JSON.stringify({
            ok: true,
            signedUploadUrl: `${baseUrl}/_apis/artifactblob/${containerId}/upload`,
        }));
    });
    // FinalizeArtifact — mark upload as complete: promote the pending entry
    // (matched by name) into state.artifacts.
    app.post(`${TWIRP_PREFIX}/FinalizeArtifact`, (req, res) => {
        const { name } = req.body || {};
        if (!name) {
            res.writeHead(400, { "Content-Type": "application/json" });
            return res.end(JSON.stringify({ msg: "Missing artifact name" }));
        }
        let foundId = null;
        for (const [id, pending] of state.pendingArtifacts) {
            if (pending.name === name) {
                foundId = id;
                break;
            }
        }
        if (foundId === null) {
            console.warn(`[DTU] FinalizeArtifact: not found "${name}"`);
            res.writeHead(404, { "Content-Type": "application/json" });
            return res.end(JSON.stringify({ ok: false }));
        }
        const pending = state.pendingArtifacts.get(foundId);
        state.artifacts.set(name, { containerId: foundId, files: new Map(pending.files) });
        state.pendingArtifacts.delete(foundId);
        console.log(`[DTU] FinalizeArtifact "${name}" (container ${foundId})`);
        res.writeHead(200, { "Content-Type": "application/json" });
        res.end(JSON.stringify({ ok: true, artifactId: String(foundId) }));
    });
    // ListArtifacts — find artifacts by name
    app.post(`${TWIRP_PREFIX}/ListArtifacts`, (req, res) => {
        const { nameFilter } = req.body || {};
        // The Twirp client may send nameFilter as a plain string or as a
        // wrapped { value } object — accept both.
        const filterName = typeof nameFilter === "string" ? nameFilter : nameFilter?.value;
        console.log(`[DTU] ListArtifacts (filter: ${filterName || "none"})`);
        const artifacts = [];
        for (const [name, art] of state.artifacts) {
            if (filterName && name !== filterName) {
                continue;
            }
            // FIX: statSync on a path that was removed from disk throws
            // ENOENT and previously crashed this handler — check existence
            // first and fall back to size 0.
            const firstFile = art.files.size > 0 ? Array.from(art.files.values())[0] : null;
            const size = firstFile && fs.existsSync(firstFile) ? fs.statSync(firstFile).size : 0;
            artifacts.push({
                workflowRunBackendId: "00000000-0000-0000-0000-000000000001",
                databaseId: String(art.containerId),
                name,
                size: String(size),
                createdAt: new Date().toISOString(),
            });
        }
        res.writeHead(200, { "Content-Type": "application/json" });
        res.end(JSON.stringify({ artifacts }));
    });
    // GetSignedArtifactURL — return a download URL for an artifact
    app.post(`${TWIRP_PREFIX}/GetSignedArtifactURL`, (req, res) => {
        const { name } = req.body || {};
        const baseUrl = getBaseUrl(req);
        const artifact = state.artifacts.get(name);
        if (!artifact) {
            console.warn(`[DTU] GetSignedArtifactURL: not found "${name}"`);
            res.writeHead(404, { "Content-Type": "application/json" });
            return res.end(JSON.stringify({ signedUrl: "" }));
        }
        console.log(`[DTU] GetSignedArtifactURL "${name}" → container ${artifact.containerId}`);
        res.writeHead(200, { "Content-Type": "application/json" });
        res.end(JSON.stringify({
            signedUrl: `${baseUrl}/_apis/artifactblob/${artifact.containerId}/download`,
        }));
    });
    // ── Blob endpoints (signed URL targets) ──
    // Block storage for Azure block blob protocol
    // @actions/artifact v4 uses: PUT ?comp=block&blockid=X (upload block) then PUT ?comp=blocklist (commit)
    const blockStore = new Map(); // containerId → blockId → data
    // Upload blob (PUT from signed URL) — implements Azure Block Blob protocol
    app.put("/_apis/artifactblob/:containerId/upload", async (req, res) => {
        const containerId = req.params.containerId;
        const containerIdNum = parseInt(containerId, 10);
        const comp = req.query.comp;
        const blockId = req.query.blockid;
        const pending = state.pendingArtifacts.get(containerIdNum);
        if (!pending) {
            console.warn(`[DTU] Blob upload to invalid container: ${containerId}`);
            res.writeHead(404);
            return res.end();
        }
        // Collect body (may be streaming/chunked). If a body-parser already
        // buffered it, use that; otherwise drain the request stream.
        let buffer;
        try {
            if (Buffer.isBuffer(req.body)) {
                buffer = req.body;
            }
            else if (typeof req.body === "string") {
                buffer = Buffer.from(req.body);
            }
            else {
                const chunks = [];
                for await (const chunk of req) {
                    chunks.push(typeof chunk === "string" ? Buffer.from(chunk) : chunk);
                }
                buffer = Buffer.concat(chunks);
            }
        }
        catch (e) {
            console.error("[DTU] Failed to read blob upload body:", e);
            res.writeHead(500);
            return res.end();
        }
        if (comp === "block" && blockId) {
            // Stage a block — held in memory until the blocklist commit.
            if (!blockStore.has(containerId)) {
                blockStore.set(containerId, new Map());
            }
            blockStore.get(containerId).set(blockId, buffer);
            console.log(`[DTU] Staged block ${blockId} for container ${containerId} (${buffer.length} bytes)`);
            res.writeHead(201);
            return res.end();
        }
        if (comp === "blocklist") {
            // Commit: assemble staged blocks in order from XML block list
            const blocks = blockStore.get(containerId) ?? new Map();
            const diskPath = path.join(ARTIFACT_DIR, `${containerId}_blob.zip`);
            // Parse blockid list from XML: <Latest>blockid</Latest> entries
            const xml = buffer.toString("utf8");
            const ids = [];
            for (const m of xml.matchAll(/<Latest>([^<]+)<\/Latest>/g)) {
                ids.push(m[1]);
            }
            const assembled = ids.length > 0
                ? Buffer.concat(ids.map((id) => blocks.get(id) ?? Buffer.alloc(0)))
                : Buffer.concat(Array.from(blocks.values())); // fallback: concat all in insertion order
            fs.writeFileSync(diskPath, assembled);
            blockStore.delete(containerId);
            pending.files.set("artifact.zip", diskPath);
            console.log(`[DTU] Committed block blob for container ${containerId} (${assembled.length} bytes, ${ids.length} blocks)`);
            res.writeHead(201);
            return res.end();
        }
        // Single-block upload (no comp param) — write directly
        const diskPath = path.join(ARTIFACT_DIR, `${containerIdNum}_blob.zip`);
        fs.writeFileSync(diskPath, buffer);
        pending.files.set("artifact.zip", diskPath);
        console.log(`[DTU] Blob uploaded to container ${containerIdNum} (${buffer.length} bytes)`);
        res.writeHead(201, { "Content-Type": "application/json" });
        res.end(JSON.stringify({ ok: true }));
    });
    // Download blob (GET from signed URL) — streams the first file of the
    // artifact whose containerId matches.
    app.get("/_apis/artifactblob/:containerId/download", (req, res) => {
        const containerId = parseInt(req.params.containerId, 10);
        let found = null;
        for (const art of state.artifacts.values()) {
            if (art.containerId === containerId) {
                found = art;
                break;
            }
        }
        if (!found || found.files.size === 0) {
            console.warn(`[DTU] Blob download: container ${containerId} not found`);
            res.writeHead(404);
            return res.end();
        }
        const diskPath = Array.from(found.files.values())[0];
        if (!fs.existsSync(diskPath)) {
            console.warn(`[DTU] Blob download: file missing ${diskPath}`);
            res.writeHead(404);
            return res.end();
        }
        console.log(`[DTU] Blob download from container ${containerId}`);
        const stat = fs.statSync(diskPath);
        res.writeHead(200, {
            "Content-Type": "application/zip",
            "Content-Length": stat.size,
        });
        fs.createReadStream(diskPath).pipe(res);
    });
    // ── Simple REST endpoints (used by curl-based smoke/e2e tests) ──
    // Create artifact container
    app.post("/_apis/artifacts", (req, res) => {
        const { name } = req.body || {};
        if (!name) {
            res.writeHead(400, { "Content-Type": "application/json" });
            return res.end(JSON.stringify({ error: "Missing artifact name" }));
        }
        const containerId = Math.floor(Math.random() * 1000000);
        const baseUrl = getBaseUrl(req);
        state.pendingArtifacts.set(containerId, { name, files: new Map() });
        console.log(`[DTU] Created artifact container ${containerId} for "${name}"`);
        res.writeHead(201, { "Content-Type": "application/json" });
        res.end(JSON.stringify({
            containerId,
            name,
            fileContainerResourceUrl: `${baseUrl}/_apis/artifacts/${containerId}`,
        }));
    });
    // Upload file to artifact container (raw body expected)
    app.put("/_apis/artifacts/:containerId", (req, res) => {
        const containerId = parseInt(req.params.containerId, 10);
        const itemPath = req.query.itemPath || "artifact.bin";
        const pending = state.pendingArtifacts.get(containerId);
        if (!pending) {
            console.warn(`[DTU] Artifact upload to invalid container: ${containerId}`);
            res.writeHead(404);
            return res.end();
        }
        console.log(`[DTU] Uploading artifact file "${itemPath}" to container ${containerId}`);
        // basename() prevents path traversal via itemPath.
        const diskPath = path.join(ARTIFACT_DIR, `${containerId}_${path.basename(itemPath)}`);
        try {
            if (Buffer.isBuffer(req.body) || typeof req.body === "string") {
                const buffer = Buffer.isBuffer(req.body) ? req.body : Buffer.from(req.body);
                fs.writeFileSync(diskPath, buffer);
            }
            else {
                res.writeHead(500, { "Content-Type": "text/plain" });
                return res.end("Expected raw buffer/string body");
            }
            pending.files.set(itemPath, diskPath);
            res.writeHead(200, { "Content-Type": "application/json" });
            res.end(JSON.stringify({ ok: true }));
        }
        catch (e) {
            console.error("[DTU] Failed to write artifact file:", e);
            res.writeHead(500);
            res.end();
        }
    });
    // Finalize artifact — REST counterpart of the Twirp FinalizeArtifact.
    app.patch("/_apis/artifacts", (req, res) => {
        const { artifactName } = req.body || {};
        if (!artifactName) {
            res.writeHead(400, { "Content-Type": "application/json" });
            return res.end(JSON.stringify({ error: "Missing artifactName" }));
        }
        let foundId = null;
        for (const [id, pending] of state.pendingArtifacts) {
            if (pending.name === artifactName) {
                foundId = id;
                break;
            }
        }
        if (foundId === null) {
            console.warn(`[DTU] Finalize artifact not found: "${artifactName}"`);
            res.writeHead(404);
            return res.end();
        }
        const pending = state.pendingArtifacts.get(foundId);
        state.artifacts.set(artifactName, {
            containerId: foundId,
            files: new Map(pending.files),
        });
        state.pendingArtifacts.delete(foundId);
        console.log(`[DTU] Finalized artifact "${artifactName}" (container ${foundId}, ${pending.files.size} file(s))`);
        res.writeHead(200, { "Content-Type": "application/json" });
        res.end(JSON.stringify({ ok: true, containerId: foundId }));
    });
    // List / get artifact by name
    app.get("/_apis/artifacts", (req, res) => {
        const artifactName = req.query.artifactName;
        const baseUrl = getBaseUrl(req);
        if (artifactName) {
            const artifact = state.artifacts.get(artifactName);
            if (!artifact) {
                res.writeHead(200, { "Content-Type": "application/json" });
                return res.end(JSON.stringify({ count: 0, value: [] }));
            }
            res.writeHead(200, { "Content-Type": "application/json" });
            return res.end(JSON.stringify({
                count: 1,
                value: [
                    {
                        containerId: artifact.containerId,
                        name: artifactName,
                        fileContainerResourceUrl: `${baseUrl}/_apis/artifactfiles/${artifact.containerId}`,
                    },
                ],
            }));
        }
        const value = Array.from(state.artifacts.entries()).map(([name, art]) => ({
            containerId: art.containerId,
            name,
            fileContainerResourceUrl: `${baseUrl}/_apis/artifactfiles/${art.containerId}`,
        }));
        res.writeHead(200, { "Content-Type": "application/json" });
        res.end(JSON.stringify({ count: value.length, value }));
    });
    // Download artifact file (REST) — streams the first file of the matching
    // container as an octet-stream.
    app.get("/_apis/artifactfiles/:containerId", (req, res) => {
        const containerId = parseInt(req.params.containerId, 10);
        let found = null;
        for (const art of state.artifacts.values()) {
            if (art.containerId === containerId) {
                found = art;
                break;
            }
        }
        if (!found || found.files.size === 0) {
            res.writeHead(404);
            return res.end();
        }
        const [, diskPath] = Array.from(found.files.entries())[0];
        if (!fs.existsSync(diskPath)) {
            res.writeHead(404);
            return res.end();
        }
        res.writeHead(200, {
            "Content-Type": "application/octet-stream",
            "Content-Length": fs.statSync(diskPath).size,
        });
        fs.createReadStream(diskPath).pipe(res);
    });
}
|
|
@@ -0,0 +1,230 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { execSync } from "node:child_process";
|
|
4
|
+
import { state } from "../store.js";
|
|
5
|
+
import { getBaseUrl } from "./dtu.js";
|
|
6
|
+
import { config } from "../../config.js";
|
|
7
|
+
// Ensure DTU has a temp dir for caching
|
|
8
|
+
// Ensure DTU has a temp dir for caching
const CACHE_DIR = config.DTU_CACHE_DIR;
if (!fs.existsSync(CACHE_DIR)) {
    fs.mkdirSync(CACHE_DIR, { recursive: true });
}
// Pre-built empty tar.gz to serve as a synthetic cache hit for virtual keys.
// The runner downloads it, extracts nothing, and marks the key as a primary hit
// (so it skips the save step entirely). This avoids 60+ seconds of unnecessary
// gzip compression for bind-mounted paths like the pnpm store.
const EMPTY_TAR_GZ_PATH = path.join(CACHE_DIR, "__empty__.tar.gz");
if (!fs.existsSync(EMPTY_TAR_GZ_PATH)) {
    // FIX: quote the interpolated path — an unquoted path breaks (or allows
    // shell injection) when DTU_CACHE_DIR contains spaces or metacharacters.
    execSync(`tar -czf "${EMPTY_TAR_GZ_PATH}" -T /dev/null`);
}
const VIRTUAL_CACHE_ID = 0; // sentinel ID for virtual (no-op) caches
|
|
21
|
+
/**
 * Registers the GitHub Actions cache API routes on the given app.
 *
 * Implements the actions/cache protocol: check (GET), reserve (POST),
 * chunked upload (PATCH with Content-Range), commit (POST :cacheId) and
 * download (GET artifacts/:cacheId). "Virtual" keys (bind-mounted paths,
 * see state.isVirtualCacheKey) short-circuit to a synthetic hit backed by
 * a pre-built empty tar.gz so the runner skips tar work entirely.
 *
 * Fixes vs. previous revision:
 *  - `keys` query param may arrive as an array (repeated parameter) — it is
 *    normalized before .split(), which previously threw.
 *  - Commit defaults `size` to 0 when the body omits it, instead of storing
 *    undefined (which was silently dropped from the persisted JSON).
 *
 * @param {object} app - Express-like app exposing get/post/patch.
 */
export function registerCacheRoutes(app) {
    // 1. Check if cache exists
    const checkCacheHandler = (req, res) => {
        // Express parses a repeated ?keys=…&keys=… as an array; calling
        // .split() on it would throw — normalize to a single string first.
        const rawKeys = req.query.keys;
        const keys = (Array.isArray(rawKeys) ? rawKeys.join(",") : rawKeys || "")
            .split(",")
            .map((k) => k.trim());
        const version = req.query.version;
        console.log(`[DTU] Checking cache for keys: ${keys.join(", ")} (version: ${version})`);
        for (const key of keys) {
            if (!key) {
                continue;
            }
            // Virtual key: bind-mounted path already on disk — return a synthetic hit
            // so the runner skips both the tar extraction and the tar save.
            if (state.isVirtualCacheKey(key)) {
                console.log(`[DTU] Virtual cache hit for key: ${key} (skip tar)`);
                res.writeHead(200, { "Content-Type": "application/json" });
                return res.end(JSON.stringify({
                    result: "hit",
                    archiveLocation: `${getBaseUrl(req)}/_apis/artifactcache/artifacts/${VIRTUAL_CACHE_ID}`,
                    cacheKey: key,
                }));
            }
            const entry = state.caches.get(key);
            if (entry && entry.version === version) {
                // Validate archive file still exists on disk; evict the entry
                // and keep scanning the remaining keys if it is gone.
                const cacheIdMatch = entry.archiveLocation.match(/artifacts\/(\d+)/);
                if (cacheIdMatch) {
                    const filePath = path.join(CACHE_DIR, `cache_${cacheIdMatch[1]}.tar.gz`);
                    if (!fs.existsSync(filePath)) {
                        console.warn(`[DTU] Evicting stale cache "${key}" — file missing: ${filePath}`);
                        state.caches.delete(key);
                        state.saveCachesToDisk();
                        continue;
                    }
                }
                console.log(`[DTU] Cache hit for key: ${key}`);
                // Construct archiveLocation dynamically from the current request so stale
                // hostnames persisted in caches.json don't cause download failures.
                const archiveLocation = cacheIdMatch
                    ? `${getBaseUrl(req)}/_apis/artifactcache/artifacts/${cacheIdMatch[1]}`
                    : entry.archiveLocation;
                res.writeHead(200, { "Content-Type": "application/json" });
                return res.end(JSON.stringify({
                    result: "hit",
                    archiveLocation,
                    cacheKey: key,
                }));
            }
        }
        console.log(`[DTU] Cache miss for keys: ${keys.join(", ")}`);
        res.writeHead(204);
        res.end();
    };
    app.get("/_apis/artifactcache/caches", checkCacheHandler);
    app.get("/_apis/artifactcache/cache", checkCacheHandler);
    // 2. Reserve cache space (create pending cache)
    app.post("/_apis/artifactcache/caches", (req, res) => {
        const { key, version } = req.body;
        console.log(`[DTU] Reserving cache for key: ${key} (version: ${version})`);
        // Virtual key: acknowledge immediately without touching disk
        if (state.isVirtualCacheKey(key)) {
            console.log(`[DTU] Virtual cache reservation for key: ${key} — no-op`);
            res.writeHead(201, { "Content-Type": "application/json" });
            return res.end(JSON.stringify({ cacheId: VIRTUAL_CACHE_ID }));
        }
        // Immutable: reject if this key+version is already cached (committed)
        const existing = state.caches.get(key);
        if (existing && existing.version === version) {
            console.log(`[DTU] Cache already exists for key: ${key} — skipping reservation`);
            res.writeHead(409, { "Content-Type": "application/json" });
            return res.end(JSON.stringify({ message: "Cache already exists" }));
        }
        // Also reject if another job has already reserved this key+version (in-flight)
        // This prevents multiple parallel jobs from all winning a reservation for the
        // same cache key, generating redundant tar processes and orphaned temp files.
        for (const [, pending] of state.pendingCaches) {
            if (pending.key === key && pending.version === version) {
                console.log(`[DTU] Cache reservation in-flight for key: ${key} — another job is already saving it`);
                res.writeHead(409, { "Content-Type": "application/json" });
                return res.end(JSON.stringify({ message: "Cache already exists" }));
            }
        }
        // Assign a unique cache ID
        const cacheId = Math.floor(Math.random() * 1000000);
        const tempPath = path.join(CACHE_DIR, `temp_${cacheId}.tar.gz`);
        fs.writeFileSync(tempPath, ""); // create an empty file
        state.pendingCaches.set(cacheId, { tempPath, key, version });
        res.writeHead(201, { "Content-Type": "application/json" });
        res.end(JSON.stringify({ cacheId }));
    });
    // 3. Upload cache chunk
    app.patch("/_apis/artifactcache/caches/:cacheId", (req, res) => {
        const cacheId = parseInt(req.params.cacheId, 10);
        // Virtual cache ID: discard the upload entirely — we don't need the archive
        if (cacheId === VIRTUAL_CACHE_ID) {
            res.writeHead(200);
            return res.end();
        }
        const pending = state.pendingCaches.get(cacheId);
        if (!pending) {
            console.warn(`[DTU] Cache upload to invalid ID: ${cacheId}`);
            res.writeHead(404);
            return res.end();
        }
        const contentRange = req.headers["content-range"];
        console.log(`[DTU] Uploading cache chunk to ID ${cacheId}, Content-Range: ${contentRange}`);
        let startOffset = -1;
        if (contentRange && typeof contentRange === "string") {
            const match = contentRange.match(/bytes (\d+)-/);
            if (match) {
                startOffset = parseInt(match[1], 10);
            }
        }
        try {
            if (Buffer.isBuffer(req.body) || typeof req.body === "string") {
                const buffer = Buffer.isBuffer(req.body) ? req.body : Buffer.from(req.body);
                if (startOffset >= 0) {
                    // Positional write so chunks may arrive out of order.
                    const fd = fs.openSync(pending.tempPath, "r+");
                    fs.writeSync(fd, buffer, 0, buffer.length, startOffset);
                    fs.closeSync(fd);
                }
                else {
                    fs.appendFileSync(pending.tempPath, buffer);
                }
            }
            else {
                res.writeHead(500, { "Content-Type": "text/plain" });
                return res.end("Expected raw buffer/string body");
            }
            res.writeHead(200);
            res.end();
        }
        catch (e) {
            console.error("[DTU] Failed to write cache chunk:", e);
            res.writeHead(500);
            res.end();
        }
    });
    // 4. Commit cache
    app.post("/_apis/artifactcache/caches/:cacheId", (req, res) => {
        const cacheId = parseInt(req.params.cacheId, 10);
        // Virtual cache ID: no-op commit
        if (cacheId === VIRTUAL_CACHE_ID) {
            res.writeHead(200);
            return res.end();
        }
        // FIX: default size to 0 when the body omits it — previously a body
        // without `size` stored undefined, which JSON.stringify drops.
        const { size = 0 } = req.body || {};
        const pending = state.pendingCaches.get(cacheId);
        if (!pending) {
            console.warn(`[DTU] Cache commit to invalid ID: ${cacheId}`);
            res.writeHead(404);
            return res.end();
        }
        console.log(`[DTU] Committing cache ID ${cacheId} (key: ${pending.key})`);
        // Delete old archive if this key already existed (prevents orphaned files)
        const oldEntry = state.caches.get(pending.key);
        if (oldEntry) {
            const oldMatch = oldEntry.archiveLocation.match(/artifacts\/(\d+)/);
            if (oldMatch) {
                const oldPath = path.join(CACHE_DIR, `cache_${oldMatch[1]}.tar.gz`);
                try {
                    fs.unlinkSync(oldPath);
                    console.log(`[DTU] Deleted old cache file: ${oldPath}`);
                }
                catch {
                    // File may already be gone
                }
            }
        }
        const finalPath = path.join(CACHE_DIR, `cache_${cacheId}.tar.gz`);
        fs.renameSync(pending.tempPath, finalPath);
        const baseUrl = getBaseUrl(req);
        const archiveLocation = `${baseUrl}/_apis/artifactcache/artifacts/${cacheId}`;
        state.caches.set(pending.key, {
            version: pending.version,
            archiveLocation,
            size,
        });
        state.saveCachesToDisk();
        state.pendingCaches.delete(cacheId);
        res.writeHead(200);
        res.end();
    });
    // 5. Download cache archive
    app.get("/_apis/artifactcache/artifacts/:cacheId", (req, res) => {
        const cacheId = parseInt(req.params.cacheId, 10);
        // Virtual sentinel: serve the empty tar.gz
        if (cacheId === VIRTUAL_CACHE_ID) {
            res.writeHead(200, {
                "Content-Type": "application/octet-stream",
                "Content-Disposition": 'attachment; filename="cache.tar.gz"',
                "Content-Length": fs.statSync(EMPTY_TAR_GZ_PATH).size,
            });
            return fs.createReadStream(EMPTY_TAR_GZ_PATH).pipe(res);
        }
        const filePath = path.join(CACHE_DIR, `cache_${cacheId}.tar.gz`);
        if (!fs.existsSync(filePath)) {
            console.warn(`[DTU] Cache artifact not found: ${cacheId}`);
            res.writeHead(404);
            return res.end();
        }
        console.log(`[DTU] Downloading cache ID ${cacheId}`);
        res.writeHead(200, {
            "Content-Type": "application/octet-stream",
            "Content-Disposition": `attachment; filename="cache_${cacheId}.tar.gz"`,
            "Content-Length": fs.statSync(filePath).size,
        });
        const readStream = fs.createReadStream(filePath);
        readStream.pipe(res);
    });
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Intentionally empty runtime module — this file exists only so its
// companion .d.ts type declarations are loadable as an ES module.
export {};
|