@sentinelqa/uploader 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +96 -0
- package/dist/cli.js +466 -0
- package/package.json +44 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 SentinelQA
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
# @sentinelqa/uploader
|
|
2
|
+
|
|
3
|
+
Sentinel uploader CLI for Playwright CI artifacts.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm i -D @sentinelqa/uploader
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
Or run directly:
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
npx @sentinelqa/uploader playwright
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
## Usage
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
sentinelqa playwright
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
If you want to run without installing, use:
|
|
24
|
+
|
|
25
|
+
```bash
|
|
26
|
+
npx @sentinelqa/uploader playwright
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
### Minimal GitLab CI
|
|
30
|
+
|
|
31
|
+
```yaml
|
|
32
|
+
e2e:
|
|
33
|
+
image: node:20
|
|
34
|
+
script:
|
|
35
|
+
- npm ci
|
|
36
|
+
- npx playwright install --with-deps
|
|
37
|
+
- npm run test:e2e || true
|
|
38
|
+
- npx @sentinelqa/uploader playwright
|
|
39
|
+
artifacts:
|
|
40
|
+
when: always
|
|
41
|
+
paths:
|
|
42
|
+
- playwright-report/
|
|
43
|
+
- test-results/
|
|
44
|
+
```
|
|
45
|
+
|
|
46
|
+
### Minimal GitHub Actions
|
|
47
|
+
|
|
48
|
+
```yaml
|
|
49
|
+
name: E2E
|
|
50
|
+
on: [push]
|
|
51
|
+
jobs:
|
|
52
|
+
e2e:
|
|
53
|
+
runs-on: ubuntu-latest
|
|
54
|
+
steps:
|
|
55
|
+
- uses: actions/checkout@v4
|
|
56
|
+
- uses: actions/setup-node@v4
|
|
57
|
+
with:
|
|
58
|
+
node-version: "20"
|
|
59
|
+
- run: npm ci
|
|
60
|
+
- run: npx playwright install --with-deps
|
|
61
|
+
- run: npm run test:e2e || true
|
|
62
|
+
- run: npx @sentinelqa/uploader playwright
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
## Environment Variables
|
|
66
|
+
|
|
67
|
+
- `SENTINEL_TOKEN` (project ingest token)
|
|
68
|
+
- `SENTINEL_URL` (optional; defaults to `https://app.sentinelqa.com`)
|
|
69
|
+
|
|
70
|
+
## Optional: BYO S3 (Advanced)
|
|
71
|
+
|
|
72
|
+
Set these to upload directly to your own bucket:
|
|
73
|
+
|
|
74
|
+
- `SENTINEL_S3_ENDPOINT` (optional for AWS)
|
|
75
|
+
- `SENTINEL_S3_REGION`
|
|
76
|
+
- `SENTINEL_S3_BUCKET`
|
|
77
|
+
- `SENTINEL_S3_PREFIX` (optional)
|
|
78
|
+
- `SENTINEL_S3_ACCESS_KEY_ID`
|
|
79
|
+
- `SENTINEL_S3_SECRET_ACCESS_KEY`
|
|
80
|
+
|
|
81
|
+
## Troubleshooting
|
|
82
|
+
|
|
83
|
+
**Missing artifacts**
|
|
84
|
+
- Ensure Playwright outputs are present:
|
|
85
|
+
- `playwright-report/report.json`
|
|
86
|
+
- `playwright-report/`
|
|
87
|
+
- `test-results/`
|
|
88
|
+
|
|
89
|
+
**401 Unauthorized**
|
|
90
|
+
- Check `SENTINEL_TOKEN` and project permissions.
|
|
91
|
+
|
|
92
|
+
**No CI metadata**
|
|
93
|
+
- The uploader detects GitLab or GitHub. If running locally, set the CI env vars or run in CI.
|
|
94
|
+
|
|
95
|
+
**BYO uploads failing**
|
|
96
|
+
- Verify `SENTINEL_S3_*` values and permissions.
|
package/dist/cli.js
ADDED
|
@@ -0,0 +1,466 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
"use strict";
|
|
3
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
4
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
5
|
+
};
|
|
6
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
7
|
+
const fs_1 = __importDefault(require("fs"));
|
|
8
|
+
const path_1 = __importDefault(require("path"));
|
|
9
|
+
const client_s3_1 = require("@aws-sdk/client-s3");
|
|
10
|
+
const DEFAULT_APP_URL = "https://app.sentinelqa.com";
|
|
11
|
+
const DEFAULT_JSON_PATH = "playwright-report/report.json";
|
|
12
|
+
const DEFAULT_PLAYWRIGHT_REPORT_DIR = "playwright-report";
|
|
13
|
+
const DEFAULT_TEST_RESULTS_DIR = "test-results";
|
|
14
|
+
// Read an environment variable, treating missing, empty, and
// whitespace-only values as unset. Returns the trimmed value or null.
const readEnv = (key) => {
    const raw = process.env[key];
    if (raw === undefined || raw === "") {
        return null;
    }
    const trimmed = raw.trim();
    return trimmed.length > 0 ? trimmed : null;
};
|
|
18
|
+
// Report a fatal error on stderr and terminate with exit code 1.
// Never returns.
const fail = (message) => {
    const text = `Error: ${message}`;
    console.error(text);
    process.exit(1);
};
|
|
22
|
+
// Resolve after `ms` milliseconds; used for retry backoff.
const sleep = (ms) => {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
};
|
|
23
|
+
// Run `fn` up to 3 times, passing the 1-based attempt number, with
// exponential backoff between attempts (500ms, then 1000ms).
// Rethrows the last error once attempts are exhausted; `label` names
// the operation for the fallback error message.
const withRetry = async (fn, label) => {
    const maxAttempts = 3;
    let delay = 500;
    for (let attempt = 1; attempt <= maxAttempts; attempt += 1) {
        try {
            return await fn(attempt);
        }
        catch (err) {
            if (attempt === maxAttempts) {
                throw err;
            }
            await sleep(delay);
            delay *= 2;
        }
    }
    // Defensive: the loop above either returns or rethrows.
    throw new Error(`${label} failed after ${maxAttempts} attempts`);
};
|
|
41
|
+
// Look up a CLI flag's value in `args`.
// Supports both the "--name value" and "--name=value" forms (the
// original only handled the two-token form). Returns null when the
// flag is absent or has no value after it.
const getArgValue = (args, name) => {
    const inlinePrefix = `${name}=`;
    const inline = args.find((arg) => typeof arg === "string" && arg.startsWith(inlinePrefix));
    if (inline !== undefined) {
        return inline.slice(inlinePrefix.length);
    }
    const idx = args.indexOf(name);
    if (idx === -1) {
        return null;
    }
    return args[idx + 1] ?? null;
};
|
|
47
|
+
// Validate that `filePath` names an existing regular file; otherwise
// exit with an error via fail(). `label` identifies the input in
// error messages.
const ensureFile = (filePath, label) => {
    if (!filePath) {
        fail(`${label} is required.`);
    }
    if (!fs_1.default.existsSync(filePath)) {
        fail(`${label} not found at ${filePath}.`);
    }
    if (!fs_1.default.statSync(filePath).isFile()) {
        fail(`${label} is not a file: ${filePath}.`);
    }
};
|
|
55
|
+
// Validate that `dirPath` names an existing directory; otherwise exit
// with an error via fail(). `label` identifies the input in error
// messages.
const ensureDir = (dirPath, label) => {
    if (!dirPath) {
        fail(`${label} is required.`);
    }
    if (!fs_1.default.existsSync(dirPath)) {
        fail(`${label} not found at ${dirPath}.`);
    }
    if (!fs_1.default.statSync(dirPath).isDirectory()) {
        fail(`${label} is not a directory: ${dirPath}.`);
    }
};
|
|
64
|
+
// Recursively collect the paths of every regular file under `dirPath`.
// Directories are descended into; other entry types (symlinks, sockets)
// are skipped.
const listFilesRecursive = (dirPath) => {
    const files = [];
    const entries = fs_1.default.readdirSync(dirPath, { withFileTypes: true });
    for (const entry of entries) {
        const entryPath = path_1.default.join(dirPath, entry.name);
        if (entry.isDirectory()) {
            for (const nested of listFilesRecursive(entryPath)) {
                files.push(nested);
            }
        }
        else if (entry.isFile()) {
            files.push(entryPath);
        }
    }
    return files;
};
|
|
76
|
+
// Convert an arbitrary error value into a display string.
// Falsy input -> null; strings pass through; objects with a `message`
// property yield that message; anything else is JSON-stringified.
const normalizeError = (value) => {
    if (!value) {
        return null;
    }
    if (typeof value === "string") {
        return value;
    }
    const isErrorLike = typeof value === "object" && "message" in value;
    return isErrorLike ? String(value.message) : JSON.stringify(value);
};
|
|
86
|
+
// Join suite titles plus the test title into one " > "-separated path,
// dropping empty segments.
const buildFullTitle = (suiteTitles, testTitle) => {
    const segments = suiteTitles.concat([testTitle]);
    return segments.filter((part) => Boolean(part)).join(" > ");
};
|
|
90
|
+
// Severity ranking used to pick the worst outcome across retries:
// failed (3) > timedOut (2) > passed (1) > skipped/unknown (0).
const statusRank = (status) => {
    const ranks = new Map([
        ["failed", 3],
        ["timedOut", 2],
        ["passed", 1],
        ["skipped", 0]
    ]);
    return ranks.get(status) ?? 0;
};
|
|
101
|
+
// Collapse a Playwright status into the three buckets this uploader
// reports: "failed" (including timeouts), "passed", or "skipped" for
// everything else (including unknown values).
const normalizeStatus = (status) => {
    switch (status) {
        case "failed":
        case "timedOut":
            return "failed";
        case "passed":
            return "passed";
        default:
            return "skipped";
    }
};
|
|
110
|
+
// Flatten a Playwright JSON report into a list of per-test records.
// Walks the nested suite tree accumulating suite titles, and reduces
// each test's retry results into a single worst-case status, summed
// duration, and the first error message found.
const extractTestsFromReport = (reportJson) => {
    const collected = [];
    // Reduce one test (and its retry results) to a flat record.
    const summarizeTest = (test, suite, titles) => {
        const results = Array.isArray(test?.results) ? test.results : [];
        let worst = normalizeStatus(test?.status) || "skipped";
        let durationMs = 0;
        let error = null;
        for (const result of results) {
            const resultStatus = normalizeStatus(result?.status);
            if (statusRank(resultStatus) > statusRank(worst)) {
                worst = resultStatus;
            }
            if (typeof result?.duration === "number") {
                durationMs += result.duration;
            }
            if (!error && Array.isArray(result?.errors) && result.errors.length > 0) {
                error = normalizeError(result.errors[0]?.message || result.errors[0]);
            }
        }
        if (!error && test?.error) {
            error = normalizeError(test.error);
        }
        const file = test?.location?.file || suite?.location?.file || null;
        const projectName = test?.projectName || results[0]?.projectName || null;
        const testTitle = test?.title || "";
        const fullTitle = buildFullTitle(titles, testTitle);
        return {
            testKey: `${file || "unknown"}::${projectName || "default"}::${fullTitle || testTitle}`,
            title: fullTitle || testTitle,
            file: file || "",
            projectName: projectName || "",
            status: worst,
            durationMs,
            error
        };
    };
    // Depth-first walk; `ancestors` carries the titles seen so far.
    const visitSuite = (suite, ancestors) => {
        const suiteTitle = suite?.title || "";
        const titles = suiteTitle ? [...ancestors, suiteTitle] : [...ancestors];
        const childSuites = Array.isArray(suite?.suites) ? suite.suites : [];
        for (const child of childSuites) {
            visitSuite(child, titles);
        }
        const suiteTests = Array.isArray(suite?.tests) ? suite.tests : [];
        for (const test of suiteTests) {
            collected.push(summarizeTest(test, suite, titles));
        }
    };
    const roots = Array.isArray(reportJson?.suites) ? reportJson.suites : [];
    for (const root of roots) {
        visitSuite(root, []);
    }
    return collected;
};
|
|
158
|
+
// A run is "failed" when any test failed; otherwise it is "passed"
// (skipped-only runs count as passed).
const computeRunStatus = (tests) => {
    for (const test of tests) {
        if (test.status === "failed") {
            return "failed";
        }
    }
    return "passed";
};
|
|
161
|
+
// Detect a GitLab CI environment and extract commit and job-URL
// metadata. Returns null when not running under GitLab.
const detectGitLabEnv = () => {
    const runningOnGitLab = readEnv("GITLAB_CI") === "true" || readEnv("CI_PROJECT_ID") !== null;
    if (!runningOnGitLab) {
        return null;
    }
    const sha = readEnv("CI_COMMIT_SHA");
    // Branch pipelines set CI_COMMIT_BRANCH; fall back to the ref name.
    const branch = readEnv("CI_COMMIT_BRANCH") || readEnv("CI_COMMIT_REF_NAME");
    const url = readEnv("CI_JOB_URL") || readEnv("CI_PIPELINE_URL");
    return {
        provider: "gitlab",
        commit: { sha, branch },
        ci: { url }
    };
};
|
|
176
|
+
// Detect a GitHub Actions environment and extract commit and run-URL
// metadata. Returns null when not running under GitHub Actions.
const detectGitHubEnv = () => {
    if (readEnv("GITHUB_ACTIONS") !== "true") {
        return null;
    }
    const sha = readEnv("GITHUB_SHA");
    // Prefer GITHUB_REF_NAME; otherwise strip the branch-ref prefix.
    const refName = readEnv("GITHUB_REF_NAME");
    const branch = refName || (readEnv("GITHUB_REF") || "").replace("refs/heads/", "");
    const serverUrl = readEnv("GITHUB_SERVER_URL");
    const repo = readEnv("GITHUB_REPOSITORY");
    const runId = readEnv("GITHUB_RUN_ID");
    const haveUrlParts = Boolean(serverUrl && repo && runId);
    const url = haveUrlParts ? `${serverUrl}/${repo}/actions/runs/${runId}` : null;
    return {
        provider: "github",
        commit: { sha, branch },
        ci: { url }
    };
};
|
|
193
|
+
// Build artifact descriptors for every file under `rootDir`.
// objectKey is [s3Prefix, runId, relative-path] joined with "/",
// with empty parts dropped. `typeOverride` forces the artifact type;
// otherwise it is inferred from the extension (zip -> trace,
// webm/mp4 -> video, png/jpg/jpeg -> screenshot, log/txt -> log,
// anything else -> other).
const getArtifacts = (rootDir, typeOverride, runId, s3Prefix) => {
    const inferType = (filePath) => {
        const ext = path_1.default.extname(filePath).toLowerCase();
        if (ext === ".zip") {
            return "trace";
        }
        if (ext === ".webm" || ext === ".mp4") {
            return "video";
        }
        if (ext === ".png" || ext === ".jpg" || ext === ".jpeg") {
            return "screenshot";
        }
        if (ext === ".log" || ext === ".txt") {
            return "log";
        }
        return "other";
    };
    return listFilesRecursive(rootDir).map((filePath) => {
        // Normalize Windows separators so object keys are always "/".
        const relPath = path_1.default.relative(rootDir, filePath).replace(/\\/g, "/");
        const objectKey = [s3Prefix, runId, relPath].filter(Boolean).join("/");
        return {
            type: typeOverride || inferType(filePath),
            filePath,
            objectKey
        };
    });
};
|
|
216
|
+
// Upload each artifact to the given S3 bucket sequentially, retrying
// each object individually via withRetry.
const uploadArtifacts = async (s3Client, bucket, artifacts) => {
    for (const artifact of artifacts) {
        const putOnce = async () => {
            // Fresh stream per attempt: a consumed stream cannot be retried.
            const command = new client_s3_1.PutObjectCommand({
                Bucket: bucket,
                Key: artifact.objectKey,
                Body: fs_1.default.createReadStream(artifact.filePath)
            });
            await s3Client.send(command);
        };
        await withRetry(putOnce, `upload ${artifact.objectKey}`);
    }
};
|
|
228
|
+
// Print CLI usage, options, environment variables, and examples.
const printHelp = () => {
    const helpLines = [
        "Sentinel Uploader CLI",
        "",
        "Usage:",
        " sentinelqa playwright [options]",
        " sentinelqa upload [options]",
        "",
        "Options:",
        " --playwright-json-path <path> Path to Playwright JSON report",
        " --playwright-report-dir <dir> Path to Playwright HTML report directory",
        " --test-results-dir <dir> Path to Playwright test-results directory",
        " -h, --help Show help",
        "",
        "Env:",
        " SENTINEL_TOKEN Ingest token for your project (required)",
        " SENTINEL_URL Optional override for app URL",
        "",
        "Examples:",
        " sentinelqa playwright",
        " sentinelqa upload --playwright-json-path playwright-report/report.json"
    ];
    console.log(helpLines.join("\n"));
};
|
|
251
|
+
// Entry point. Flow:
//   1. Parse the command and resolve input paths (flag > env > default).
//   2. Parse the Playwright JSON report into per-test records.
//   3. Detect CI metadata (GitLab or GitHub) — required by the API.
//   4. Create a run via POST /api/runs.
//   5. Upload artifacts either to a BYO S3 bucket or via presigned URLs.
//   6. Complete the run via POST /api/runs/:runId/complete and print a summary.
// Exits non-zero (via fail) on any validation or API error.
const main = async () => {
    const args = process.argv.slice(2);
    const command = args[0];
    if (!command || command === "-h" || command === "--help" || command === "help") {
        printHelp();
        process.exit(0);
    }
    if (command !== "playwright" && command !== "upload") {
        fail(`Unknown command "${command}". Run with --help for usage.`);
    }
    // PROJECT_INGEST_TOKEN is accepted as a fallback alias.
    const ingestToken = readEnv("SENTINEL_TOKEN") || readEnv("PROJECT_INGEST_TOKEN");
    if (!ingestToken)
        fail("SENTINEL_TOKEN is required.");
    const playwrightJsonPath = getArgValue(args, "--playwright-json-path") ||
        readEnv("PLAYWRIGHT_JSON_PATH") ||
        DEFAULT_JSON_PATH;
    const playwrightReportDir = getArgValue(args, "--playwright-report-dir") ||
        readEnv("PLAYWRIGHT_REPORT_DIR") ||
        DEFAULT_PLAYWRIGHT_REPORT_DIR;
    const testResultsDir = getArgValue(args, "--test-results-dir") ||
        readEnv("TEST_RESULTS_DIR") ||
        DEFAULT_TEST_RESULTS_DIR;
    ensureFile(playwrightJsonPath, "PLAYWRIGHT_JSON_PATH");
    ensureDir(playwrightReportDir, "PLAYWRIGHT_REPORT_DIR");
    ensureDir(testResultsDir, "TEST_RESULTS_DIR");
    const reportJson = JSON.parse(fs_1.default.readFileSync(playwrightJsonPath, "utf8"));
    const tests = extractTestsFromReport(reportJson);
    const status = computeRunStatus(tests);
    // CI metadata is mandatory: commit SHA, branch, and a run URL.
    const ci = detectGitLabEnv() || detectGitHubEnv();
    if (!ci)
        fail("CI environment not detected (GitLab or GitHub).");
    if (!ci.commit.sha)
        fail("Commit SHA is required.");
    if (!ci.commit.branch)
        fail("Commit branch is required.");
    if (!ci.ci.url)
        fail("CI run URL is required.");
    const appUrl = readEnv("SENTINEL_URL") || readEnv("APP_URL") || DEFAULT_APP_URL;
    // Step 4: create the run.
    const createRes = await withRetry(() => fetch(`${appUrl}/api/runs`, {
        method: "POST",
        headers: {
            "content-type": "application/json",
            authorization: `Bearer ${ingestToken}`
        },
        body: JSON.stringify({
            provider: ci.provider,
            commit: ci.commit,
            ci: ci.ci
        })
    }), "POST /api/runs");
    if (!createRes.ok) {
        const body = await createRes.text();
        fail(`POST /api/runs failed (${createRes.status}): ${body}`);
    }
    const { runId } = await createRes.json();
    if (!runId)
        fail("Missing runId from /api/runs response.");
    const uploadStart = Date.now();
    // Step 5: collect artifacts from both directories, excluding the JSON
    // report itself (its contents are sent in the complete payload).
    const byoBucket = readEnv("SENTINEL_S3_BUCKET") || readEnv("S3_BUCKET");
    const byoPrefix = readEnv("SENTINEL_S3_PREFIX") || readEnv("S3_PREFIX") || "";
    const artifacts = [
        ...getArtifacts(playwrightReportDir, null, runId, byoBucket ? byoPrefix : ""),
        ...getArtifacts(testResultsDir, null, runId, byoBucket ? byoPrefix : "")
    ].filter((artifact) => path_1.default.resolve(artifact.filePath) !== path_1.default.resolve(playwrightJsonPath));
    // Map a lowercase file extension to its MIME type.
    const contentTypeFor = (ext) => {
        if (ext === ".zip")
            return "application/zip";
        if (ext === ".webm")
            return "video/webm";
        if (ext === ".mp4")
            return "video/mp4";
        if (ext === ".png")
            return "image/png";
        if (ext === ".jpg" || ext === ".jpeg")
            return "image/jpeg";
        if (ext === ".txt" || ext === ".log")
            return "text/plain";
        return "application/octet-stream";
    };
    const artifactsWithMeta = artifacts.map((artifact) => {
        const stat = fs_1.default.statSync(artifact.filePath);
        const ext = path_1.default.extname(artifact.filePath).toLowerCase();
        return { ...artifact, sizeBytes: stat.size, contentType: contentTypeFor(ext) };
    });
    let finalArtifacts = artifactsWithMeta;
    if (byoBucket) {
        // BYO S3: upload directly with user-supplied credentials.
        const s3Region = readEnv("SENTINEL_S3_REGION") || readEnv("AWS_REGION") || readEnv("S3_REGION");
        if (!s3Region)
            fail("SENTINEL_S3_REGION is required for BYO S3.");
        const s3Endpoint = readEnv("SENTINEL_S3_ENDPOINT") || readEnv("S3_ENDPOINT");
        const accessKeyId = readEnv("SENTINEL_S3_ACCESS_KEY_ID") || readEnv("AWS_ACCESS_KEY_ID");
        const secretAccessKey = readEnv("SENTINEL_S3_SECRET_ACCESS_KEY") || readEnv("AWS_SECRET_ACCESS_KEY");
        if (!accessKeyId || !secretAccessKey) {
            fail("SENTINEL_S3_ACCESS_KEY_ID and SENTINEL_S3_SECRET_ACCESS_KEY are required.");
        }
        const s3Client = new client_s3_1.S3Client({
            region: s3Region,
            ...(s3Endpoint ? { endpoint: s3Endpoint } : {}),
            credentials: { accessKeyId, secretAccessKey }
        });
        // uploadArtifacts retries each object itself; the previous outer
        // withRetry wrapper re-uploaded already-sent objects on failure.
        await uploadArtifacts(s3Client, byoBucket, finalArtifacts);
        finalArtifacts = finalArtifacts.map((artifact) => ({
            ...artifact,
            bucket: byoBucket
        }));
    }
    else {
        // Hosted flow: request presigned PUT URLs, then upload each file.
        const presignRes = await withRetry(() => fetch(`${appUrl}/api/uploads/presign`, {
            method: "POST",
            headers: {
                "content-type": "application/json",
                authorization: `Bearer ${ingestToken}`
            },
            body: JSON.stringify({
                items: finalArtifacts.map((artifact) => ({
                    relPath: artifact.objectKey,
                    contentType: artifact.contentType,
                    sizeBytes: artifact.sizeBytes,
                    kind: artifact.type
                }))
            })
        }), "POST /api/uploads/presign");
        if (!presignRes.ok) {
            const body = await presignRes.text();
            fail(`POST /api/uploads/presign failed (${presignRes.status}): ${body}`);
        }
        const presignData = await presignRes.json();
        const uploadMap = new Map((presignData.items || []).map((item) => [item.relPath, item]));
        for (const artifact of finalArtifacts) {
            const item = uploadMap.get(artifact.objectKey);
            if (!item?.uploadUrl || !item?.objectKey) {
                // Deterministic server omission — retrying cannot help.
                fail(`Missing upload URL for ${artifact.objectKey}`);
            }
            // Retry per artifact only; the previous outer withRetry around the
            // whole loop re-uploaded already-completed files on any failure.
            await withRetry(async () => {
                // Send a Buffer, not a stream: Node's built-in fetch (undici)
                // rejects stream bodies unless the "duplex" option is set, and
                // a Buffer also gives the presigned PUT a known content-length.
                const body = fs_1.default.readFileSync(artifact.filePath);
                const res = await fetch(item.uploadUrl, {
                    method: "PUT",
                    headers: {
                        "content-type": artifact.contentType
                    },
                    body
                });
                if (!res.ok) {
                    throw new Error(`Upload failed (${res.status}) for ${artifact.objectKey}`);
                }
            }, `upload ${artifact.objectKey}`);
            // Adopt the canonical object key assigned by the server.
            artifact.objectKey = item.objectKey;
        }
        finalArtifacts = finalArtifacts.map((artifact) => ({
            ...artifact,
            bucket: presignData.bucket
        }));
    }
    const uploadDurationMs = Date.now() - uploadStart;
    // Step 6: mark the run complete with test results and artifact metadata.
    const completeRes = await withRetry(() => fetch(`${appUrl}/api/runs/${runId}/complete`, {
        method: "POST",
        headers: {
            "content-type": "application/json",
            authorization: `Bearer ${ingestToken}`
        },
        body: JSON.stringify({
            status,
            finishedAt: new Date().toISOString(),
            tests,
            artifacts: finalArtifacts.map((artifact) => ({
                type: artifact.type,
                bucket: artifact.bucket,
                objectKey: artifact.objectKey,
                sizeBytes: artifact.sizeBytes,
                contentType: artifact.contentType,
                testKey: null
            }))
        })
    }), "POST /api/runs/:runId/complete");
    if (!completeRes.ok) {
        const body = await completeRes.text();
        fail(`POST /api/runs/:runId/complete failed (${completeRes.status}): ${body}`);
    }
    let shareUrl = null;
    try {
        const data = await completeRes.json();
        shareUrl = data.shareUrl || null;
    }
    catch {
        // The complete response body may be empty or non-JSON; the share
        // URL is optional, so ignore parse failures.
        shareUrl = null;
    }
    const internalUrl = `${appUrl}/runs/${runId}`;
    // Machine-readable summary first, then human-readable lines.
    console.log(JSON.stringify({
        event: "uploader_summary",
        runId,
        tests: tests.length,
        artifacts: artifacts.length,
        uploadDurationMs
    }));
    console.log(`Uploaded ${artifacts.length} artifacts and completed run ${runId}.`);
    console.log(internalUrl);
    if (shareUrl)
        console.log(shareUrl);
    if (status === "failed") {
        const total = tests.length;
        const failed = tests.filter((t) => t.status === "failed").length;
        // Honor the NO_COLOR convention (https://no-color.org).
        const useColor = !process.env.NO_COLOR;
        const bold = useColor ? "\u001b[1m" : "";
        const red = useColor ? "\u001b[31m" : "";
        const reset = useColor ? "\u001b[0m" : "";
        console.log(`${bold}${red}CI Debug Report${reset}`);
        console.log(`${bold}${failed} failed / ${total} total${reset}`);
    }
};
|
|
464
|
+
// Top-level error handler: any unhandled failure exits with code 1.
main().catch((err) => {
    const message = err?.message || String(err);
    fail(message);
});
|
package/package.json
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@sentinelqa/uploader",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"private": false,
|
|
5
|
+
"description": "Sentinel uploader CLI for CI/CD debugging artifacts",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"repository": {
|
|
8
|
+
"type": "git",
|
|
9
|
+
"url": "https://github.com/sentinelqa/sentinel"
|
|
10
|
+
},
|
|
11
|
+
"keywords": [
|
|
12
|
+
"sentinel",
|
|
13
|
+
"ci",
|
|
14
|
+
"cicd",
|
|
15
|
+
"playwright",
|
|
16
|
+
"debugging",
|
|
17
|
+
"artifacts",
|
|
18
|
+
"uploader"
|
|
19
|
+
],
|
|
20
|
+
"bin": {
|
|
21
|
+
"sentinelqa": "dist/cli.js"
|
|
22
|
+
},
|
|
23
|
+
"type": "commonjs",
|
|
24
|
+
"files": [
|
|
25
|
+
"dist",
|
|
26
|
+
"README.md",
|
|
27
|
+
"LICENSE"
|
|
28
|
+
],
|
|
29
|
+
"engines": {
|
|
30
|
+
"node": ">=18"
|
|
31
|
+
},
|
|
32
|
+
"main": "dist/cli.js",
|
|
33
|
+
"scripts": {
|
|
34
|
+
"build": "tsc -p tsconfig.json",
|
|
35
|
+
"publish:dry": "npm run build && npm pack --dry-run"
|
|
36
|
+
},
|
|
37
|
+
"dependencies": {
|
|
38
|
+
"@aws-sdk/client-s3": "^3.600.0"
|
|
39
|
+
},
|
|
40
|
+
"devDependencies": {
|
|
41
|
+
"@types/node": "^20.19.32",
|
|
42
|
+
"typescript": "^5.9.3"
|
|
43
|
+
}
|
|
44
|
+
}
|