@topogram/cli 0.3.86 → 0.3.87
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/archive/unarchive.js +56 -5
- package/src/catalog/source.js +52 -1
- package/src/cli/commands/emit/snapshot-input.js +111 -0
- package/src/cli/commands/emit.js +11 -1
- package/src/github-client.js +56 -2
- package/src/import/core/shared/files.js +21 -0
- package/src/remote-payload-limits.js +40 -0
- package/src/template-trust/constants.js +1 -1
- package/src/template-trust/policy.js +3 -4
- package/src/template-trust/status.js +2 -4
- package/src/topogram-config.js +47 -0
- package/src/workflows/reconcile/adoption-plan/outputs.js +31 -4
- package/src/workflows/shared.js +21 -0
package/package.json
CHANGED
package/src/archive/unarchive.js
CHANGED
@@ -12,15 +12,15 @@
 // - pitch: draft
 // - document: draft
 
-import { existsSync,
+import { existsSync, writeFileSync, mkdirSync } from "node:fs";
 import path from "node:path";
 import {
-  archiveDir,
   listArchiveFiles,
   parseArchiveFile,
   rewriteArchiveFile
 } from "./jsonl.js";
-import {
+import { isArchivableKind } from "./schema.js";
+import { sdlcRootForSdlc } from "../sdlc/paths.js";
 
 const REOPEN_STATUSES = {
   bug: "open",
@@ -30,6 +30,50 @@ const REOPEN_STATUSES = {
   document: "draft"
 };
 
+const SAFE_ARCHIVE_ID = /^[A-Za-z][A-Za-z0-9_]*$/;
+
+function recordDirForKind(kind) {
+  if (kind === "acceptance_criterion") return "acceptance_criteria";
+  return `${kind}s`;
+}
+
+function isContainedPath(root, candidate) {
+  const relative = path.relative(root, candidate);
+  return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative));
+}
+
+function validateArchivedEntry(entry) {
+  if (!entry || typeof entry !== "object") {
+    return "Archived entry is not an object";
+  }
+  if (!isArchivableKind(entry.kind)) {
+    return `Archived entry kind '${entry.kind}' is not supported for unarchive`;
+  }
+  if (typeof entry.id !== "string" || !SAFE_ARCHIVE_ID.test(entry.id)) {
+    return `Archived entry id '${entry.id}' is not a safe Topogram identifier`;
+  }
+  return null;
+}
+
+function resolveTargetFile(workspaceRoot, entry, options = {}) {
+  const recordRoot = path.resolve(sdlcRootForSdlc(workspaceRoot), recordDirForKind(entry.kind));
+  const targetDir = path.resolve(options.targetDir || recordRoot);
+  if (!isContainedPath(recordRoot, targetDir)) {
+    return {
+      ok: false,
+      error: `Target directory '${targetDir}' escapes SDLC ${entry.kind} record root '${recordRoot}'`
+    };
+  }
+  const targetFile = path.resolve(targetDir, `${entry.id}.tg`);
+  if (!isContainedPath(recordRoot, targetFile)) {
+    return {
+      ok: false,
+      error: `Archived entry id '${entry.id}' resolves outside SDLC ${entry.kind} record root '${recordRoot}'`
+    };
+  }
+  return { ok: true, recordRoot, targetDir, targetFile };
+}
+
 function findEntry(workspaceRoot, id) {
   for (const file of listArchiveFiles(workspaceRoot)) {
     const entries = parseArchiveFile(file);
@@ -109,10 +153,17 @@ export function unarchive(workspaceRoot, id, options = {}) {
   }
 
   const { file, entries, entry } = found;
+  const entryError = validateArchivedEntry(entry);
+  if (entryError) {
+    return { ok: false, error: entryError };
+  }
   const reopenStatus = options.status || REOPEN_STATUSES[entry.kind] || "draft";
-  const
+  const target = resolveTargetFile(workspaceRoot, entry, options);
+  if (!target.ok) {
+    return target;
+  }
+  const { targetDir, targetFile } = target;
   if (!existsSync(targetDir)) mkdirSync(targetDir, { recursive: true });
-  const targetFile = path.join(targetDir, `${entry.id}.tg`);
 
   if (existsSync(targetFile)) {
     return { ok: false, error: `Target file '${targetFile}' already exists; refuse to overwrite` };
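The unarchive path is now validated twice: the entry itself (kind and safe id) and the resolved target (directory and file must stay under the kind's SDLC record root). A minimal standalone sketch of the containment idiom the new code relies on; the sample root and ids are invented for illustration:

// Containment check as used in resolveTargetFile above; paths are hypothetical.
import path from "node:path";

function isContainedPath(root, candidate) {
  const relative = path.relative(root, candidate);
  return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative));
}

const recordRoot = path.resolve("/workspace/.sdlc/bugs");
console.log(isContainedPath(recordRoot, path.resolve(recordRoot, "BUG_7.tg")));        // true
console.log(isContainedPath(recordRoot, path.resolve(recordRoot, "../../etc/pw.tg"))); // false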
package/src/catalog/source.js
CHANGED
@@ -5,22 +5,66 @@ import fs from "node:fs";
 import path from "node:path";
 
 import { readGithubCatalogSourceText } from "../github-client.js";
+import { remotePayloadMaxBytes } from "../remote-payload-limits.js";
 import { defaultCatalogSource } from "../topogram-config.js";
 import { GITHUB_TOKEN_HOSTS } from "./constants.js";
 import { validateCatalog } from "./validation.js";
 
 const FETCH_URL_SCRIPT = `
 const source = process.argv[1];
+const maxBytes = Number.parseInt(process.env.TOPOGRAM_FETCH_MAX_BYTES || "", 10) || 5242880;
 const token = process.env.TOPOGRAM_FETCH_TOKEN || "";
 const tokenHosts = new Set(["github.com", "api.github.com", "raw.githubusercontent.com"]);
 function tokenAllowed(url) {
   const hostname = new URL(url).hostname.toLowerCase();
   return tokenHosts.has(hostname) || hostname.endsWith(".github.com");
 }
+async function readResponseText(response, url) {
+  const declaredLength = Number.parseInt(response.headers.get("content-length") || "", 10);
+  if (Number.isFinite(declaredLength) && declaredLength > maxBytes) {
+    throw new Error("Response from " + url + " exceeded " + maxBytes + " byte limit.");
+  }
+  if (!response.body) {
+    const text = await response.text();
+    if (Buffer.byteLength(text, "utf8") > maxBytes) {
+      throw new Error("Response from " + url + " exceeded " + maxBytes + " byte limit.");
+    }
+    return text;
+  }
+  const reader = response.body.getReader();
+  const decoder = new TextDecoder();
+  const chunks = [];
+  let total = 0;
+  while (true) {
+    const { value, done } = await reader.read();
+    if (done) {
+      break;
+    }
+    total += value.byteLength;
+    if (total > maxBytes) {
+      try {
+        await reader.cancel();
+      } catch {}
+      throw new Error("Response from " + url + " exceeded " + maxBytes + " byte limit.");
+    }
+    chunks.push(decoder.decode(value, { stream: true }));
+  }
+  chunks.push(decoder.decode());
+  return chunks.join("");
+}
 async function readUrl(url, redirects = 0) {
   if (redirects > 5) {
     throw new Error("Too many redirects.");
   }
+  if (process.env.TOPOGRAM_CATALOG_URL_FIXTURE_PATH) {
+    const fs = await import("node:fs");
+    const fixturePath = process.env.TOPOGRAM_CATALOG_URL_FIXTURE_PATH;
+    const fixtureSize = fs.statSync(fixturePath).size;
+    if (fixtureSize > maxBytes) {
+      throw new Error("Response from " + url + " exceeded " + maxBytes + " byte limit.");
+    }
+    return fs.readFileSync(fixturePath, "utf8");
+  }
   const headers = {};
   if (token && tokenAllowed(url)) {
     headers.authorization = "Bearer " + token;
@@ -30,7 +74,7 @@ async function readUrl(url, redirects = 0) {
     const next = new URL(response.headers.get("location"), url).toString();
     return readUrl(next, redirects + 1);
   }
-  const text = await response
+  const text = await readResponseText(response, url);
   if (!response.ok) {
     const preview = text.trim().slice(0, 400);
     throw new Error(String(response.status) + " " + response.statusText + (preview ? "\\n" + preview : ""));
@@ -118,14 +162,21 @@ function readCatalogText(source) {
  */
 function readUrlText(source) {
   const token = process.env.GITHUB_TOKEN || process.env.GH_TOKEN || "";
+  const maxBytes = remotePayloadMaxBytes(
+    ["TOPOGRAM_CATALOG_FETCH_MAX_BYTES", "TOPOGRAM_REMOTE_FETCH_MAX_BYTES"],
+    undefined,
+    ["catalogFetchMaxBytes", "remoteFetchMaxBytes"]
+  );
   const tokenEnv = token && githubTokenAllowedForCatalogUrl(source)
     ? { TOPOGRAM_FETCH_TOKEN: token }
     : {};
   const result = childProcess.spawnSync(process.execPath, ["--input-type=module", "-e", FETCH_URL_SCRIPT, source], {
     encoding: "utf8",
+    maxBuffer: maxBytes + 4096,
     env: {
       ...process.env,
       ...tokenEnv,
+      TOPOGRAM_FETCH_MAX_BYTES: String(maxBytes),
       PATH: process.env.PATH || ""
     }
   });
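The in-process reader and the spawned fetch script now share one byte cap: it resolves env first, then config, then a 5 MiB default, and is handed to the child via TOPOGRAM_FETCH_MAX_BYTES while spawnSync's maxBuffer is sized to match. A sketch of raising the cap for one run (the value is arbitrary; the variable names come from this diff):

// Allow catalogs up to 16 MiB for this process; readUrlText() forwards the
// resolved value into FETCH_URL_SCRIPT as TOPOGRAM_FETCH_MAX_BYTES.
process.env.TOPOGRAM_CATALOG_FETCH_MAX_BYTES = String(16 * 1024 * 1024);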
package/src/cli/commands/emit/snapshot-input.js
ADDED
@@ -0,0 +1,111 @@
+// @ts-check
+
+import fs from "node:fs";
+
+const FORBIDDEN_SNAPSHOT_KEYS = new Set(["__proto__", "constructor", "prototype"]);
+
+/**
+ * @param {unknown} value
+ * @returns {value is Record<string, unknown>}
+ */
+function isRecord(value) {
+  return Boolean(value) && typeof value === "object" && !Array.isArray(value);
+}
+
+/**
+ * @param {unknown} value
+ * @returns {string}
+ */
+function errorMessage(value) {
+  return value instanceof Error ? value.message : String(value);
+}
+
+/**
+ * @param {unknown} value
+ * @returns {string[]}
+ */
+function validateDbSchemaSnapshot(value) {
+  const errors = [];
+  if (!isRecord(value)) {
+    return ["snapshot must be a JSON object"];
+  }
+  if (value.type !== "db_schema_snapshot") {
+    errors.push("snapshot.type must be 'db_schema_snapshot'");
+  }
+  if (!Array.isArray(value.tables)) {
+    errors.push("snapshot.tables must be an array");
+  }
+  if (!Array.isArray(value.enums)) {
+    errors.push("snapshot.enums must be an array");
+  }
+  if (Array.isArray(value.tables)) {
+    for (const [index, table] of value.tables.entries()) {
+      if (!isRecord(table)) {
+        errors.push(`snapshot.tables[${index}] must be an object`);
+        continue;
+      }
+      if (typeof table.table !== "string" || table.table.length === 0) {
+        errors.push(`snapshot.tables[${index}].table must be a non-empty string`);
+      }
+      if (!Array.isArray(table.columns)) {
+        errors.push(`snapshot.tables[${index}].columns must be an array`);
+      }
+    }
+  }
+  if (Array.isArray(value.enums)) {
+    for (const [index, enumEntry] of value.enums.entries()) {
+      if (!isRecord(enumEntry)) {
+        errors.push(`snapshot.enums[${index}] must be an object`);
+        continue;
+      }
+      if (typeof enumEntry.id !== "string" || enumEntry.id.length === 0) {
+        errors.push(`snapshot.enums[${index}].id must be a non-empty string`);
+      }
+      if (!Array.isArray(enumEntry.values)) {
+        errors.push(`snapshot.enums[${index}].values must be an array`);
+      }
+    }
+  }
+  return errors;
+}
+
+/**
+ * @param {string} snapshotPath
+ * @returns {{ ok: true, snapshot: Record<string, unknown> } | { ok: false, message: string }}
+ */
+export function readFromSnapshot(snapshotPath) {
+  let raw;
+  try {
+    raw = fs.readFileSync(snapshotPath, "utf8");
+  } catch (error) {
+    return {
+      ok: false,
+      message: `Unable to read --from-snapshot '${snapshotPath}': ${errorMessage(error)}`
+    };
+  }
+
+  let parsed;
+  try {
+    parsed = JSON.parse(raw, (key, value) => {
+      if (FORBIDDEN_SNAPSHOT_KEYS.has(key)) {
+        throw new Error(`unsafe key '${key}' is not allowed in snapshot JSON`);
+      }
+      return value;
+    });
+  } catch (error) {
+    return {
+      ok: false,
+      message: `Invalid --from-snapshot JSON at '${snapshotPath}': ${errorMessage(error)}`
+    };
+  }
+
+  const errors = validateDbSchemaSnapshot(parsed);
+  if (errors.length > 0) {
+    return {
+      ok: false,
+      message: `Invalid --from-snapshot DB schema snapshot at '${snapshotPath}': ${errors.join("; ")}`
+    };
+  }
+
+  return { ok: true, snapshot: parsed };
+}
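For orientation, here is roughly the smallest snapshot shape that passes validateDbSchemaSnapshot, plus a call sketch. The file name and field values are invented for illustration:

// snapshot.json (hypothetical):
// { "type": "db_schema_snapshot",
//   "tables": [{ "table": "users", "columns": [] }],
//   "enums": [{ "id": "user_role", "values": ["admin", "member"] }] }
import { readFromSnapshot } from "./snapshot-input.js";

const result = readFromSnapshot("snapshot.json");
if (!result.ok) {
  console.error(result.message); // read, parse, or schema validation failure
} else {
  console.log(result.snapshot.type); // "db_schema_snapshot"
}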
package/src/cli/commands/emit.js
CHANGED
@@ -22,6 +22,7 @@ import {
   generatedOutputSentinel,
   topogramInputPathForGeneration
 } from "../output-safety.js";
+import { readFromSnapshot } from "./emit/snapshot-input.js";
 
 const IMPLEMENTATION_PROVIDER_TARGETS = new Set([
   "persistence-scaffold",
@@ -78,6 +79,15 @@ function targetRequiresImplementationProvider(target) {
  */
 export async function runEmitCommand(options) {
   const ast = parsePath(options.inputPath);
+  let fromSnapshot = null;
+  if (options.fromSnapshotPath) {
+    const parsedSnapshot = readFromSnapshot(options.fromSnapshotPath);
+    if (!parsedSnapshot.ok) {
+      console.error(parsedSnapshot.message);
+      return 1;
+    }
+    fromSnapshot = parsedSnapshot.snapshot;
+  }
   const explicitProjectConfig = loadProjectConfig(options.projectRoot) || loadProjectConfig(options.inputPath);
   const shouldLoadImplementation = targetRequiresImplementationProvider(options.target) &&
     (!IMPLEMENTATION_OPTIONAL_TARGETS.has(options.target) || Boolean(explicitProjectConfig?.config?.implementation));
@@ -106,7 +116,7 @@ export async function runEmitCommand(options) {
     target: options.target,
     ...(options.selectors || {}),
     profileId: options.profileId,
-    fromSnapshot
+    fromSnapshot,
     fromSnapshotPath: options.fromSnapshotPath,
     fromTopogramPath: options.fromTopogramPath,
     topogramInputPath: topogramInputPathForGeneration(options.inputPath),
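emit now fails fast: a bad snapshot short-circuits before any generation work, printing the validation message and returning exit code 1. A hedged call sketch follows; the option values are invented, and the real command surely accepts more options than shown here:

import { runEmitCommand } from "./emit.js";

const exitCode = await runEmitCommand({
  inputPath: "app.topo",               // hypothetical input file
  target: "persistence-scaffold",
  fromSnapshotPath: "db-snapshot.json" // parsed and validated via readFromSnapshot first
});
// exitCode === 1 when the snapshot is unreadable, unparseable, or fails the
// db_schema_snapshot checks; the message goes to stderr.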
package/src/github-client.js
CHANGED
@@ -2,9 +2,12 @@
 
 import childProcess from "node:child_process";
 
+import { remotePayloadMaxBytes } from "./remote-payload-limits.js";
+
 const DEFAULT_GITHUB_API_BASE_URL = "https://api.github.com";
 const GITHUB_REST_SCRIPT = `
 const request = JSON.parse(process.argv[1]);
+const maxBytes = Number.parseInt(String(request.maxBytes || ""), 10) || 5242880;
 const base = String(request.baseUrl || "https://api.github.com").replace(/\\/+$/, "") + "/";
 const path = String(request.path || "").replace(/^\\/+/, "");
 const url = new URL(path, base);
@@ -24,6 +27,39 @@ const headers = {
 if (request.token && canAttachToken(url)) {
   headers.authorization = "Bearer " + request.token;
 }
+async function readResponseText(response) {
+  const declaredLength = Number.parseInt(response.headers.get("content-length") || "", 10);
+  if (Number.isFinite(declaredLength) && declaredLength > maxBytes) {
+    throw new Error("GitHub REST response exceeded " + maxBytes + " byte limit.");
+  }
+  if (!response.body) {
+    const text = await response.text();
+    if (Buffer.byteLength(text, "utf8") > maxBytes) {
+      throw new Error("GitHub REST response exceeded " + maxBytes + " byte limit.");
+    }
+    return text;
+  }
+  const reader = response.body.getReader();
+  const decoder = new TextDecoder();
+  const chunks = [];
+  let total = 0;
+  while (true) {
+    const { value, done } = await reader.read();
+    if (done) {
+      break;
+    }
+    total += value.byteLength;
+    if (total > maxBytes) {
+      try {
+        await reader.cancel();
+      } catch {}
+      throw new Error("GitHub REST response exceeded " + maxBytes + " byte limit.");
+    }
+    chunks.push(decoder.decode(value, { stream: true }));
+  }
+  chunks.push(decoder.decode());
+  return chunks.join("");
+}
 if (process.env.TOPOGRAM_GITHUB_API_FIXTURE_ROOT) {
   const fs = await import("node:fs");
   const pathModule = await import("node:path");
@@ -50,6 +86,11 @@ if (process.env.TOPOGRAM_GITHUB_API_FIXTURE_ROOT) {
   }));
   process.exit(2);
 }
+const fixtureSize = fs.statSync(fixturePath).size;
+if (fixtureSize > maxBytes) {
+  process.stderr.write("GitHub REST fixture response exceeded " + maxBytes + " byte limit.");
+  process.exit(1);
+}
 process.stdout.write(JSON.stringify({
   status: 200,
   body: fs.readFileSync(fixturePath, "utf8"),
@@ -59,7 +100,7 @@ if (process.env.TOPOGRAM_GITHUB_API_FIXTURE_ROOT) {
 }
 try {
   const response = await fetch(url, { headers });
-  const text = await response
+  const text = await readResponseText(response);
   if (!response.ok) {
     process.stderr.write(JSON.stringify({
       status: response.status,
@@ -150,6 +191,11 @@ function shouldUseRestApi() {
  * @returns {any}
  */
 function githubRequestJson(path, options = {}) {
+  const maxBytes = remotePayloadMaxBytes(
+    ["TOPOGRAM_GITHUB_FETCH_MAX_BYTES", "TOPOGRAM_REMOTE_FETCH_MAX_BYTES"],
+    undefined,
+    ["githubFetchMaxBytes", "remoteFetchMaxBytes"]
+  );
   const result = childProcess.spawnSync(process.execPath, [
     "--input-type=module",
     "-e",
@@ -158,10 +204,12 @@ function githubRequestJson(path, options = {}) {
       baseUrl: githubApiBaseUrl(),
       path,
       query: options.query || {},
-      token: githubTokenFromEnv() || ""
+      token: githubTokenFromEnv() || "",
+      maxBytes
     })
   ], {
     encoding: "utf8",
+    maxBuffer: (maxBytes * 2) + 8192,
     env: {
       ...process.env,
       PATH: process.env.PATH || ""
@@ -479,9 +527,15 @@ function normalizeWorkflowJob(job) {
  * @returns {ReturnType<typeof childProcess.spawnSync>}
  */
 function runGh(args, cwd = process.cwd()) {
+  const maxBytes = remotePayloadMaxBytes(
+    ["TOPOGRAM_GITHUB_FETCH_MAX_BYTES", "TOPOGRAM_REMOTE_FETCH_MAX_BYTES"],
+    undefined,
+    ["githubFetchMaxBytes", "remoteFetchMaxBytes"]
+  );
   return childProcess.spawnSync("gh", args, {
     cwd,
     encoding: "utf8",
+    maxBuffer: maxBytes + 4096,
     env: {
       ...process.env,
       GH_TOKEN: process.env.GH_TOKEN || process.env.GITHUB_TOKEN || "",
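One detail worth noting: the two subprocess paths budget stdout differently. The REST helper returns the body wrapped in JSON.stringify, whose escaping can inflate the payload, while gh prints it verbatim. A sketch of the sizing arithmetic as this diff applies it; the rationale is a plausible reading of the code, not documented behavior:

// Buffer budgets from this diff, with the likely reasoning as comments.
const maxBytes = 5 * 1024 * 1024;            // default cap
const restMaxBuffer = (maxBytes * 2) + 8192; // JSON-escaped body plus envelope slack
const ghMaxBuffer = maxBytes + 4096;         // raw gh output plus small slack
console.log({ restMaxBuffer, ghMaxBuffer });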
package/src/import/core/shared/files.js
CHANGED
@@ -50,14 +50,35 @@ export function listFilesRecursive(rootDir, predicate = () => true, options = {}
     return [];
   }
   const ignoredDirs = options.ignoredDirs || DEFAULT_IGNORED_DIRS;
+  let rootRealPath;
+  try {
+    rootRealPath = fs.realpathSync(rootDir);
+  } catch {
+    return [];
+  }
+  const visitedDirs = new Set([rootRealPath]);
   const files = /** @type {any[]} */ ([]);
   const walk = /** @param {any} currentDir */ (currentDir) => {
     for (const entry of fs.readdirSync(currentDir, { withFileTypes: true })) {
       const childPath = path.join(currentDir, entry.name);
+      if (entry.isSymbolicLink()) {
+        continue;
+      }
       if (entry.isDirectory()) {
         if (ignoredDirs.has(entry.name)) {
           continue;
         }
+        let childRealPath;
+        try {
+          childRealPath = fs.realpathSync(childPath);
+        } catch {
+          continue;
+        }
+        const relativeToRoot = path.relative(rootRealPath, childRealPath);
+        if (relativeToRoot.startsWith("..") || path.isAbsolute(relativeToRoot) || visitedDirs.has(childRealPath)) {
+          continue;
+        }
+        visitedDirs.add(childRealPath);
         walk(childPath);
         continue;
       }
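The recursive walker previously followed directory symlinks, so a link pointing back into an ancestor could loop forever or escape the root. The new guard skips symlinks outright and deduplicates resolved real paths. A sketch of the scenario it closes; the temp paths are illustrative:

// Build a directory with a self-referencing symlink; the patched walker skips
// "loop" because entry.isSymbolicLink() is checked before any recursion.
import fs from "node:fs";
import os from "node:os";
import path from "node:path";

const root = fs.mkdtempSync(path.join(os.tmpdir(), "walk-"));
fs.writeFileSync(path.join(root, "a.txt"), "x");
fs.symlinkSync(root, path.join(root, "loop"), "dir"); // cycle back to root
// listFilesRecursive(root) now terminates and reports only a.txt.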
package/src/remote-payload-limits.js
ADDED
@@ -0,0 +1,40 @@
+// @ts-check
+
+import { topogramRuntimeConfig } from "./topogram-config.js";
+
+export const DEFAULT_REMOTE_FETCH_MAX_BYTES = 5 * 1024 * 1024;
+
+/**
+ * @param {string|null|undefined} value
+ * @returns {number|null}
+ */
+function parsePositiveInteger(value) {
+  if (!value) {
+    return null;
+  }
+  const parsed = Number.parseInt(String(value), 10);
+  return Number.isFinite(parsed) && parsed > 0 ? parsed : null;
+}
+
+/**
+ * @param {string[]} envNames
+ * @param {number} [fallback]
+ * @param {Array<"remoteFetchMaxBytes"|"catalogFetchMaxBytes"|"githubFetchMaxBytes">} [configKeys]
+ * @returns {number}
+ */
+export function remotePayloadMaxBytes(envNames, fallback = DEFAULT_REMOTE_FETCH_MAX_BYTES, configKeys = []) {
+  for (const envName of envNames) {
+    const parsed = parsePositiveInteger(process.env[envName]);
+    if (parsed) {
+      return parsed;
+    }
+  }
+  const limits = topogramRuntimeConfig(process.cwd()).limits;
+  for (const configKey of configKeys) {
+    const parsed = parsePositiveInteger(String(limits[configKey] || ""));
+    if (parsed) {
+      return parsed;
+    }
+  }
+  return fallback;
+}
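Resolution order is explicit: each env name in order, then each config key in order, then the fallback. A small sketch of that precedence using the same arguments github-client.js passes; the exported names come from this diff:

import { remotePayloadMaxBytes } from "./remote-payload-limits.js";

process.env.TOPOGRAM_REMOTE_FETCH_MAX_BYTES = "1048576"; // 1 MiB, for illustration
const limit = remotePayloadMaxBytes(
  ["TOPOGRAM_GITHUB_FETCH_MAX_BYTES", "TOPOGRAM_REMOTE_FETCH_MAX_BYTES"],
  undefined,
  ["githubFetchMaxBytes", "remoteFetchMaxBytes"]
);
console.log(limit); // 1048576: the second env name wins before any config key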
package/src/template-trust/constants.js
CHANGED
@@ -35,7 +35,7 @@ export function unsupportedImplementationSymlinkMessage(relativePath) {
  * @returns {string}
  */
 export function implementationOutsideRootMessage(modulePath) {
-  return `Template implementation module '${modulePath}' must be under implementation
+  return `Template implementation module '${modulePath}' must be under implementation/. Keep executable template code inside implementation/ so the trust record covers what topogram generate may load. Move the module back under implementation/, then run ${TRUST_REVIEW_COMMANDS} after review.`;
 }
 
 /**
package/src/template-trust/policy.js
CHANGED
@@ -43,10 +43,9 @@ export function projectHasTemplateAttachment(projectConfig) {
  * @returns {boolean}
  */
 export function implementationRequiresTrust(implementationInfo, projectConfig = null) {
-
-
-
-  return isSameOrInside(implementationRoot, modulePath) || projectHasTemplateAttachment(projectConfig);
+  void projectConfig;
+  implementationTrustFingerprint(implementationInfo.config);
+  return true;
 }
 
 /**
package/src/template-trust/status.js
CHANGED
@@ -16,8 +16,7 @@ import {
 import {
   implementationModuleIsUnderRoot,
   implementationRequiresTrust,
-  implementationTrustFingerprint
-  projectHasTemplateAttachment
+  implementationTrustFingerprint
 } from "./policy.js";
 import { readTemplateTrustRecord } from "./record.js";
 
@@ -44,7 +43,6 @@ export function assertTrustedImplementation(implementationInfo, projectConfig =
  * @returns {{ ok: boolean, requiresTrust: boolean, trustPath: string, trustRecord: import("./record.js").TemplateTrustRecord|null, template: { id: string|null, version: string|null, source: string|null, sourceSpec: string|null, requested: string|null, sourceRoot: string|null, catalog?: Record<string, any>|null, includesExecutableImplementation: boolean|null }, implementation: { id: string|null, module: string|null, export: string|null }, content: { trustedDigest: string|null, currentDigest: string|null, added: string[], removed: string[], changed: string[] }, issues: string[] }}
  */
 export function getTemplateTrustStatus(implementationInfo, projectConfig = null) {
-  const templateAttached = projectHasTemplateAttachment(projectConfig);
   if (!implementationRequiresTrust(implementationInfo, projectConfig)) {
     return {
       ok: true,
@@ -68,7 +66,7 @@ export function getTemplateTrustStatus(implementationInfo, projectConfig = null)
   /** @type {{ trustedDigest: string|null, currentDigest: string|null, added: string[], removed: string[], changed: string[] }} */
   const contentStatus = { trustedDigest: null, currentDigest: null, added: [], removed: [], changed: [] };
 
-  if (
+  if (!moduleInsideImplementation) {
     issues.push(implementationOutsideRootMessage(fingerprint.module));
   }
 
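Net effect of the template-trust changes: implementationRequiresTrust no longer depends on where the module lives or whether a template is attached; it always returns true, and an out-of-root module is now surfaced as a trust issue instead of bypassing the check. A sketch of the new contract; the implementationInfo object here is a placeholder shape, not the real loaded-template structure:

// After this diff, trust is unconditional (a reading of the new policy.js).
import { implementationRequiresTrust } from "./policy.js";

const implementationInfo = { config: {} }; // placeholder for illustration only
console.log(implementationRequiresTrust(implementationInfo)); // true, always
// getTemplateTrustStatus therefore always evaluates the trust record, and
// pushes implementationOutsideRootMessage(...) when the module sits outside
// implementation/.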
package/src/topogram-config.js
CHANGED
@@ -83,6 +83,11 @@ export const DEFAULT_TOPOGRAM_CONFIG = {
     consumers: DEFAULT_RELEASE_CONSUMER_REPOS,
     workflows: DEFAULT_RELEASE_CONSUMER_WORKFLOWS,
     workflowJobs: DEFAULT_RELEASE_CONSUMER_WORKFLOW_JOBS
+  },
+  limits: {
+    remoteFetchMaxBytes: 5 * 1024 * 1024,
+    catalogFetchMaxBytes: null,
+    githubFetchMaxBytes: null
   }
 };
 
@@ -93,6 +98,7 @@ export const DEFAULT_CATALOG_SOURCE = `https://raw.githubusercontent.com/${DEFAU
  * @property {{ owner: string, repo: string }} github
  * @property {{ owner: string, repo: string, ref: string, path: string, source: string|null }} catalog
  * @property {{ consumers: string[], workflows: Record<string, string>, workflowJobs: Record<string, string[]> }} release
+ * @property {{ remoteFetchMaxBytes: number, catalogFetchMaxBytes: number|null, githubFetchMaxBytes: number|null }} limits
 */
 
 /**
@@ -154,6 +160,18 @@ function parseJsonEnv(value) {
   return JSON.parse(value);
 }
 
+/**
+ * @param {string|null|undefined} value
+ * @returns {number|null}
+ */
+function parsePositiveIntegerEnv(value) {
+  if (!value) {
+    return null;
+  }
+  const parsed = Number.parseInt(String(value), 10);
+  return Number.isFinite(parsed) && parsed > 0 ? parsed : null;
+}
+
 /**
  * @param {Record<string, any>} fileConfig
  * @returns {Record<string, any>}
@@ -178,6 +196,11 @@ function envConfig(fileConfig = {}) {
       consumers: consumers || fileConfig.release?.consumers,
       workflows: workflows || fileConfig.release?.workflows,
       workflowJobs: workflowJobs || fileConfig.release?.workflowJobs
+    },
+    limits: {
+      remoteFetchMaxBytes: parsePositiveIntegerEnv(process.env.TOPOGRAM_REMOTE_FETCH_MAX_BYTES) || fileConfig.limits?.remoteFetchMaxBytes,
+      catalogFetchMaxBytes: parsePositiveIntegerEnv(process.env.TOPOGRAM_CATALOG_FETCH_MAX_BYTES) || fileConfig.limits?.catalogFetchMaxBytes,
+      githubFetchMaxBytes: parsePositiveIntegerEnv(process.env.TOPOGRAM_GITHUB_FETCH_MAX_BYTES) || fileConfig.limits?.githubFetchMaxBytes
     }
   };
 }
@@ -235,6 +258,16 @@ function normalizeStringListMap(value, fallback) {
   return output;
 }
 
+/**
+ * @param {unknown} value
+ * @param {number|null} fallback
+ * @returns {number|null}
+ */
+function normalizePositiveInteger(value, fallback) {
+  const parsed = Number.parseInt(String(value || ""), 10);
+  return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback;
+}
+
 /**
  * @param {string} cwd
  * @returns {TopogramRuntimeConfig}
@@ -258,6 +291,20 @@ export function topogramRuntimeConfig(cwd = process.cwd()) {
       consumers: normalizeStringList(overrides.release.consumers, DEFAULT_TOPOGRAM_CONFIG.release.consumers),
       workflows: normalizeStringMap(overrides.release.workflows, DEFAULT_TOPOGRAM_CONFIG.release.workflows),
       workflowJobs: normalizeStringListMap(overrides.release.workflowJobs, DEFAULT_TOPOGRAM_CONFIG.release.workflowJobs)
+    },
+    limits: {
+      remoteFetchMaxBytes: normalizePositiveInteger(
+        overrides.limits.remoteFetchMaxBytes,
+        DEFAULT_TOPOGRAM_CONFIG.limits.remoteFetchMaxBytes
+      ) || DEFAULT_TOPOGRAM_CONFIG.limits.remoteFetchMaxBytes,
+      catalogFetchMaxBytes: normalizePositiveInteger(
+        overrides.limits.catalogFetchMaxBytes,
+        DEFAULT_TOPOGRAM_CONFIG.limits.catalogFetchMaxBytes
+      ),
+      githubFetchMaxBytes: normalizePositiveInteger(
+        overrides.limits.githubFetchMaxBytes,
+        DEFAULT_TOPOGRAM_CONFIG.limits.githubFetchMaxBytes
+      )
     }
   };
 }
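With no overrides, the resolved limits block keeps the 5 MiB remote default and leaves the per-channel caps unset, so the call sites in remotePayloadMaxBytes fall through from catalogFetchMaxBytes or githubFetchMaxBytes to remoteFetchMaxBytes. A sketch of inspecting the resolved values; the names come from this diff:

import { topogramRuntimeConfig } from "./topogram-config.js";

const { limits } = topogramRuntimeConfig(process.cwd());
console.log(limits);
// => { remoteFetchMaxBytes: 5242880, catalogFetchMaxBytes: null, githubFetchMaxBytes: null }
// TOPOGRAM_REMOTE_FETCH_MAX_BYTES, TOPOGRAM_CATALOG_FETCH_MAX_BYTES, and
// TOPOGRAM_GITHUB_FETCH_MAX_BYTES take precedence over file config values.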
package/src/workflows/reconcile/adoption-plan/outputs.js
CHANGED
@@ -12,6 +12,27 @@ import { readJsonIfExists, readTextIfExists } from "../../shared.js";
 import { canonicalRelativePathForItem } from "./paths.js";
 import { applyProjectionAuthPatchToTopogram } from "./projection-patches.js";
 
+/** @param {string} rootDir @param {string} relativePath @param {string} fieldName @returns {{ absolutePath: string, relativePath: string }} */
+function resolveContainedTopoPath(rootDir, relativePath, fieldName) {
+  const rawPath = String(relativePath || "").replaceAll("\\", "/");
+  if (!rawPath.trim()) {
+    throw new Error(`Adoption plan ${fieldName} must be a non-empty relative path.`);
+  }
+  if (rawPath.includes("\0") || path.isAbsolute(rawPath) || /^[A-Za-z]:\//.test(rawPath)) {
+    throw new Error(`Adoption plan ${fieldName} must be relative to the topo workspace: ${relativePath}`);
+  }
+  const absoluteRoot = path.resolve(rootDir);
+  const absolutePath = path.resolve(absoluteRoot, rawPath);
+  const relativeToRoot = path.relative(absoluteRoot, absolutePath);
+  if (!relativeToRoot || relativeToRoot.startsWith("..") || path.isAbsolute(relativeToRoot)) {
+    throw new Error(`Adoption plan ${fieldName} escapes the topo workspace: ${relativePath}`);
+  }
+  return {
+    absolutePath,
+    relativePath: relativeToRoot.replaceAll(path.sep, "/")
+  };
+}
+
 /** @param {WorkspacePaths} paths @returns {any} */
 export function readAdoptionPlan(paths) {
   return readJsonIfExists(path.join(paths.topogramRoot, "candidates", "reconcile", "adoption-plan.json"));
@@ -55,11 +76,15 @@ export function buildCanonicalAdoptionOutputs(paths, candidateFiles, planItems,
     if (item.suggested_action === "skip_duplicate_shape") {
       continue;
     }
-    const
-    if (!
+    const rawRelativeCanonicalPath = item.canonical_rel_path || canonicalRelativePathForItem(item.kind, item.item);
+    if (!rawRelativeCanonicalPath) {
       continue;
     }
-    const canonicalPath =
+    const { absolutePath: canonicalPath, relativePath: relativeCanonicalPath } = resolveContainedTopoPath(
+      paths.topogramRoot,
+      rawRelativeCanonicalPath,
+      "canonical_rel_path"
+    );
     if (item.suggested_action === "apply_doc_link_patch") {
       const baseContents = files[relativeCanonicalPath] || (fs.existsSync(canonicalPath) ? fs.readFileSync(canonicalPath, "utf8") : null);
       if (!baseContents) {
@@ -113,7 +138,9 @@ export function buildCanonicalAdoptionOutputs(paths, candidateFiles, planItems,
     }
     const candidateContents =
       candidateFiles[item.source_path] ||
-      (item.source_path
+      (item.source_path
+        ? readTextIfExists(resolveContainedTopoPath(paths.topogramRoot, item.source_path, "source_path").absolutePath)
+        : null);
     if (!candidateContents) {
       continue;
     }
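resolveContainedTopoPath centralizes the workspace-containment rule for both canonical_rel_path and source_path values coming out of a plan file. Worked examples of the contract, with paths invented for illustration:

// Accepted: stays inside the workspace root.
// resolveContainedTopoPath("/ws/topo", "pitches/p1.tg", "canonical_rel_path")
//   -> { absolutePath: "/ws/topo/pitches/p1.tg", relativePath: "pitches/p1.tg" }
// Rejected: traversal out of the root.
// resolveContainedTopoPath("/ws/topo", "../escape.tg", "canonical_rel_path")
//   -> throws "Adoption plan canonical_rel_path escapes the topo workspace: ../escape.tg"
// Rejected: absolute or drive-letter paths.
// resolveContainedTopoPath("/ws/topo", "C:/evil.tg", "source_path")
//   -> throws "Adoption plan source_path must be relative to the topo workspace: C:/evil.tg"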
package/src/workflows/shared.js
CHANGED
@@ -79,15 +79,36 @@ export function listFilesRecursive(rootDir, predicate = () => true, options = {}
     return [];
   }
   const ignoredDirs = options.ignoredDirs || DEFAULT_IGNORED_DIRS;
+  let rootRealPath;
+  try {
+    rootRealPath = fs.realpathSync(rootDir);
+  } catch {
+    return [];
+  }
+  const visitedDirs = new Set([rootRealPath]);
   /** @type {any[]} */
   const files = [];
   const walk = (/** @type {any} */ currentDir) => {
     for (const entry of fs.readdirSync(currentDir, { withFileTypes: true })) {
       const childPath = path.join(currentDir, entry.name);
+      if (entry.isSymbolicLink()) {
+        continue;
+      }
       if (entry.isDirectory()) {
         if (ignoredDirs.has(entry.name)) {
           continue;
         }
+        let childRealPath;
+        try {
+          childRealPath = fs.realpathSync(childPath);
+        } catch {
+          continue;
+        }
+        const relativeToRoot = path.relative(rootRealPath, childRealPath);
+        if (relativeToRoot.startsWith("..") || path.isAbsolute(relativeToRoot) || visitedDirs.has(childRealPath)) {
+          continue;
+        }
+        visitedDirs.add(childRealPath);
         walk(childPath);
         continue;
       }