@a5c-ai/git-a5c 1.0.12 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/args.d.ts +20 -0
- package/dist/args.d.ts.map +1 -1
- package/dist/args.js +47 -0
- package/dist/args.js.map +1 -1
- package/dist/commands/agent.d.ts.map +1 -1
- package/dist/commands/agent.js +19 -0
- package/dist/commands/agent.js.map +1 -1
- package/dist/commands/agentGenerateContext.d.ts +16 -0
- package/dist/commands/agentGenerateContext.d.ts.map +1 -0
- package/dist/commands/agentGenerateContext.js +965 -0
- package/dist/commands/agentGenerateContext.js.map +1 -0
- package/dist/commands/agentRun.d.ts +3 -0
- package/dist/commands/agentRun.d.ts.map +1 -0
- package/dist/commands/agentRun.js +210 -0
- package/dist/commands/agentRun.js.map +1 -0
- package/dist/commands/block.d.ts.map +1 -1
- package/dist/commands/block.js +6 -0
- package/dist/commands/block.js.map +1 -1
- package/dist/commands/gate.d.ts.map +1 -1
- package/dist/commands/gate.js +6 -0
- package/dist/commands/gate.js.map +1 -1
- package/dist/commands/help.d.ts.map +1 -1
- package/dist/commands/help.js +13 -0
- package/dist/commands/help.js.map +1 -1
- package/dist/commands/hookExec.d.ts +3 -0
- package/dist/commands/hookExec.d.ts.map +1 -0
- package/dist/commands/hookExec.js +56 -0
- package/dist/commands/hookExec.js.map +1 -0
- package/dist/commands/hooks.d.ts.map +1 -1
- package/dist/commands/hooks.js +3 -1
- package/dist/commands/hooks.js.map +1 -1
- package/dist/commands/issue.d.ts.map +1 -1
- package/dist/commands/issue.js +21 -0
- package/dist/commands/issue.js.map +1 -1
- package/dist/commands/ops.d.ts.map +1 -1
- package/dist/commands/ops.js +6 -0
- package/dist/commands/ops.js.map +1 -1
- package/dist/commands/parse.d.ts +31 -0
- package/dist/commands/parse.d.ts.map +1 -0
- package/dist/commands/parse.js +286 -0
- package/dist/commands/parse.js.map +1 -0
- package/dist/commands/pr.d.ts.map +1 -1
- package/dist/commands/pr.js +21 -0
- package/dist/commands/pr.js.map +1 -1
- package/dist/commands/runOrchestration.d.ts +3 -0
- package/dist/commands/runOrchestration.d.ts.map +1 -0
- package/dist/commands/runOrchestration.js +281 -0
- package/dist/commands/runOrchestration.js.map +1 -0
- package/dist/git.d.ts +7 -0
- package/dist/git.d.ts.map +1 -1
- package/dist/git.js +46 -0
- package/dist/git.js.map +1 -1
- package/dist/orchestration/hookExecCore.d.ts +22 -0
- package/dist/orchestration/hookExecCore.d.ts.map +1 -0
- package/dist/orchestration/hookExecCore.js +380 -0
- package/dist/orchestration/hookExecCore.js.map +1 -0
- package/dist/orchestration/reconcileCore.d.ts +27 -0
- package/dist/orchestration/reconcileCore.d.ts.map +1 -0
- package/dist/orchestration/reconcileCore.js +183 -0
- package/dist/orchestration/reconcileCore.js.map +1 -0
- package/dist/orchestration/runEventWriter.d.ts +11 -0
- package/dist/orchestration/runEventWriter.d.ts.map +1 -0
- package/dist/orchestration/runEventWriter.js +44 -0
- package/dist/orchestration/runEventWriter.js.map +1 -0
- package/dist/orchestration/sweepCore.d.ts +11 -0
- package/dist/orchestration/sweepCore.d.ts.map +1 -0
- package/dist/orchestration/sweepCore.js +96 -0
- package/dist/orchestration/sweepCore.js.map +1 -0
- package/dist/run.d.ts.map +1 -1
- package/dist/run.js +23 -0
- package/dist/run.js.map +1 -1
- package/dist/sync.d.ts +13 -0
- package/dist/sync.d.ts.map +1 -0
- package/dist/sync.js +58 -0
- package/dist/sync.js.map +1 -0
- package/dist/util/githubContents.d.ts +15 -0
- package/dist/util/githubContents.d.ts.map +1 -0
- package/dist/util/githubContents.js +77 -0
- package/dist/util/githubContents.js.map +1 -0
- package/dist/util/redact.d.ts +24 -0
- package/dist/util/redact.d.ts.map +1 -0
- package/dist/util/redact.js +109 -0
- package/dist/util/redact.js.map +1 -0
- package/dist/util/uri.d.ts +12 -0
- package/dist/util/uri.d.ts.map +1 -0
- package/dist/util/uri.js +61 -0
- package/dist/util/uri.js.map +1 -0
- package/package.json +10 -5
|
@@ -0,0 +1,965 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { fileURLToPath } from "node:url";
|
|
4
|
+
import { minimatch } from "minimatch";
|
|
5
|
+
import { stringify as yamlStringify } from "yaml";
|
|
6
|
+
import { createLogger, parseLogLevel } from "@a5c-ai/sdk";
|
|
7
|
+
import { git } from "../git.js";
|
|
8
|
+
import { DEFAULT_MASK, redactObject } from "../util/redact.js";
|
|
9
|
+
import { fetchGithubFileContents, listGithubFilesRecursive } from "../util/githubContents.js";
|
|
10
|
+
/**
 * CLI entry point for `a5c agent generate-context`.
 * Renders the context template, then routes the output: to `--out` (created
 * with parent dirs) when given, otherwise to stdout. Errors go to stderr.
 * @param {object} args - parsed CLI args ({ positionals, flags, repoRoot, io })
 * @returns {Promise<number|undefined>} exit code, or undefined when the
 *          positionals do not select this subcommand.
 */
export async function handleAgentGenerateContext(args) {
    const [first, second] = args.positionals;
    if (first !== "agent" || second !== "generate-context")
        return;
    const { flags, repoRoot, io } = args;
    const res = await handleGenerateContext({
        in: flags.in,
        template: flags.template,
        out: flags.out,
        vars: flags.vars || {},
        token: flags.token,
        repoRoot,
    });
    if (res.output) {
        if (flags.out) {
            // Write the rendered context to the requested file, creating parents.
            const target = path.isAbsolute(flags.out) ? flags.out : path.resolve(repoRoot, flags.out);
            fs.mkdirSync(path.dirname(target), { recursive: true });
            fs.writeFileSync(target, res.output, "utf8");
        }
        else {
            io.writeLine(io.out, res.output);
        }
    }
    if (res.errorMessage)
        io.writeLine(io.err, res.errorMessage);
    return res.code;
}
|
|
35
|
+
/**
 * Render the agent context template for a parsed event payload.
 * Reads the event JSON (file or stdin), builds the template context
 * ({ event, env, vars, token, repoRoot }), and renders the root template.
 * @param {object} opts - { in?, template?, out?, vars?, token?, repoRoot? }
 * @returns {Promise<{code: number, output?: string, errorMessage?: string}>}
 *          code 0 with rendered output, or code 1 with the error message.
 * Never throws: every failure is converted into a { code: 1 } result.
 */
export async function handleGenerateContext(opts) {
    try {
        const repoRoot = opts.repoRoot ?? process.cwd();
        const input = await readInput(opts.in, repoRoot);
        // Token precedence: explicit option > A5C_AGENT_GITHUB_TOKEN > GITHUB_TOKEN.
        const token = opts.token ||
            process.env.A5C_AGENT_GITHUB_TOKEN ||
            process.env.GITHUB_TOKEN;
        // Default root template when --template is not supplied.
        const rootUri = opts.template || ".a5c/main.md";
        const originalEvent = input.original_event || {};
        // original_event keys deliberately win over the top-level payload keys.
        const new_input = { ...input, ...originalEvent };
        // sanitizeEventForTemplate is defined elsewhere in this module.
        const eventForTpl = sanitizeEventForTemplate(new_input);
        dbg("generate:begin", {
            in: opts.in || "stdin",
            template: rootUri,
            token_present: !!token,
        });
        // The same context object shape is built twice: once to expand ${{ }}
        // expressions inside the root URI itself, once for rendering its body.
        const rendered = await renderTemplate(expandDollarExpressions(rootUri, {
            event: eventForTpl,
            env: process.env,
            vars: opts.vars || {},
            token,
            repoRoot,
        }), {
            event: eventForTpl,
            env: process.env,
            vars: opts.vars || {},
            token,
            repoRoot,
        });
        dbg("generate:done", { bytes: rendered?.length || 0 });
        return { code: 0, output: rendered };
    }
    catch (e) {
        return { code: 1, errorMessage: String(e?.message || e) };
    }
}
|
|
71
|
+
/**
 * Read the JSON event payload either from `inPath` (resolved against
 * `repoRoot` when relative) or, when no path is given, from stdin (fd 0).
 * Throws on unreadable input or invalid JSON.
 */
async function readInput(inPath, repoRoot) {
    let source;
    if (inPath) {
        source = path.isAbsolute(inPath) ? inPath : path.resolve(repoRoot, inPath);
    }
    else {
        source = 0; // file descriptor 0 = stdin
    }
    const raw = fs.readFileSync(source, "utf8");
    return JSON.parse(raw);
}
|
|
79
|
+
// Module-level logger for this command; silent unless A5C_LOG_LEVEL is set.
const logger = createLogger({ base: { component: "cli.generateContext" }, level: parseLogLevel(process.env.A5C_LOG_LEVEL ?? "silent") });
// Debug-trace shorthand used throughout this file.
const dbg = (msg, ctx) => logger.debug(msg, ctx);
|
|
81
|
+
/**
 * Fetch the resource at `uri` (resolved relative to `base`) and render its
 * contents as a template, with `uri` becoming the base for nested includes.
 */
async function renderTemplate(uri, ctx, base) {
    dbg("renderTemplate", { uri, base });
    const template = await fetchResource(uri, ctx, base);
    const rendered = await renderString(template, ctx, uri);
    return rendered;
}
|
|
86
|
+
/**
 * Render one template string against `ctx` ({ event, env, vars, token, repoRoot }).
 * Passes are applied in a fixed order:
 *   1. expand ${{ ... }} expressions (so include URIs may embed them),
 *   2. legacy includes {{> uri [k=v ...] }},
 *   3. new-style includes {{#include uri [k=v ...] }},
 *   4. printers {{#printYAML|#printJSON|#printXML|#print expr}},
 *   5. sections {{#if expr}}...{{/if}} and {{#each expr}}...{{/each}},
 *   6. plain {{ expr }} substitution.
 * `currentUri` is the URI of the template being rendered; relative includes
 * resolve against it. Include and printer failures render as "".
 * NOTE(review): expandDollarExpressions, expandCurlyExpressionsForUri,
 * evalExpr, printYAML/printJSON/printXML and isThenable are defined elsewhere
 * in this module (not visible in this chunk).
 */
async function renderString(tpl, ctx, currentUri) {
    // Includes: legacy {{> uri }} or {{> uri key=value }}
    // Support quoted URIs and inline expressions inside the URI (both ${{ }} and {{ }})
    let out = tpl;
    // Expand all ${{ }} first to avoid tokenizer conflicts inside include URIs
    try {
        out = expandDollarExpressions(out, ctx);
    }
    catch { }
    // URI may be double-quoted, single-quoted, or bare; trailing token run is key=value args.
    const includeRe = /\{\{>\s*(?:"([^"]+)"|'([^']+)'|([^}]+?))(\s+[^}]*)?\}\}/g;
    out = await replaceAsync(out, includeRe, async (_m, g1, g2, g3, args) => {
        const rawUri = g1 || g2 || g3 || "";
        const argVars = parseArgs(args || "");
        // Per-include vars overlay the parent context's vars for the subtree.
        const merged = {
            ...ctx,
            vars: { ...ctx.vars, ...argVars },
        };
        const afterDollar = expandDollarExpressions(rawUri, merged);
        const afterCurly = expandCurlyExpressionsForUri(afterDollar, merged, currentUri);
        // Markdown-escaped glob chars (e.g. \*) become real glob chars in the URI.
        const finalUri = unescapeGlobMeta(afterCurly).trim();
        dbg("include:legacy", {
            raw: rawUri,
            afterDollar,
            dynUri: afterCurly,
            finalUri,
            base: currentUri,
        });
        try {
            const included = await renderTemplate(finalUri, merged, currentUri);
            return included;
        }
        catch {
            // Graceful on missing file(s)
            return "";
        }
    });
    // Includes: new {{#include uri [key=value] }} with quoted URIs and inline expressions
    const includeHashRe = /\{\{#include\s*(?:"([^"]+)"|'([^']+)'|([^}]+?))(\s+[^}]*)?\}\}/g;
    out = await replaceAsync(out, includeHashRe, async (_m, g1, g2, g3, args) => {
        const rawUri = g1 || g2 || g3 || "";
        const argVars = parseArgs(args || "");
        const merged = {
            ...ctx,
            vars: { ...ctx.vars, ...argVars },
        };
        const afterDollar = expandDollarExpressions(rawUri, merged);
        const afterCurly = expandCurlyExpressionsForUri(afterDollar, merged, currentUri);
        const finalUri = unescapeGlobMeta(afterCurly).trim();
        dbg("include:hash", {
            raw: rawUri,
            afterDollar,
            dynUri: afterCurly,
            finalUri,
            base: currentUri,
        });
        try {
            const included = await renderTemplate(finalUri, merged, currentUri);
            return included;
        }
        catch {
            // Graceful on missing file(s)
            return "";
        }
    });
    // Printers: single-tag {{#printYAML expr}} and {{#printJSON expr}}
    const printYamlRe = /\{\{#printYAML\s+([^}]+)\}\}/g;
    out = await replaceAsync(out, printYamlRe, async (_m, expr) => {
        try {
            const val = evalExpr(expr, ctx, currentUri);
            // Printer expressions may evaluate to a Promise; await before printing.
            const resolved = isThenable(val) ? await val : val;
            return printYAML(resolved);
        }
        catch {
            return "";
        }
    });
    const printJsonRe = /\{\{#printJSON\s+([^}]+)\}\}/g;
    out = await replaceAsync(out, printJsonRe, async (_m, expr) => {
        try {
            const val = evalExpr(expr, ctx, currentUri);
            const resolved = isThenable(val) ? await val : val;
            return printJSON(resolved);
        }
        catch {
            return "";
        }
    });
    const printXmlRe = /\{\{#printXML\s+([^}]+)\}\}/g;
    out = await replaceAsync(out, printXmlRe, async (_m, expr) => {
        try {
            const val = evalExpr(expr, ctx, currentUri);
            const resolved = isThenable(val) ? await val : val;
            return printXML(resolved);
        }
        catch {
            return "";
        }
    });
    const printRe = /\{\{#print\s+([^}]+)\}\}/g;
    out = await replaceAsync(out, printRe, async (_m, expr) => {
        try {
            const val = evalExpr(expr, ctx, currentUri);
            const resolved = isThenable(val) ? await val : val;
            return resolved == null ? "" : String(resolved);
        }
        catch {
            return "";
        }
    });
    // Sections: {{#if expr}}...{{/if}}
    // NOTE(review): the result of evalExpr is not awaited here, so a Promise
    // value is always truthy — confirm this is intended.
    out = await replaceSections(out, /\{\{#if\s+([^}]+)\}\}([\s\S]*?)\{\{\/if\}\}/g, async (expr, body) => {
        const ok = !!evalExpr(expr, ctx, currentUri);
        return ok ? await renderString(body, ctx, currentUri) : "";
    });
    // Each: {{#each expr}}...{{/each}}
    // NOTE(review): likewise not awaited — a Promise would iterate as [promise].
    out = await replaceSections(out, /\{\{#each\s+([^}]+)\}\}([\s\S]*?)\{\{\/each\}\}/g, async (expr, body) => {
        const arr = toArray(evalExpr(expr, ctx, currentUri));
        const parts = [];
        for (const item of arr) {
            // Each iteration exposes the current element as vars.this.
            const child = { ...ctx, vars: { ...ctx.vars, this: item } };
            parts.push(await renderString(body, child, currentUri));
        }
        return parts.join("");
    });
    // Variables: {{ expr }} — runs last, so it also consumes any tags the
    // earlier passes left behind.
    const varRe = /\{\{\s*([^}]+)\s*\}\}/g;
    out = await replaceAsync(out, varRe, async (_m, expr) => {
        try {
            const val = evalExpr(expr, ctx, currentUri);
            const resolved = isThenable(val) ? await val : val;
            return resolved == null ? "" : String(resolved);
        }
        catch {
            return "";
        }
    });
    return out;
}
|
|
224
|
+
/**
 * Coerce a value to an array: arrays pass through unchanged,
 * null/undefined become [], any other value is wrapped as [v].
 */
function toArray(v) {
    if (v == null)
        return [];
    return Array.isArray(v) ? v : [v];
}
|
|
231
|
+
/**
 * Parse include-tag arguments of the form `key=value key2 ...` into an object.
 * Bare keys (no `=`) map to `true`. Splitting happens on the FIRST `=` only,
 * so values containing `=` (base64, query strings, expressions) are preserved
 * intact. Values cannot contain whitespace since tokens split on it.
 * @param {string} s - raw argument run captured from an include tag
 * @returns {Record<string, string|true>}
 */
function parseArgs(s) {
    const out = {};
    const parts = String(s || "")
        .trim()
        .split(/\s+/)
        .filter(Boolean);
    for (const p of parts) {
        const eq = p.indexOf("=");
        if (eq === -1) {
            // Bare flag: "verbose" -> { verbose: true }
            out[p] = true;
            continue;
        }
        const k = p.slice(0, eq);
        if (!k)
            continue; // "=value" with an empty key is ignored
        // slice(eq + 1) keeps the full tail, fixing truncation of "k=a=b".
        out[k] = p.slice(eq + 1);
    }
    return out;
}
|
|
245
|
+
/**
 * Replace every match of global regex `re` in `s` with the awaited result of
 * `fn(group1, group2)` — used for section tags where group1 is the expression
 * and group2 the section body.
 * @param {string} s
 * @param {RegExp} re - global (/g) regex with two capture groups
 * @param {(expr: string, body: string) => Promise<string>} fn
 * @returns {Promise<string>}
 */
async function replaceSections(s, re, fn) {
    // exec() on a /g regex is stateful via lastIndex; reset so a reused
    // regex object cannot silently skip the start of the string.
    re.lastIndex = 0;
    const chunks = [];
    let lastIndex = 0;
    for (;;) {
        const m = re.exec(s);
        if (!m)
            break;
        chunks.push(s.slice(lastIndex, m.index));
        lastIndex = m.index + m[0].length;
        chunks.push(await fn(m[1], m[2]));
        // Guard against an infinite loop if the pattern matches empty string.
        if (m[0].length === 0)
            re.lastIndex++;
    }
    chunks.push(s.slice(lastIndex));
    return chunks.join("");
}
|
|
259
|
+
/**
 * Async analogue of String#replace for a global regex: each match is replaced
 * by the awaited result of `fn(fullMatch, ...captureGroups)` (the exec result
 * array is spread into the callback's arguments).
 * @param {string} s
 * @param {RegExp} re - global (/g) regex
 * @param {(...match: string[]) => Promise<string>} fn
 * @returns {Promise<string>}
 */
async function replaceAsync(s, re, fn) {
    // exec() on a /g regex is stateful via lastIndex; reset so a reused
    // regex object cannot silently skip the start of the string.
    re.lastIndex = 0;
    const chunks = [];
    let lastIndex = 0;
    for (;;) {
        const m = re.exec(s);
        if (!m)
            break;
        chunks.push(s.slice(lastIndex, m.index));
        lastIndex = m.index + m[0].length;
        chunks.push(await fn(...m));
        // Guard against an infinite loop if the pattern matches empty string.
        if (m[0].length === 0)
            re.lastIndex++;
    }
    chunks.push(s.slice(lastIndex));
    return chunks.join("");
}
|
|
273
|
+
/**
 * Split a raw URI string into { scheme, path }.
 * - "//path" is protocol-relative: the scheme is inherited from `base`.
 * - "scheme://rest" is split at "://".
 * - Anything else is treated as a plain file path.
 */
function resolveUri(raw, base) {
    // Support protocol-relative: //path -> inherit scheme from base
    if (raw.startsWith("//") && base) {
        const baseUrl = new URL(base, "file://");
        const scheme = baseUrl.protocol.replace(":", "");
        return { scheme, path: raw.slice(2) };
    }
    const match = /^(\w+):\/\/(.+)$/.exec(raw);
    return match
        ? { scheme: match[1], path: match[2] }
        : { scheme: "file", path: raw }; // relative path
}
|
|
285
|
+
/**
 * Fetch the raw contents of a template resource.
 * Supported schemes:
 *   - file (bare paths / file://): local filesystem, resolved against `base`
 *     or ctx.repoRoot;
 *   - github://owner/repo/(branch|ref|version)/<ref>/<path> (typed) or
 *     github://owner/repo/<ref-with-slashes>/<path> (generic, longest-first
 *     ref splitting);
 *   - git://<ref>/<path>: local repository via `git show`, longest-first ref
 *     splitting.
 * Protocol-relative "//path" inherits the scheme from `base`. Glob
 * metacharacters in the path expand to all matching files: github/git glob
 * matches are recursively *rendered* via renderTemplate, while local glob
 * matches are concatenated raw — note the asymmetry.
 * NOTE(review): expandDollarExpressions and listGithubFiles are defined
 * elsewhere in this module (not visible in this chunk).
 */
async function fetchResource(rawUri, ctx, base) {
    // Expand ${{ ... }} inside URI before resolution
    const expanded = expandDollarExpressions(rawUri, ctx);
    const { scheme, path: p } = resolveUri(expanded, base);
    dbg("fetchResource", { rawUri, expanded, base, scheme, p });
    // If relative include and base is a GitHub URI, resolve against the GitHub file's directory
    if (scheme === "file" && base && /^github:\/\//i.test(base)) {
        // Try typed base first: github://owner/repo/(branch|ref|version)/<ref-raw>/<path>
        const typedBase = /^github:\/\/([^/]+)\/([^/]+)\/(?:branch|ref|version)\/([^/]+)\/(.+)$/i.exec(base);
        if (typedBase) {
            const owner = typedBase[1];
            const repo = typedBase[2];
            const ref = decodeURIComponent(typedBase[3]);
            const basePath = decodeURIComponent(typedBase[4]);
            // A trailing "/" means basePath is already a directory.
            const dir = basePath.endsWith("/")
                ? basePath.replace(/\/+$/, "")
                : path.posix.dirname(basePath);
            // Resolve relative segments against GitHub base directory
            const joined = path.posix.normalize(path.posix.join(dir, p || ""));
            const filePath = joined.startsWith("./") ? joined.slice(2) : joined;
            dbg("github:typed:resolve", { owner, repo, ref, dir, filePath });
            if (hasGlob(filePath)) {
                const files = await listGithubFiles(owner, repo, ref, dir, ctx.token);
                dbg("github:typed:list", {
                    owner,
                    repo,
                    ref,
                    dir,
                    listed: files.length,
                    pattern: filePath,
                });
                const matches = files.filter((f) => matchGithubGlobAbsoluteOrRelative(f, filePath, dir));
                dbg("github:typed:matches", { count: matches.length });
                const parts = [];
                for (const m of matches) {
                    try {
                        const fileUri = `github://${owner}/${repo}/branch/${encodeURIComponent(ref)}/${m}`;
                        dbg("github:typed:renderEach", { fileUri });
                        // Each matched file is rendered (not just read) before joining.
                        parts.push(await renderTemplate(fileUri, ctx, fileUri));
                    }
                    catch (e) {
                        // A failing match is logged and skipped; the rest still render.
                        const fileUri = `github://${owner}/${repo}/branch/${encodeURIComponent(ref)}/${m}`;
                        dbg("github:typed:renderEach:error", { fileUri, e });
                    }
                }
                return parts.join("");
            }
            dbg("github:typed:fetch", { path: filePath });
            return await fetchGithubFile(owner, repo, ref, filePath, ctx.token);
        }
        // Fallback: generic github://owner/repo/<ref+path>
        const genericBase = /^github:\/\/([^/]+)\/([^/]+)\/(.+)$/i.exec(base);
        if (genericBase) {
            const owner = genericBase[1];
            const repo = genericBase[2];
            const restRaw = genericBase[3];
            const restDecoded = decodeURIComponent(restRaw);
            const baseDirFull = restDecoded.endsWith("/")
                ? restDecoded.replace(/\/+$/, "")
                : path.posix.dirname(restDecoded);
            const joined = path.posix.normalize(path.posix.join(baseDirFull, p || ""));
            const combined = joined.startsWith("./") ? joined.slice(2) : joined;
            dbg("github:generic:resolve", {
                owner,
                repo,
                restDecoded,
                baseDirFull,
                combined,
            });
            if (hasGlob(combined)) {
                // Best-effort: list from baseDirFull and filter
                // Assumes the first path segment is the ref — TODO confirm for
                // refs containing "/" (e.g. "release/1.x").
                const firstSeg = restDecoded.split("/")[0] || "";
                const remaining = restDecoded.slice(firstSeg.length + 1);
                const refGuess = firstSeg;
                const listDir = remaining ? path.posix.dirname(remaining) : "";
                const files = await listGithubFiles(owner, repo, refGuess, listDir, ctx.token);
                dbg("github:generic:list", {
                    owner,
                    repo,
                    ref: refGuess,
                    dir: listDir,
                    listed: files.length,
                    pattern: combined,
                });
                // Prefix each listed file with the guessed ref so patterns that
                // include the ref segment can still match.
                const matches = files
                    .map((f) => `${refGuess}/${f}`)
                    .filter((f) => matchGithubGlobAbsoluteOrRelativeWithRef(f, combined, refGuess, listDir));
                dbg("github:generic:matches", { count: matches.length });
                const parts = [];
                for (const m of matches) {
                    const segs = m.split("/");
                    const refCand = segs[0];
                    const fileCand = segs.slice(1).join("/");
                    try {
                        const fileUri = `github://${owner}/${repo}/branch/${encodeURIComponent(refCand)}/${fileCand}`;
                        dbg("github:generic:renderEach", { fileUri });
                        parts.push(await renderTemplate(fileUri, ctx, fileUri));
                    }
                    catch { }
                }
                return parts.join("");
            }
            // Longest-first split to determine ref vs file path
            const parts = combined.split("/");
            let lastErr = null;
            for (let i = parts.length - 1; i >= 1; i--) {
                const refCandidateRaw = parts.slice(0, i).join("/");
                const filePathCandidate = parts.slice(i).join("/");
                const refCandidate = await resolveGithubRef(owner, repo, refCandidateRaw, ctx.token);
                try {
                    dbg("github:generic:fetchTry", {
                        ref: refCandidate,
                        path: filePathCandidate,
                    });
                    const content = await fetchGithubFile(owner, repo, refCandidate, filePathCandidate, ctx.token);
                    return content;
                }
                catch (e) {
                    // Keep the most recent failure; a shorter ref split may succeed.
                    lastErr = e;
                }
            }
            throw lastErr || new Error("Failed to fetch GitHub file: unknown error");
        }
    }
    if (scheme === "file") {
        // Resolve relative includes against base file (either file:// URI or local path)
        let resolved;
        if (base && base.startsWith("file://")) {
            resolved = path.resolve(path.dirname(fileURLToPath(new URL(base))), p);
        }
        else if (base && !/^[a-zA-Z]+:\/\//.test(base)) {
            // base is a plain local path (no scheme prefix).
            resolved = path.resolve(path.dirname(base), p);
        }
        else {
            resolved = path.isAbsolute(p) ? p : path.resolve(ctx.repoRoot, p);
        }
        dbg("file:resolve", { p, resolved });
        if (hasGlob(resolved)) {
            const dir = fs.existsSync(resolved) && fs.statSync(resolved).isDirectory()
                ? resolved
                : path.dirname(resolved);
            const all = listLocalFilesRecursive(dir);
            const matches = all
                .filter((f) => matchLocalGlob(f, resolved))
                .sort((a, b) => a.localeCompare(b));
            dbg("file:glob", { dir, listed: all.length, pattern: resolved, matches: matches.length });
            const parts = [];
            for (const m of matches) {
                // try {
                // Local glob matches are concatenated raw (not rendered).
                parts.push(fs.readFileSync(m, "utf8"));
                // } catch {}
            }
            return parts.join("");
        }
        return fs.readFileSync(resolved, "utf8");
    }
    if (scheme === "github") {
        // Two supported shapes:
        // 1) owner/repo/(branch|ref|version)/<ref-with-optional-%2F>/file-path
        // 2) owner/repo/<ref-with-optional-slashes>/file-path
        // Try typed form first for compatibility with reactor.ts
        const typed = /^([^/]+)\/([^/]+)\/(?:branch|ref|version)\/([^/]+)\/(.+)$/.exec(p);
        if (typed) {
            const owner = typed[1];
            const repo = typed[2];
            const refRaw = typed[3];
            const filePathRaw = typed[4];
            const ref = await resolveGithubRef(owner, repo, decodeURIComponent(refRaw), ctx.token);
            const filePath = decodeURIComponent(filePathRaw);
            dbg("github:direct:typed", { owner, repo, ref, filePath });
            if (hasGlob(filePath)) {
                const baseDir = findGlobBaseDir(filePath);
                const files = await listGithubFiles(owner, repo, ref, baseDir, ctx.token);
                dbg("github:direct:list", { owner, repo, ref, dir: baseDir, listed: files.length, pattern: filePath });
                const matches = files.filter((f) => matchGithubGlobAbsoluteOrRelative(f, filePath, baseDir));
                dbg("github:direct:matches", { count: matches.length });
                const parts = [];
                for (const m of matches) {
                    // try {
                    const fileUri = `github://${owner}/${repo}/branch/${encodeURIComponent(ref)}/${m}`;
                    dbg("github:direct:renderEach", { fileUri });
                    parts.push(await renderTemplate(fileUri, ctx, fileUri));
                    // } catch {}
                }
                return parts.join("");
            }
            dbg("github:direct:fetch", { path: filePath });
            return await fetchGithubFile(owner, repo, ref, filePath, ctx.token);
        }
        // Fallback: longest-first split of rest = refParts + fileParts
        // Shape: owner/repo/<ref-with-optional-slashes>/<file-path>
        const [owner, repo, ...rest] = p.split("/");
        if (!owner || !repo || rest.length === 0)
            throw new Error(`Invalid github URI: expected github://owner/repo/ref/path, got '${rawUri}'`);
        let lastErr = null;
        for (let i = rest.length - 1; i >= 1; i--) {
            const refCandidateRaw = rest.slice(0, i).join("/");
            const filePathCandidateRaw = rest.slice(i).join("/");
            const decodedRefCandidateRaw = decodeURIComponent(refCandidateRaw);
            const decodedFilePathCandidate = decodeURIComponent(filePathCandidateRaw);
            const refCandidate = await resolveGithubRef(owner, repo, decodedRefCandidateRaw, ctx.token);
            try {
                dbg("github:direct:fetchTry", { ref: refCandidate, path: decodedFilePathCandidate });
                const content = await fetchGithubFile(owner, repo, refCandidate, decodedFilePathCandidate, ctx.token);
                return content;
            }
            catch (e) {
                lastErr = e;
                // continue trying shorter ref
            }
        }
        throw lastErr || new Error("Failed to fetch GitHub file: unknown error");
    }
    if (scheme === "git") {
        // git://<ref>/<path> with longest-first ref splitting.
        // Supports relative includes when base is also a git:// URI.
        const combined = resolveGitRelative(base, p);
        const parts = combined.split("/").filter(Boolean);
        if (parts.length < 2)
            throw new Error(`Invalid git URI: expected git://ref/path, got '${rawUri}'`);
        let lastErr = null;
        for (let i = parts.length - 1; i >= 1; i--) {
            const refCandidate = parts.slice(0, i).join("/");
            const filePathCandidate = parts.slice(i).join("/");
            try {
                if (!(await isValidGitRef(ctx.repoRoot, refCandidate))) {
                    lastErr = new Error(`Invalid git ref: ${refCandidate}`);
                    continue;
                }
                if (hasGlob(filePathCandidate)) {
                    const baseDir = findGlobBaseDir(filePathCandidate);
                    const files = await listGitFiles(ctx.repoRoot, refCandidate, baseDir);
                    const matches = files.filter((f) => matchGithubGlobAbsoluteOrRelative(f, filePathCandidate, baseDir));
                    const chunks = [];
                    for (const m of matches) {
                        try {
                            chunks.push(await fetchGitFile(ctx.repoRoot, refCandidate, m));
                        }
                        catch { }
                    }
                    return chunks.join("");
                }
                return await fetchGitFile(ctx.repoRoot, refCandidate, filePathCandidate);
            }
            catch (e) {
                lastErr = e;
            }
        }
        throw lastErr || new Error("Failed to fetch git file: unknown error");
    }
    // Default: treat as file path (with glob support)
    const absolute = path.isAbsolute(p) ? p : path.resolve(ctx.repoRoot, p);
    if (hasGlob(absolute)) {
        const dir = fs.existsSync(absolute) && fs.statSync(absolute).isDirectory()
            ? absolute
            : path.dirname(absolute);
        const all = listLocalFilesRecursive(dir);
        const matches = all
            .filter((f) => matchLocalGlob(f, absolute))
            .sort((a, b) => a.localeCompare(b));
        dbg("file:glob:absolute", { dir, listed: all.length, pattern: absolute, matches: matches.length });
        const parts = [];
        for (const m of matches) {
            // try {
            parts.push(fs.readFileSync(m, "utf8"));
            // } catch {}
        }
        return parts.join("");
    }
    return fs.readFileSync(absolute, "utf8");
}
|
|
556
|
+
/**
 * Resolve a relative git include path `p` against a git:// `base` URI.
 * Absolute paths (leading "/") and non-git bases pass through after
 * URI-decoding only.
 */
function resolveGitRelative(base, p) {
    const decoded = decodeURIComponent(String(p || ""));
    const baseIsGit = Boolean(base) && /^git:\/\//i.test(base);
    if (!baseIsGit || decoded.startsWith("/"))
        return decoded;
    const basePath = decodeURIComponent(base.replace(/^git:\/\//i, ""));
    let dir;
    if (basePath.endsWith("/")) {
        // base already names a directory: drop trailing slashes only.
        dir = basePath.replace(/\/+$/, "");
    }
    else {
        dir = path.posix.dirname(basePath);
    }
    return path.posix.normalize(path.posix.join(dir, decoded));
}
|
|
564
|
+
/** Read one file's content at a git ref via `git show ref:path`. */
async function fetchGitFile(repoRoot, ref, filePath) {
    const output = await git(["show", `${ref}:${filePath}`], repoRoot);
    return output.toString();
}
|
|
568
|
+
/**
 * List all file paths under `dir` (or the repo root) at `ref` via
 * `git ls-tree -r --name-only`; returns [] when the command fails.
 */
async function listGitFiles(repoRoot, ref, dir) {
    try {
        const raw = await git(["ls-tree", "-r", "--name-only", ref, "--", dir || "."], repoRoot);
        const lines = raw.split(/\r?\n/);
        return lines.map((line) => line.trim()).filter(Boolean);
    }
    catch {
        return [];
    }
}
|
|
580
|
+
/** True when `ref` resolves to a git object (`git rev-parse --verify`). */
async function isValidGitRef(repoRoot, ref) {
    try {
        await git(["rev-parse", "--verify", ref], repoRoot);
    }
    catch {
        return false;
    }
    return true;
}
|
|
589
|
+
/**
 * Normalize a GitHub ref-or-version string: empty -> "main"; a bare semver
 * ("1.2.3") gains a "v" prefix; everything else passes through unchanged.
 * `owner`/`repo`/`_token` are accepted for signature parity but unused here.
 */
async function resolveGithubRef(owner, repo, refOrVersion, _token) {
    if (!refOrVersion)
        return "main";
    const semverLike = /^v?\d+\.\d+\.\d+/;
    if (semverLike.test(refOrVersion)) {
        // semver tag or version: prefer the v-prefixed tag form.
        return refOrVersion.startsWith("v") ? refOrVersion : `v${refOrVersion}`;
    }
    return refOrVersion;
}
|
|
598
|
+
/** Thin wrapper over the shared GitHub contents helper; `_token` may be undefined. */
async function fetchGithubFile(owner, repo, ref, filePath, _token) {
    const contents = await fetchGithubFileContents({ owner, repo, ref, filePath, token: _token });
    return contents;
}
|
|
601
|
+
/** True when the path contains any glob metacharacter: * ? [ ] { }. */
function hasGlob(p) {
    const meta = ["*", "?", "[", "]", "{", "}"];
    const text = String(p);
    return meta.some((ch) => text.includes(ch));
}
|
|
604
|
+
/**
 * Convert backslashes to forward slashes. minimatch treats "\" as an escape
 * unless special options are used, so POSIX-style paths keep glob matching
 * consistent across platforms.
 */
function normalizeForGlob(p) {
    return String(p).split("\\").join("/");
}
|
|
609
|
+
/** Match a local file path against a glob pattern, POSIX-normalized, dotfiles included. */
function matchLocalGlob(filePath, pattern) {
    const file = normalizeForGlob(filePath);
    const glob = normalizeForGlob(pattern);
    return minimatch(file, glob, { dot: true });
}
|
|
612
|
+
/**
 * Enumerate every regular file under `root` as absolute paths (order
 * unspecified). A file root returns [itself]; a missing or unreadable root
 * returns []; unreadable entries inside the tree are skipped silently.
 */
function listLocalFilesRecursive(root) {
    let rootStat;
    try {
        rootStat = fs.statSync(root);
    }
    catch {
        return [];
    }
    if (rootStat.isFile())
        return [path.resolve(root)];
    if (!rootStat.isDirectory())
        return [];
    const found = [];
    const pending = [path.resolve(root)];
    // Iterative depth-first walk; avoids recursion on deep trees.
    while (pending.length) {
        const dir = pending.pop();
        let names;
        try {
            names = fs.readdirSync(dir);
        }
        catch {
            continue;
        }
        for (const name of names) {
            const full = path.join(dir, name);
            try {
                const st = fs.statSync(full);
                if (st.isDirectory())
                    pending.push(full);
                else if (st.isFile())
                    found.push(full);
            }
            catch { }
        }
    }
    return found;
}
|
|
648
|
+
/** Return the leading glob-free directory prefix of a slash-separated pattern. */
function findGlobBaseDir(p) {
    const literalSegments = [];
    for (const segment of p.split("/")) {
        if (hasGlob(segment))
            break;
        literalSegments.push(segment);
    }
    return literalSegments.join("/");
}
|
|
658
|
+
/**
 * Allow Markdown-escaped glob metacharacters like \* to act as * in paths/URIs.
 * A single regex pass unescapes \* \? \[ \] \{ \} (previously six chained
 * replaces). The old catch re-ran String(p) and would have rethrown the very
 * conversion failure it tried to handle; the fallback is now a safe "".
 */
function unescapeGlobMeta(p) {
    let s;
    try {
        s = String(p);
    }
    catch {
        // String() only throws for exotic inputs (e.g. a poisoned toString)
        return "";
    }
    return s.replace(/\\([*?[\]{}])/g, "$1");
}
|
|
673
|
+
// Match files returned from GitHub API against a glob pattern that may be relative
// to a base directory. We normalize both sides and enable dot-file matching.
function matchGithubGlobAbsoluteOrRelative(filePath, pattern, baseDir) {
    const file = path.posix.normalize(filePath);
    const base = path.posix.normalize(baseDir || "");
    const glob = path.posix.normalize(pattern);
    // The pattern may already be anchored at the repo root...
    if (minimatch(file, glob, { dot: true }))
        return true;
    // ...or it may be relative to the base directory.
    const anchored = path.posix.normalize(path.posix.join(base, glob));
    return minimatch(file, anchored, { dot: true });
}
|
|
688
|
+
/**
 * Like matchGithubGlobAbsoluteOrRelative, but the file path may carry a leading
 * ref segment (e.g. "main/src/a.js"); the pattern is tried with and without it.
 */
function matchGithubGlobAbsoluteOrRelativeWithRef(filePathWithRef, combinedPattern, refGuess, _baseDirFull) {
    const file = path.posix.normalize(filePathWithRef);
    const glob = path.posix.normalize(combinedPattern);
    // Direct match (pattern already includes the ref segment)
    if (minimatch(file, glob, { dot: true }))
        return true;
    // Strip the guessed ref prefix and retry (pattern might not include it)
    const prefix = refGuess + "/";
    const stripped = file.startsWith(prefix) ? file.slice(prefix.length) : file;
    return minimatch(stripped, glob, { dot: true });
}
|
|
702
|
+
/**
 * List repository files via the recursive GitHub helper.
 * Token preference: explicit ctx token, then A5C_AGENT_GITHUB_TOKEN, then
 * GITHUB_TOKEN. Any failure yields an empty list (best-effort).
 */
async function listGithubFiles(owner, repo, ref, dir, tokenFromCtx) {
    const envToken = process.env.A5C_AGENT_GITHUB_TOKEN || process.env.GITHUB_TOKEN;
    const token = tokenFromCtx || envToken;
    const request = { owner, repo, ref: ref || undefined, dir, token };
    try {
        return await listGithubFilesRecursive(request);
    }
    catch {
        return [];
    }
}
|
|
711
|
+
/**
 * Evaluate a template expression string against the rendering context.
 *
 * SECURITY NOTE(review): this compiles `expr` via `new Function` — effectively
 * eval. Expressions must come only from trusted template sources; untrusted
 * input here is arbitrary code execution.
 *
 * Bindings exposed to the expression: `event` and `github` (both aliases of
 * ctx.event), `env`, `vars`, `include(uri)` (renders a nested template), the
 * print/to JSON/YAML/XML helpers, `select`, and `thisArg`.
 */
function evalExpr(expr, ctx, currentUri) {
    // Provide helpers: event, env, vars, include(uri). Ensure template-level `this`
    // resolves to the current item for {{#each}} blocks via explicit `thisArg`.
    // Additionally, evaluate in strict mode to avoid global-object fallback.
    const compiled = preprocess(expr);
    const fn = new Function("event", "github", "env", "vars", "include", "printJSON", "printYAML", "printXML", "toJSON", "toYAML", "toXML", "select", "thisArg",
    // Evaluate inside a function so `this` can point to current item
    "return (function(){ 'use strict'; return (" +
        compiled +
        "); }).call(thisArg);");
    // include() closes over ctx/currentUri so nested URIs resolve relative to here.
    const include = (u) => renderTemplate(u, ctx, currentUri);
    // Only bind `thisArg` when the template engine has set vars.this (e.g. #each);
    // hasOwnProperty distinguishes "set to undefined" from "never set".
    const thisArg = ctx.vars && Object.prototype.hasOwnProperty.call(ctx.vars, "this")
        ? ctx.vars.this
        : undefined;
    // `event` is deliberately passed twice: once as `event`, once as `github`.
    return fn(ctx.event, ctx.event, ctx.env, ctx.vars, include, printJSON, printYAML, printXML, toJSON, toYAML, toXML, select, thisArg);
}
|
|
727
|
+
/**
 * Rewrite a template expression before compilation:
 * 1) expand pipeline syntax `a | fn(b, c) | g()` into nested calls, and
 * 2) rewrite a *leading* `this` reference (`this`, `this.x`, `this["k"]`,
 *    possibly after whitespace) to the explicit `thisArg` binding so
 *    evaluation does not rely on JS `this`. String contents are untouched.
 */
function preprocess(expr) {
    const withCalls = transformPipes(expr);
    return withCalls.replace(/^\s*this(?=(?:\s*$|[\.\[]))/, (match) => match.replace(/this$/, "thisArg"));
}
|
|
736
|
+
/** Expand ${{ expr }} occurrences in a string; nullish or failing evals become "". */
function expandDollarExpressions(s, ctx) {
    const expand = (_m, expr) => {
        try {
            const result = evalExpr(String(expr), ctx, "file:///");
            return result == null ? "" : String(result);
        }
        catch {
            return "";
        }
    };
    return String(s).replace(/\$\{\{\s*([^}]+)\s*\}\}/g, expand);
}
|
|
747
|
+
// Expand {{ expr }} occurrences inside a URI string context without rendering full template,
// so we can build dynamic include URIs from event/env/vars.
function expandCurlyExpressionsForUri(uriTpl, ctx, currentUri) {
    const source = String(uriTpl);
    try {
        const expand = (_m, expr) => {
            try {
                const result = evalExpr(String(expr), ctx, currentUri);
                return result == null ? "" : String(result);
            }
            catch {
                // a single bad expression collapses to "" without aborting the URI
                return "";
            }
        };
        return source.replace(/\{\{\s*([^}]+)\s*\}\}/g, expand);
    }
    catch {
        return source;
    }
}
|
|
765
|
+
// Helpers
|
|
766
|
+
/**
 * True when `v` is a "thenable" per Promises/A+: a non-null object *or
 * function* exposing a callable `then`. The previous check only accepted
 * objects, but `await`/`Promise.resolve` also adopt function thenables;
 * this is a backward-compatible generalization.
 */
function isThenable(v) {
    if (v == null)
        return false;
    const t = typeof v;
    if (t !== "object" && t !== "function")
        return false;
    return typeof v.then === "function";
}
|
|
769
|
+
/** Pretty-print a value as 2-space-indented JSON after masking; "" on failure. */
function printJSON(value) {
    try {
        return JSON.stringify(sanitizeForPrint(value), null, 2);
    }
    catch {
        return "";
    }
}
|
|
778
|
+
/** Render a value as YAML after masking sensitive fields; "" on failure. */
function printYAML(value) {
    try {
        return yamlStringify(sanitizeForPrint(value));
    }
    catch {
        return "";
    }
}
|
|
787
|
+
/**
 * Serialize a masked copy of `value` to JSON.
 * `indent` may be a number of spaces or an indent string (both accepted by
 * JSON.stringify — the old code silently discarded string indents like "\t");
 * anything else falls back to 2 spaces. Returns "" on failure.
 */
function toJSON(value, indent) {
    try {
        const safe = sanitizeForPrint(value);
        const space = typeof indent === "number" || typeof indent === "string" ? indent : 2;
        return JSON.stringify(safe, null, space);
    }
    catch {
        return "";
    }
}
|
|
796
|
+
/** Serialize a masked copy of `value` to YAML; "" on failure. */
function toYAML(value) {
    try {
        return yamlStringify(sanitizeForPrint(value));
    }
    catch {
        return "";
    }
}
|
|
805
|
+
/**
 * Escape the five XML special characters so `s` can be embedded safely in
 * element text or attribute values. The previous replacements mapped each
 * character to itself (a no-op — likely an entity-decoding artifact) and
 * therefore escaped nothing, producing malformed XML for <, &, etc.
 */
function xmlEscape(s) {
    return String(s)
        .replace(/&/g, "&amp;")
        .replace(/</g, "&lt;")
        .replace(/>/g, "&gt;")
        .replace(/"/g, "&quot;")
        .replace(/'/g, "&#39;");
}
|
|
813
|
+
/**
 * Recursively convert a value to simple XML text.
 * null/undefined → self-closing tag; arrays repeat the tag per element;
 * objects nest one child element per key; primitives become escaped text.
 * A falsy key falls back to the tag name "item".
 */
function valueToXML(key, value) {
    const tag = key || "item";
    if (value == null)
        return `<${tag}/>\n`;
    if (Array.isArray(value))
        return value.map((element) => valueToXML(tag, element)).join("");
    if (typeof value === "object") {
        const children = Object.entries(value).map(([childKey, childValue]) => valueToXML(childKey, childValue));
        return `<${tag}>\n${children.join("")}</${tag}>\n`;
    }
    return `<${tag}>${xmlEscape(String(value))}</${tag}>\n`;
}
|
|
828
|
+
/** Serialize a masked copy of `value` as XML under `rootName` (default "root"). */
function toXML(value, rootName) {
    return valueToXML(rootName || "root", sanitizeForPrint(value));
}
|
|
833
|
+
/** XML-render a value with the default root name; "" on any failure. */
function printXML(value) {
    try {
        const xml = toXML(value);
        return xml;
    }
    catch {
        return "";
    }
}
|
|
841
|
+
/**
 * Apply a selector to a value: a function selector is invoked with the value,
 * a string selector is treated as a dotted/indexed path, and any other
 * selector returns the value unchanged. Errors yield undefined.
 */
function select(value, selector) {
    try {
        switch (typeof selector) {
            case "function":
                return selector(value);
            case "string":
                return getByPath(value, selector);
            default:
                return value;
        }
    }
    catch {
        return undefined;
    }
}
|
|
854
|
+
/**
 * Resolve a dotted path with optional numeric indices, e.g. "a.b[0].c".
 * Yields undefined as soon as an intermediate value is null/undefined;
 * an empty path returns `obj` itself.
 */
function getByPath(obj, pathStr) {
    const segments = String(pathStr)
        .replace(/\[(\d+)\]/g, ".$1")
        .split(".")
        .filter(Boolean);
    return segments.reduce((current, segment) => (current == null ? undefined : current[segment]), obj);
}
|
|
867
|
+
/**
 * Expand pipeline syntax into nested calls: `a | fn(b, c) | g()` becomes
 * `g(fn((a), b, c))`. Pipes inside string literals or parentheses are ignored,
 * as are `||` (logical OR) and `|=` (OR-assign). If no top-level pipe is
 * found, the expression is returned unmodified.
 */
function transformPipes(expr) {
    // Split by '|' that are not inside strings/parens and not part of '||' or '|='
    const tokens = [];
    let buf = "";
    let depth = 0;
    let quote = null;
    for (let i = 0; i < expr.length; i++) {
        const ch = expr[i];
        const prev = i > 0 ? expr[i - 1] : "";
        const next = i + 1 < expr.length ? expr[i + 1] : "";
        if (quote) {
            // Inside a string literal: copy verbatim; an unescaped matching
            // quote (prev !== "\\") closes the literal.
            buf += ch;
            if (ch === quote && prev !== "\\")
                quote = null;
            continue;
        }
        if (ch === '"' || ch === "'" || ch === "`") {
            quote = ch;
            buf += ch;
            continue;
        }
        if (ch === "(") {
            depth++;
            buf += ch;
            continue;
        }
        if (ch === ")") {
            // Clamp at 0 so unbalanced ')' never makes depth negative.
            depth = Math.max(0, depth - 1);
            buf += ch;
            continue;
        }
        // A pipe splits only at top level and only when it is a lone '|':
        // prev !== "|" skips the 2nd char of '||'; next checks skip '||'/'|='.
        if (ch === "|" &&
            depth === 0 &&
            prev !== "|" &&
            next !== "|" &&
            next !== "=") {
            tokens.push(buf.trim());
            buf = "";
            continue;
        }
        buf += ch;
    }
    if (buf.trim())
        tokens.push(buf.trim());
    // Zero or one token means there was no pipeline to rewrite.
    if (tokens.length <= 1)
        return expr;
    // Fold left-to-right: each stage wraps the accumulated expression as its
    // first argument. The seed is parenthesized to preserve precedence.
    let acc = `(${tokens[0]})`;
    for (let i = 1; i < tokens.length; i++) {
        const seg = tokens[i];
        // Matches `name` or `name(args)`; args captured greedily as raw text.
        const m = /^([a-zA-Z_$][\w$]*)\s*(?:\((.*)\))?$/.exec(seg);
        if (m) {
            const name = m[1];
            const args = (m[2] || "").trim();
            acc = args ? `${name}(${acc}, ${args})` : `${name}(${acc})`;
        }
        else {
            // If not a simple identifier or call, leave as-is by concatenation
            acc = `${seg}(${acc})`;
        }
    }
    return acc;
}
|
|
929
|
+
/** Mask env-like fields, then apply the generic redactor, before any printing. */
function sanitizeForPrint(value) {
    return redactObject(redactEnvFields(value, DEFAULT_MASK));
}
|
|
933
|
+
/**
 * Deep-copy `value`, replacing the value under any key named "env"
 * (case-insensitive) with `mask`. Arrays are mapped element-wise and
 * primitives are returned as-is. The previous object branch contained a dead
 * `Array.isArray(value) ? [] : {}` ternary — arrays were already returned by
 * the first branch, so the accumulator is always a plain object.
 */
function redactEnvFields(value, mask) {
    if (Array.isArray(value))
        return value.map((v) => redactEnvFields(v, mask));
    if (value && typeof value === "object") {
        const out = {};
        for (const [k, v] of Object.entries(value)) {
            out[k] = k.toLowerCase() === "env" ? mask : redactEnvFields(v, mask);
        }
        return out;
    }
    return value;
}
|
|
950
|
+
/**
 * Shallow-copy an event object without the fields templates should not see
 * (script, event_type, original_event, client_payload). Non-objects, nullish
 * values, and copy failures return the input unchanged.
 */
function sanitizeEventForTemplate(ev) {
    try {
        if (!ev || typeof ev !== "object")
            return ev;
        const { script, event_type, original_event, client_payload, ...rest } = ev;
        return rest;
    }
    catch {
        return ev;
    }
}
|
|
965
|
+
//# sourceMappingURL=agentGenerateContext.js.map
|