@chrysb/alphaclaw 0.4.6-beta.8 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. package/bin/alphaclaw.js +2 -32
  2. package/lib/public/css/theme.css +19 -0
  3. package/lib/public/js/app.js +1 -1
  4. package/lib/public/js/components/envars.js +0 -1
  5. package/lib/public/js/components/onboarding/welcome-config.js +39 -17
  6. package/lib/public/js/components/onboarding/welcome-form-step.js +142 -47
  7. package/lib/public/js/components/onboarding/welcome-import-step.js +306 -0
  8. package/lib/public/js/components/onboarding/welcome-placeholder-review-step.js +99 -0
  9. package/lib/public/js/components/onboarding/welcome-secret-review-step.js +191 -0
  10. package/lib/public/js/components/segmented-control.js +7 -1
  11. package/lib/public/js/components/welcome/index.js +112 -0
  12. package/lib/public/js/components/welcome/use-welcome.js +561 -0
  13. package/lib/public/js/lib/api.js +221 -161
  14. package/lib/server/commands.js +1 -0
  15. package/lib/server/constants.js +0 -1
  16. package/lib/server/gateway.js +15 -40
  17. package/lib/server/onboarding/github.js +120 -19
  18. package/lib/server/onboarding/import/import-applier.js +321 -0
  19. package/lib/server/onboarding/import/import-config.js +69 -0
  20. package/lib/server/onboarding/import/import-scanner.js +469 -0
  21. package/lib/server/onboarding/import/import-temp.js +63 -0
  22. package/lib/server/onboarding/import/secret-detector.js +289 -0
  23. package/lib/server/onboarding/index.js +256 -29
  24. package/lib/server/onboarding/workspace.js +38 -6
  25. package/lib/server/routes/onboarding.js +281 -12
  26. package/lib/server.js +11 -2
  27. package/package.json +1 -1
  28. package/lib/public/js/components/welcome.js +0 -318
@@ -14,6 +14,7 @@ const createCommands = ({ gatewayEnv }) => {
14
14
  console.log(
15
15
  `[onboard] Running: ${cmd
16
16
  .replace(/ghp_[^\s"]+/g, "***")
17
+ .replace(/github_pat_[^\s"]+/g, "***")
17
18
  .replace(/sk-[^\s"]+/g, "***")
18
19
  .slice(0, 200)}`,
19
20
  );
@@ -193,7 +193,6 @@ const kKnownVars = [
193
193
  key: "GITHUB_TOKEN",
194
194
  label: "GitHub Access Token",
195
195
  group: "github",
196
- hint: "Create one with repo scope at github.com/settings/tokens",
197
196
  },
198
197
  {
199
198
  key: "GITHUB_WORKSPACE_REPO",
@@ -3,6 +3,7 @@ const { spawn, execSync } = require("child_process");
3
3
  const fs = require("fs");
4
4
  const net = require("net");
5
5
  const {
6
+ ALPHACLAW_DIR,
6
7
  OPENCLAW_DIR,
7
8
  GATEWAY_HOST,
8
9
  GATEWAY_PORT,
@@ -48,51 +49,25 @@ const gatewayEnv = () => ({
48
49
  XDG_CONFIG_HOME: OPENCLAW_DIR,
49
50
  });
50
51
 
51
- const hasOnboardingModelConfig = () => {
52
- const configPath = `${OPENCLAW_DIR}/openclaw.json`;
53
- if (!fs.existsSync(configPath)) return false;
54
- try {
55
- const config = JSON.parse(fs.readFileSync(configPath, "utf8"));
56
- const primaryModel = String(
57
- config?.agents?.defaults?.model?.primary || "",
58
- ).trim();
59
- return primaryModel.includes("/");
60
- } catch {
61
- return false;
62
- }
63
- };
64
-
65
- const hasLegacyOnboardingArtifacts = () => fs.existsSync(kControlUiSkillPath);
66
-
67
52
  const writeOnboardingMarker = (reason) => {
68
- try {
69
- fs.mkdirSync(path.dirname(kOnboardingMarkerPath), { recursive: true });
70
- fs.writeFileSync(
71
- kOnboardingMarkerPath,
72
- JSON.stringify(
73
- {
74
- onboarded: true,
75
- reason: String(reason || "unknown"),
76
- markedAt: new Date().toISOString(),
77
- },
78
- null,
79
- 2,
80
- ),
81
- );
82
- return true;
83
- } catch (err) {
84
- console.error(`[alphaclaw] Failed to write onboarding marker: ${err.message}`);
85
- return false;
86
- }
53
+ fs.mkdirSync(ALPHACLAW_DIR, { recursive: true });
54
+ fs.writeFileSync(
55
+ kOnboardingMarkerPath,
56
+ JSON.stringify(
57
+ {
58
+ onboarded: true,
59
+ reason,
60
+ markedAt: new Date().toISOString(),
61
+ },
62
+ null,
63
+ 2,
64
+ ),
65
+ );
87
66
  };
88
67
 
89
68
  const isOnboarded = () => {
90
69
  if (fs.existsSync(kOnboardingMarkerPath)) return true;
91
- if (hasOnboardingModelConfig()) {
92
- writeOnboardingMarker("config_primary_model");
93
- return true;
94
- }
95
- if (hasLegacyOnboardingArtifacts()) {
70
+ if (fs.existsSync(kControlUiSkillPath)) {
96
71
  writeOnboardingMarker("legacy_artifact_backfill");
97
72
  return true;
98
73
  }
@@ -1,3 +1,12 @@
1
+ const fs = require("fs");
2
+ const os = require("os");
3
+ const path = require("path");
4
+ const crypto = require("crypto");
5
+ const {
6
+ kImportTempPrefix,
7
+ isValidImportTempDir,
8
+ } = require("./import/import-temp");
9
+
1
10
  const buildGithubHeaders = (githubToken) => ({
2
11
  Authorization: `token ${githubToken}`,
3
12
  "User-Agent": "openclaw-railway",
@@ -14,9 +23,18 @@ const parseGithubErrorMessage = async (response) => {
14
23
  return response.statusText || `HTTP ${response.status}`;
15
24
  };
16
25
 
17
- const verifyGithubRepoForOnboarding = async ({ repoUrl, githubToken }) => {
26
+ const isClassicPat = (token) => String(token || "").startsWith("ghp_");
27
+ const isFineGrainedPat = (token) =>
28
+ String(token || "").startsWith("github_pat_");
29
+
30
+ const verifyGithubRepoForOnboarding = async ({
31
+ repoUrl,
32
+ githubToken,
33
+ mode = "new",
34
+ }) => {
18
35
  const ghHeaders = buildGithubHeaders(githubToken);
19
36
  const [repoOwner] = String(repoUrl || "").split("/", 1);
37
+ const isExisting = mode === "existing";
20
38
 
21
39
  try {
22
40
  const userRes = await fetch("https://api.github.com/user", {
@@ -30,25 +48,28 @@ const verifyGithubRepoForOnboarding = async ({ repoUrl, githubToken }) => {
30
48
  error: `Cannot verify GitHub token: ${details}`,
31
49
  };
32
50
  }
33
- const oauthScopes = (userRes.headers?.get?.("x-oauth-scopes") || "")
34
- .toLowerCase()
35
- .split(",")
36
- .map((s) => s.trim())
37
- .filter(Boolean);
38
- if (
39
- oauthScopes.length > 0 &&
40
- !oauthScopes.includes("repo") &&
41
- !oauthScopes.includes("public_repo")
42
- ) {
43
- return {
44
- ok: false,
45
- status: 400,
46
- error: `Your token needs the "repo" scope to create repositories. Current scopes: ${oauthScopes.join(", ")}`,
47
- };
51
+ if (isClassicPat(githubToken)) {
52
+ const oauthScopes = (userRes.headers?.get?.("x-oauth-scopes") || "")
53
+ .toLowerCase()
54
+ .split(",")
55
+ .map((s) => s.trim())
56
+ .filter(Boolean);
57
+ if (
58
+ oauthScopes.length > 0 &&
59
+ !oauthScopes.includes("repo") &&
60
+ !oauthScopes.includes("public_repo")
61
+ ) {
62
+ return {
63
+ ok: false,
64
+ status: 400,
65
+ error: `Your token needs the "repo" scope. Current scopes: ${oauthScopes.join(", ")}`,
66
+ };
67
+ }
48
68
  }
49
69
  const authedUser = await userRes.json().catch(() => ({}));
50
70
  const authedLogin = String(authedUser?.login || "").trim();
51
71
  if (
72
+ !isExisting &&
52
73
  repoOwner &&
53
74
  authedLogin &&
54
75
  repoOwner.toLowerCase() !== authedLogin.toLowerCase()
@@ -56,7 +77,7 @@ const verifyGithubRepoForOnboarding = async ({ repoUrl, githubToken }) => {
56
77
  return {
57
78
  ok: false,
58
79
  status: 400,
59
- error: `Workspace repo owner must match your token user "${authedLogin}"`,
80
+ error: `New workspace repo owner must match your token user "${authedLogin}"`,
60
81
  };
61
82
  }
62
83
 
@@ -64,6 +85,13 @@ const verifyGithubRepoForOnboarding = async ({ repoUrl, githubToken }) => {
64
85
  headers: ghHeaders,
65
86
  });
66
87
  if (checkRes.status === 404) {
88
+ if (isExisting) {
89
+ return {
90
+ ok: false,
91
+ status: 400,
92
+ error: `Repository "${repoUrl}" not found. Check the repo name and token permissions.`,
93
+ };
94
+ }
67
95
  return { ok: true, repoExists: false, repoIsEmpty: false };
68
96
  }
69
97
  if (checkRes.ok) {
@@ -75,10 +103,13 @@ const verifyGithubRepoForOnboarding = async ({ repoUrl, githubToken }) => {
75
103
  return { ok: true, repoExists: true, repoIsEmpty: true };
76
104
  }
77
105
  if (commitsRes.ok) {
106
+ if (isExisting) {
107
+ return { ok: true, repoExists: true, repoIsEmpty: false };
108
+ }
78
109
  return {
79
110
  ok: false,
80
111
  status: 400,
81
- error: `Repository "${repoUrl}" already exists and is not empty.`,
112
+ error: `Repository "${repoUrl}" already exists and is not empty. Did you mean to use "Import existing setup"?`,
82
113
  };
83
114
  }
84
115
  const commitCheckDetails = await parseGithubErrorMessage(commitsRes);
@@ -90,6 +121,13 @@ const verifyGithubRepoForOnboarding = async ({ repoUrl, githubToken }) => {
90
121
  }
91
122
 
92
123
  const details = await parseGithubErrorMessage(checkRes);
124
+ if (isFineGrainedPat(githubToken) && checkRes.status === 403) {
125
+ return {
126
+ ok: false,
127
+ status: 400,
128
+ error: `Your fine-grained token needs Contents (read/write) and Metadata (read) permissions for "${repoUrl}".`,
129
+ };
130
+ }
93
131
  return {
94
132
  ok: false,
95
133
  status: 400,
@@ -150,4 +188,67 @@ const ensureGithubRepoAccessible = async ({
150
188
  }
151
189
  };
152
190
 
153
- module.exports = { ensureGithubRepoAccessible, verifyGithubRepoForOnboarding };
191
+ const cloneRepoToTemp = async ({ repoUrl, githubToken, shellCmd }) => {
192
+ const tempId = crypto.randomUUID().slice(0, 8);
193
+ const tempDir = path.join(os.tmpdir(), `${kImportTempPrefix}${tempId}`);
194
+ const askPassPath = path.join(
195
+ os.tmpdir(),
196
+ `alphaclaw-import-askpass-${tempId}.sh`,
197
+ );
198
+
199
+ try {
200
+ fs.writeFileSync(
201
+ askPassPath,
202
+ [
203
+ "#!/bin/sh",
204
+ 'case "$1" in',
205
+ ' *Username*) printf "%s\\n" "x-access-token" ;;',
206
+ ' *) printf "%s\\n" "$ALPHACLAW_GITHUB_TOKEN" ;;',
207
+ "esac",
208
+ "",
209
+ ].join("\n"),
210
+ { mode: 0o700 },
211
+ );
212
+ await shellCmd(
213
+ `git clone --depth=1 "https://github.com/${repoUrl}.git" "${tempDir}"`,
214
+ {
215
+ timeout: 60000,
216
+ env: {
217
+ ...process.env,
218
+ GIT_ASKPASS: askPassPath,
219
+ GIT_TERMINAL_PROMPT: "0",
220
+ ALPHACLAW_GITHUB_TOKEN: githubToken,
221
+ },
222
+ },
223
+ );
224
+ console.log(`[onboard] Cloned ${repoUrl} to ${tempDir}`);
225
+ return { ok: true, tempDir };
226
+ } catch (e) {
227
+ return {
228
+ ok: false,
229
+ error: `Failed to clone repo: ${e.message}`,
230
+ };
231
+ } finally {
232
+ try {
233
+ fs.rmSync(askPassPath, { force: true });
234
+ } catch {}
235
+ }
236
+ };
237
+
238
+ const cleanupTempClone = (tempDir) => {
239
+ try {
240
+ if (isValidImportTempDir(tempDir)) {
241
+ fs.rmSync(tempDir, { recursive: true, force: true });
242
+ console.log(`[onboard] Cleaned up temp clone ${tempDir}`);
243
+ }
244
+ } catch (e) {
245
+ console.error(`[onboard] Temp cleanup error: ${e.message}`);
246
+ }
247
+ };
248
+
249
+ module.exports = {
250
+ ensureGithubRepoAccessible,
251
+ verifyGithubRepoForOnboarding,
252
+ cloneRepoToTemp,
253
+ cleanupTempClone,
254
+ };
@@ -0,0 +1,321 @@
1
+ const path = require("path");
2
+ const { isValidImportTempDir } = require("./import-temp");
3
+ const {
4
+ normalizeHookPath,
5
+ normalizeTransformModulePath,
6
+ } = require("./import-config");
7
+
8
+ const kEnvVarNamePattern = /^[A-Z_][A-Z0-9_]*$/;
9
+
10
+ const isValidTempDir = (tempDir) => isValidImportTempDir(tempDir);
11
+
12
+ const kTransformsRoot = path.join("hooks", "transforms");
13
+ const kTransformsBackupRoot = path.join(kTransformsRoot, "_backup");
14
+
15
+ const kReplaceableBootstrapPaths = [
16
+ ".env",
17
+ ".alphaclaw",
18
+ "gogcli",
19
+ path.join("workspace", "hooks", "bootstrap"),
20
+ path.join("skills", "control-ui"),
21
+ path.join("skills", "gog-cli"),
22
+ ];
23
+
24
+ const removeIfExists = (fs, targetPath) => {
25
+ try {
26
+ if (fs.existsSync(targetPath)) {
27
+ fs.rmSync(targetPath, { recursive: true, force: true });
28
+ }
29
+ } catch {}
30
+ };
31
+
32
+ const removeEmptyParents = (fs, rootDir, targetPath) => {
33
+ let current = path.dirname(targetPath);
34
+ while (current.startsWith(rootDir) && current !== rootDir) {
35
+ try {
36
+ const entries = fs.readdirSync(current);
37
+ if (entries.length > 0) break;
38
+ fs.rmSync(current, { recursive: true, force: true });
39
+ current = path.dirname(current);
40
+ } catch {
41
+ break;
42
+ }
43
+ }
44
+ };
45
+
46
+ const cleanupBootstrapArtifacts = (fs, openclawDir) => {
47
+ for (const relPath of kReplaceableBootstrapPaths) {
48
+ const absolutePath = path.join(openclawDir, relPath);
49
+ removeIfExists(fs, absolutePath);
50
+ removeEmptyParents(fs, openclawDir, absolutePath);
51
+ }
52
+ };
53
+
54
+ const promoteCloneToTarget = ({
55
+ fs,
56
+ tempDir,
57
+ targetDir,
58
+ sourceSubdir = "",
59
+ cleanupBootstrap = false,
60
+ }) => {
61
+ if (!isValidTempDir(tempDir)) {
62
+ return { ok: false, error: "Invalid temp directory" };
63
+ }
64
+
65
+ const sourceDir = sourceSubdir ? path.join(tempDir, sourceSubdir) : tempDir;
66
+
67
+ try {
68
+ if (!fs.existsSync(sourceDir)) {
69
+ return { ok: false, error: "Import source directory not found" };
70
+ }
71
+ if (fs.existsSync(targetDir)) {
72
+ if (cleanupBootstrap) {
73
+ cleanupBootstrapArtifacts(fs, targetDir);
74
+ }
75
+ const existingEntries = fs.readdirSync(targetDir);
76
+ if (existingEntries.length > 0) {
77
+ return {
78
+ ok: false,
79
+ error: "Import target directory already exists and is not empty",
80
+ };
81
+ }
82
+ fs.rmSync(targetDir, { recursive: true, force: true });
83
+ }
84
+ fs.mkdirSync(path.dirname(targetDir), { recursive: true });
85
+ fs.renameSync(sourceDir, targetDir);
86
+ if (sourceDir !== tempDir) {
87
+ fs.rmSync(tempDir, { recursive: true, force: true });
88
+ }
89
+ console.log(`[import] Promoted ${sourceDir} to ${targetDir}`);
90
+ return { ok: true };
91
+ } catch (e) {
92
+ // Cross-device rename falls back to copy
93
+ if (e.code === "EXDEV") {
94
+ try {
95
+ copyDirRecursive(fs, sourceDir, targetDir);
96
+ fs.rmSync(tempDir, { recursive: true, force: true });
97
+ console.log(
98
+ `[import] Copied ${sourceDir} to ${targetDir} (cross-device)`,
99
+ );
100
+ return { ok: true };
101
+ } catch (copyErr) {
102
+ return { ok: false, error: `Failed to copy clone: ${copyErr.message}` };
103
+ }
104
+ }
105
+ return { ok: false, error: `Failed to promote clone: ${e.message}` };
106
+ }
107
+ };
108
+
109
+ const copyDirRecursive = (fs, src, dest) => {
110
+ fs.mkdirSync(dest, { recursive: true });
111
+ const entries = fs.readdirSync(src, { withFileTypes: true });
112
+ for (const entry of entries) {
113
+ const srcPath = path.join(src, entry.name);
114
+ const destPath = path.join(dest, entry.name);
115
+ if (entry.isDirectory()) {
116
+ copyDirRecursive(fs, srcPath, destPath);
117
+ } else {
118
+ fs.copyFileSync(srcPath, destPath);
119
+ }
120
+ }
121
+ };
122
+
123
+ const toPosixPath = (value) => String(value || "").replace(/\\/g, "/");
124
+ const ensureRelativeImportPath = (value) => {
125
+ const normalized = toPosixPath(value);
126
+ if (normalized.startsWith(".")) return normalized;
127
+ return `./${normalized}`;
128
+ };
129
+ const pathExists = (fs, targetPath) => {
130
+ try {
131
+ return fs.existsSync(targetPath);
132
+ } catch {
133
+ return false;
134
+ }
135
+ };
136
+
137
+ const resolveExtractionTargetPath = (baseDir, file) => {
138
+ const relativeFile = String(file || "").trim();
139
+ if (!relativeFile || path.isAbsolute(relativeFile)) return "";
140
+ const resolvedBaseDir = path.resolve(baseDir);
141
+ const resolvedFilePath = path.resolve(resolvedBaseDir, relativeFile);
142
+ if (
143
+ resolvedFilePath !== resolvedBaseDir &&
144
+ !resolvedFilePath.startsWith(`${resolvedBaseDir}${path.sep}`)
145
+ ) {
146
+ return "";
147
+ }
148
+ return resolvedFilePath;
149
+ };
150
+ const movePath = (fs, src, dest) => {
151
+ fs.mkdirSync(path.dirname(dest), { recursive: true });
152
+ try {
153
+ fs.renameSync(src, dest);
154
+ return;
155
+ } catch (error) {
156
+ if (error?.code !== "EXDEV") throw error;
157
+ }
158
+ const stats = fs.statSync(src);
159
+ if (stats.isDirectory()) {
160
+ copyDirRecursive(fs, src, dest);
161
+ fs.rmSync(src, { recursive: true, force: true });
162
+ return;
163
+ }
164
+ fs.copyFileSync(src, dest);
165
+ fs.rmSync(src, { force: true });
166
+ };
167
+ const buildTransformShim = (targetImportPath) =>
168
+ [
169
+ `export { default } from ${JSON.stringify(targetImportPath)};`,
170
+ `export * from ${JSON.stringify(targetImportPath)};`,
171
+ "",
172
+ ].join("\n");
173
+
174
+ const alignHookTransforms = ({ fs, baseDir, configFiles = [] }) => {
175
+ const movedRoots = new Map();
176
+ let alignedCount = 0;
177
+
178
+ for (const configFile of configFiles) {
179
+ const fullConfigPath = path.join(baseDir, configFile);
180
+ let cfg = null;
181
+ try {
182
+ cfg = JSON.parse(fs.readFileSync(fullConfigPath, "utf8"));
183
+ } catch {
184
+ continue;
185
+ }
186
+ const mappings = Array.isArray(cfg?.hooks?.mappings)
187
+ ? cfg.hooks.mappings
188
+ : [];
189
+ let changed = false;
190
+
191
+ mappings.forEach((mapping, index) => {
192
+ const hookPath = normalizeHookPath(mapping?.match?.path);
193
+ const actualModule = normalizeTransformModulePath(
194
+ mapping?.transform?.module,
195
+ );
196
+ if (!hookPath || !actualModule) return;
197
+
198
+ const expectedModule = `${hookPath}/${hookPath}-transform.mjs`;
199
+ if (actualModule === expectedModule) return;
200
+
201
+ const actualRelativePath = path.join(kTransformsRoot, actualModule);
202
+ const expectedRelativePath = path.join(kTransformsRoot, expectedModule);
203
+ const actualAbsolutePath = path.join(baseDir, actualRelativePath);
204
+ const expectedAbsolutePath = path.join(baseDir, expectedRelativePath);
205
+ if (!pathExists(fs, actualAbsolutePath)) return;
206
+
207
+ const actualParts = actualModule.split("/").filter(Boolean);
208
+ const sourceRootRelativePath =
209
+ actualParts.length > 1
210
+ ? path.join(kTransformsRoot, actualParts[0])
211
+ : actualRelativePath;
212
+ const sourceRootAbsolutePath = path.join(baseDir, sourceRootRelativePath);
213
+ const backupRootRelativePath = path.join(
214
+ kTransformsBackupRoot,
215
+ sourceRootRelativePath.slice(kTransformsRoot.length + 1),
216
+ );
217
+ const backupRootAbsolutePath = path.join(baseDir, backupRootRelativePath);
218
+
219
+ if (!movedRoots.has(sourceRootAbsolutePath)) {
220
+ if (
221
+ !pathExists(fs, backupRootAbsolutePath) &&
222
+ pathExists(fs, sourceRootAbsolutePath)
223
+ ) {
224
+ movePath(fs, sourceRootAbsolutePath, backupRootAbsolutePath);
225
+ }
226
+ movedRoots.set(sourceRootAbsolutePath, backupRootAbsolutePath);
227
+ }
228
+
229
+ const backupActualAbsolutePath = path.join(
230
+ movedRoots.get(sourceRootAbsolutePath),
231
+ path.relative(sourceRootAbsolutePath, actualAbsolutePath),
232
+ );
233
+
234
+ fs.mkdirSync(path.dirname(expectedAbsolutePath), { recursive: true });
235
+ const shimImportPath = ensureRelativeImportPath(
236
+ path.relative(
237
+ path.dirname(expectedAbsolutePath),
238
+ backupActualAbsolutePath,
239
+ ),
240
+ );
241
+ fs.writeFileSync(
242
+ expectedAbsolutePath,
243
+ buildTransformShim(shimImportPath),
244
+ );
245
+
246
+ mappings[index] = {
247
+ ...mapping,
248
+ transform: {
249
+ ...(mapping?.transform || {}),
250
+ module: expectedModule,
251
+ },
252
+ };
253
+ changed = true;
254
+ alignedCount += 1;
255
+ });
256
+
257
+ if (changed) {
258
+ fs.writeFileSync(fullConfigPath, JSON.stringify(cfg, null, 2));
259
+ }
260
+ }
261
+
262
+ return { alignedCount };
263
+ };
264
+
265
+ const applySecretExtraction = ({ fs, baseDir, approvedSecrets }) => {
266
+ const envVars = [];
267
+ const rewriteMap = new Map();
268
+
269
+ for (const secret of approvedSecrets) {
270
+ const envVar = String(secret.suggestedEnvVar || "").trim();
271
+ const value = String(secret.value || "").trim();
272
+ if (!envVar || !value || !kEnvVarNamePattern.test(envVar)) continue;
273
+
274
+ envVars.push({ key: envVar, value });
275
+
276
+ if (secret.file && !secret.file.startsWith(".env")) {
277
+ const fullPath = resolveExtractionTargetPath(baseDir, secret.file);
278
+ if (!fullPath) continue;
279
+ if (!rewriteMap.has(fullPath)) {
280
+ rewriteMap.set(fullPath, []);
281
+ }
282
+ rewriteMap.get(fullPath).push({
283
+ value,
284
+ envRef: `\${${envVar}}`,
285
+ relativeFile: secret.file,
286
+ });
287
+ }
288
+ }
289
+
290
+ for (const [fullPath, replacements] of rewriteMap) {
291
+ try {
292
+ let content = fs.readFileSync(fullPath, "utf8");
293
+ const sorted = [...replacements].sort(
294
+ (a, b) => b.value.length - a.value.length,
295
+ );
296
+ for (const { value, envRef } of sorted) {
297
+ const secretJson = JSON.stringify(value);
298
+ const envRefJson = JSON.stringify(envRef);
299
+ content = content.replace(
300
+ new RegExp(secretJson.replace(/[-/\\^$*+?.()|[\]{}]/g, "\\$&"), "g"),
301
+ envRefJson,
302
+ );
303
+ }
304
+ fs.writeFileSync(fullPath, content);
305
+ console.log(
306
+ `[import] Rewrote secrets in ${replacements[0]?.relativeFile || fullPath}`,
307
+ );
308
+ } catch (e) {
309
+ console.error(`[import] Rewrite error for ${fullPath}: ${e.message}`);
310
+ }
311
+ }
312
+
313
+ return { envVars };
314
+ };
315
+
316
+ module.exports = {
317
+ promoteCloneToTarget,
318
+ alignHookTransforms,
319
+ applySecretExtraction,
320
+ isValidTempDir,
321
+ };
@@ -0,0 +1,69 @@
1
+ const path = require("path");
2
+
3
+ const normalizeHookPath = (value) =>
4
+ String(value || "")
5
+ .trim()
6
+ .replace(/^\/+/, "");
7
+
8
+ const normalizeTransformModulePath = (value) =>
9
+ String(value || "")
10
+ .trim()
11
+ .replace(/^\/+/, "")
12
+ .replace(/^hooks\/transforms\/+/, "");
13
+
14
+ const resolveConfigIncludes = ({ fs, absoluteConfigPath }) => {
15
+ const includes = [];
16
+ try {
17
+ const raw = fs.readFileSync(absoluteConfigPath, "utf8");
18
+ const cfg = JSON.parse(raw);
19
+ const walk = (entry) => {
20
+ if (!entry || typeof entry !== "object") return;
21
+ for (const [key, value] of Object.entries(entry)) {
22
+ if (key === "$include" && typeof value === "string" && value.trim()) {
23
+ includes.push(value.trim());
24
+ continue;
25
+ }
26
+ walk(value);
27
+ }
28
+ };
29
+ walk(cfg);
30
+ } catch {}
31
+ return includes;
32
+ };
33
+
34
+ const resolveImportedConfigPaths = ({ fs, openclawDir }) => {
35
+ const discovered = new Set();
36
+ const queue = [path.join(openclawDir, "openclaw.json")].filter((configPath) =>
37
+ fs.existsSync(configPath),
38
+ );
39
+
40
+ while (queue.length > 0) {
41
+ const absoluteConfigPath = queue.shift();
42
+ if (!absoluteConfigPath || discovered.has(absoluteConfigPath)) continue;
43
+ if (!fs.existsSync(absoluteConfigPath)) continue;
44
+ discovered.add(absoluteConfigPath);
45
+
46
+ const includes = resolveConfigIncludes({ fs, absoluteConfigPath });
47
+ for (const includePath of includes) {
48
+ const candidatePaths = [
49
+ path.join(openclawDir, includePath),
50
+ path.join(path.dirname(absoluteConfigPath), includePath),
51
+ ];
52
+ for (const candidatePath of candidatePaths) {
53
+ if (fs.existsSync(candidatePath) && !discovered.has(candidatePath)) {
54
+ queue.push(candidatePath);
55
+ break;
56
+ }
57
+ }
58
+ }
59
+ }
60
+
61
+ return [...discovered];
62
+ };
63
+
64
+ module.exports = {
65
+ normalizeHookPath,
66
+ normalizeTransformModulePath,
67
+ resolveConfigIncludes,
68
+ resolveImportedConfigPaths,
69
+ };