@meshxdata/fops 0.1.38 → 0.1.40

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,3 +1,377 @@
1
+ ## [0.1.40] - 2026-03-10
2
+
3
+ - callback url for localhost (821fb94)
4
+ - disable 4 scaffolding plugin by default. (bfb2b76)
5
+ - jaccard improvements (b7494a0)
6
+ - refactor azure plugin (68dfef4)
7
+ - refactor azure plugin (b24a008)
8
+ - fix trino catalog missing (4928a55)
9
+ - v36 bump and changelog generation on openai (37a0440)
10
+ - v36 bump and changelog generation on openai (a3b02d9)
11
+ - bump (a990058)
12
+ - status bar fix and new plugin for ttyd (27dde1e)
13
+ - file demo and tray (1a3e704)
14
+ - electron app (59ad0bb)
15
+ - compose and fops file plugin (1cf0e81)
16
+ - bump (346ffc1)
17
+ - localhost replaced by 127.0.0.1 (82b9f30)
18
+ - .29 (587b0e1)
19
+ - improve up down and bootstrap script (b79ebaf)
20
+ - checksum (22c8086)
21
+ - checksum (96b434f)
22
+ - checksum (15ed3c0)
23
+ - checksum (8a6543a)
24
+ - bump embed trino linksg (8440504)
25
+ - bump data (765ffd9)
26
+ - bump (cb8b232)
27
+ - broken tests (c532229)
28
+ - release 0.1.18, preflight checks (d902249)
29
+ - fix compute display bug (d10f5d9)
30
+ - cleanup packer files (6330f18)
31
+ - plan mode (cb36a8a)
32
+ - bump to 0.1.16 - agent ui (41ac1a2)
33
+ - bump to 0.1.15 - agent ui (4ebe2e1)
34
+ - bump to 0.1.14 (6c3a7fa)
35
+ - bump to 0.1.13 (8db570f)
36
+ - release 0.1.12 (c1c79e5)
37
+ - bump (11aa3b0)
38
+ - git keep and bump tui (be1678e)
39
+ - skills, index, rrf, compacted context (100k > 10k) (7b2fffd)
40
+ - cloudflare and token consumption, graphs indexing (0ad9eec)
41
+ - bump storage default (22c83ba)
42
+ - storage fix (68a22a0)
43
+ - skills update (7f56500)
44
+ - v9 bump (3864446)
45
+ - bump (c95eedc)
46
+ - rrf (dbf8c95)
47
+ - feat: warning when running predictions (95e8c52)
48
+ - feat: support for local predictions (45cf26b)
49
+ - feat: wip support for predictions + mlflow (3457052)
50
+ - add Reciprocal Rank Fusion (RRF) to knowledge and skill retrieval (61549bc)
51
+ - validate CSV headers in compute_run readiness check (a8c7a43)
52
+ - fix corrupted Iceberg metadata: probe tables + force cleanup on re-apply (50578af)
53
+ - enforce: never use foundation_apply to fix broken products (2e049bf)
54
+ - update SKILL.md with complete tool reference for knowledge retrieval (30b1924)
55
+ - add storage read, input DP table probe, and compute_run improvements (34e6c4c)
56
+ - skills update (1220385)
57
+ - skills update (bb66958)
58
+ - some tui improvement and tools apply overwrite (e90c35c)
59
+ - skills update (e9227a1)
60
+ - skills update (669c4b3)
61
+ - fix plugin pre-flight checks (f741743)
62
+ - increase agent context (6479aaa)
63
+ - skills and init sql fixes (5fce35e)
64
+ - checksum (3518b56)
65
+ - pending job limit (a139861)
66
+ - checksum (575d28c)
67
+ - bump (92049ba)
68
+ - fix bug per tab status (0a33657)
69
+ - fix bug per tab status (50457c6)
70
+ - checksumming (0ad842e)
71
+ - shot af markdown overlapping (51f63b9)
72
+ - add spark dockerfile for multiarch builds (95abbd1)
73
+ - fix plugin initialization (16b9782)
74
+ - split index.js (50902a2)
75
+ - cloudflare cidr (cc4e021)
76
+ - cloudflare restrictions (2f6ba2d)
77
+ - sequential start (86b496e)
78
+ - sequential start (4930fe1)
79
+ - sequential start (353f014)
80
+ - qa tests (2dc6a1a)
81
+ - bump sha for .85 (dc2edfe)
82
+ - preserve env on sudo (7831227)
83
+ - bump sha for .84 (6c052f9)
84
+ - non interactive for azure vms (0aa8a2f)
85
+ - keep .env if present (d072450)
86
+ - bump (7a8e732)
87
+ - ensure opa is on compose if not set (f4a5228)
88
+ - checksum bump (a2ccc20)
89
+ - netrc defensive checks (a0b0ccc)
90
+ - netrc defensive checks (ae37403)
91
+ - checksum (ec45d11)
92
+ - update sync and fix up (7f9af72)
93
+ - expand test for azure and add new per app tag support (388a168)
94
+ - checksum on update (44005fc)
95
+ - cleanup for later (15e5313)
96
+ - cleanup for later (11c9597)
97
+ - switch branch feature (822fecc)
98
+ - add pull (d1c19ab)
99
+ - Bump hono from 4.11.9 to 4.12.0 in /operator-cli (ad25144)
100
+ - tests (f180a9a)
101
+ - cleanup (39c49a3)
102
+ - registry (7b7126a)
103
+ - reconcile kafka (832d0db)
104
+ - gh login bug (025886c)
105
+ - cleanup (bb96cab)
106
+ - strip envs from process (2421180)
107
+ - force use of gh creds not tokens in envs var (fff7787)
108
+ - resolve import between npm installs and npm link (79522e1)
109
+ - fix gh scope and azure states (afd846c)
110
+ - refactoring (da50352)
111
+ - split fops repo (d447638)
112
+ - aks (b791f8f)
113
+ - refactor azure (67d3bad)
114
+ - wildcard (391f023)
115
+ - azure plugin (c074074)
116
+ - zap (d7e6e7f)
117
+ - fix knock (cf89c05)
118
+ - azure (4adec98)
119
+ - Bump tar from 7.5.7 to 7.5.9 in /operator-cli (e41e98e)
120
+ - azure stack index.js split (de12272)
121
+ - Bump ajv from 8.17.1 to 8.18.0 in /operator-cli (76da21f)
122
+ - packer (9665fbc)
123
+ - remove stack api (db0fd4d)
124
+ - packer cleanup (fe1bf14)
125
+ - force refresh token (3a3d7e2)
126
+ - provision shell (2ad505f)
127
+ - azure vm management (91dcb31)
128
+ - azure specific (2b0cca8)
129
+ - azure packer (12175b8)
130
+ - init hashed pwd (db8523c)
131
+ - packer (5b5c7c4)
132
+ - doctor for azure vm (ed524fa)
133
+ - packer and 1pwd (c6d053e)
134
+ - split big index.js (dc85a1b)
135
+ - kafka volume update (21815ec)
136
+ - fix openai azure tools confirmation and flow (0118cd1)
137
+ - nightly fix, test fix (5e0d04f)
138
+ - open ai training (cdc494a)
139
+ - openai integration in azure (1ca1475)
140
+ - ci (672cea9)
141
+ - refresh ghcr creds (4220c48)
142
+ - cleaned up version (1a0074f)
143
+ - traefik on ghcr and templates (8e31a05)
144
+ - apply fcl (e78911f)
145
+ - demo landscape (dd205fe)
146
+ - smarter login and schema (1af514f)
147
+ - no down before up unless something broke (56b1132)
148
+ - dai, reconcile failed containers (12907fa)
149
+ - reconcile dead container (7da75e4)
150
+ - defensive around storage buckets dir (b98871d)
151
+ - defensive around storage buckets dir (e86e132)
152
+ - gear in for multiarch (bf3fa3e)
153
+ - up autofix (99c7f89)
154
+ - autofix stale containers on up (43c7d0f)
155
+ - shared sessions fix (5de1359)
156
+ - share sessions between ui and tui (8321391)
157
+ - fix chat view display details (e263996)
158
+ - fix chat view display details (9babdda)
159
+ - tui up fixes (86e9f17)
160
+ - fix commands init (442538b)
161
+ - enable k3s profile (b2dcfc8)
162
+ - test up till job creation (656d388)
163
+ - tui fixes (0599779)
164
+ - cleanup (27731f0)
165
+ - train (90bf559)
166
+ - training (f809bf6)
167
+ - training (ba2b836)
168
+ - training (6fc5267)
169
+ - training (4af8ac9)
170
+ - fix build script (bd82836)
171
+ - infra test (5b79815)
172
+ - infra test (3a0ac05)
173
+ - infra test (e5c67b5)
174
+ - tests (ae7b621)
175
+ - tests (c09ae6a)
176
+ - update tui (4784153)
177
+ - training (0a5a330)
178
+ - tui (df4dd4a)
179
+ - pkg builds (4dc9993)
180
+ - also source env for creds (9a17d8f)
181
+ - fcl support (e8a5743)
182
+ - fcl support (8d6b6cd)
183
+ - fcl support (cb76a4a)
184
+ - bump package (df2ee85)
185
+
186
+ # Changelog
187
+
188
+ All notable changes to @meshxdata/fops (Foundation Operator CLI) are documented here.
189
+
190
+ ## [0.1.39] - 2026-03-10
191
+
192
+ - callback url for localhost (821fb94)
193
+ - disable 4 scaffolding plugin by default. (bfb2b76)
194
+ - jaccard improvements (b7494a0)
195
+ - refactor azure plugin (68dfef4)
196
+ - refactor azure plugin (b24a008)
197
+ - fix trino catalog missing (4928a55)
198
+ - v36 bump and changelog generation on openai (37a0440)
199
+ - v36 bump and changelog generation on openai (a3b02d9)
200
+ - bump (a990058)
201
+ - status bar fix and new plugin for ttyd (27dde1e)
202
+ - file demo and tray (1a3e704)
203
+ - electron app (59ad0bb)
204
+ - compose and fops file plugin (1cf0e81)
205
+ - bump (346ffc1)
206
+ - localhost replaced by 127.0.0.1 (82b9f30)
207
+ - .29 (587b0e1)
208
+ - improve up down and bootstrap script (b79ebaf)
209
+ - checksum (22c8086)
210
+ - checksum (96b434f)
211
+ - checksum (15ed3c0)
212
+ - checksum (8a6543a)
213
+ - bump embed trino linksg (8440504)
214
+ - bump data (765ffd9)
215
+ - bump (cb8b232)
216
+ - broken tests (c532229)
217
+ - release 0.1.18, preflight checks (d902249)
218
+ - fix compute display bug (d10f5d9)
219
+ - cleanup packer files (6330f18)
220
+ - plan mode (cb36a8a)
221
+ - bump to 0.1.16 - agent ui (41ac1a2)
222
+ - bump to 0.1.15 - agent ui (4ebe2e1)
223
+ - bump to 0.1.14 (6c3a7fa)
224
+ - bump to 0.1.13 (8db570f)
225
+ - release 0.1.12 (c1c79e5)
226
+ - bump (11aa3b0)
227
+ - git keep and bump tui (be1678e)
228
+ - skills, index, rrf, compacted context (100k > 10k) (7b2fffd)
229
+ - cloudflare and token consumption, graphs indexing (0ad9eec)
230
+ - bump storage default (22c83ba)
231
+ - storage fix (68a22a0)
232
+ - skills update (7f56500)
233
+ - v9 bump (3864446)
234
+ - bump (c95eedc)
235
+ - rrf (dbf8c95)
236
+ - feat: warning when running predictions (95e8c52)
237
+ - feat: support for local predictions (45cf26b)
238
+ - feat: wip support for predictions + mlflow (3457052)
239
+ - add Reciprocal Rank Fusion (RRF) to knowledge and skill retrieval (61549bc)
240
+ - validate CSV headers in compute_run readiness check (a8c7a43)
241
+ - fix corrupted Iceberg metadata: probe tables + force cleanup on re-apply (50578af)
242
+ - enforce: never use foundation_apply to fix broken products (2e049bf)
243
+ - update SKILL.md with complete tool reference for knowledge retrieval (30b1924)
244
+ - add storage read, input DP table probe, and compute_run improvements (34e6c4c)
245
+ - skills update (1220385)
246
+ - skills update (bb66958)
247
+ - some tui improvement and tools apply overwrite (e90c35c)
248
+ - skills update (e9227a1)
249
+ - skills update (669c4b3)
250
+ - fix plugin pre-flight checks (f741743)
251
+ - increase agent context (6479aaa)
252
+ - skills and init sql fixes (5fce35e)
253
+ - checksum (3518b56)
254
+ - pending job limit (a139861)
255
+ - checksum (575d28c)
256
+ - bump (92049ba)
257
+ - fix bug per tab status (0a33657)
258
+ - fix bug per tab status (50457c6)
259
+ - checksumming (0ad842e)
260
+ - shot af markdown overlapping (51f63b9)
261
+ - add spark dockerfile for multiarch builds (95abbd1)
262
+ - fix plugin initialization (16b9782)
263
+ - split index.js (50902a2)
264
+ - cloudflare cidr (cc4e021)
265
+ - cloudflare restrictions (2f6ba2d)
266
+ - sequential start (86b496e)
267
+ - sequential start (4930fe1)
268
+ - sequential start (353f014)
269
+ - qa tests (2dc6a1a)
270
+ - bump sha for .85 (dc2edfe)
271
+ - preserve env on sudo (7831227)
272
+ - bump sha for .84 (6c052f9)
273
+ - non interactive for azure vms (0aa8a2f)
274
+ - keep .env if present (d072450)
275
+ - bump (7a8e732)
276
+ - ensure opa is on compose if not set (f4a5228)
277
+ - checksum bump (a2ccc20)
278
+ - netrc defensive checks (a0b0ccc)
279
+ - netrc defensive checks (ae37403)
280
+ - checksum (ec45d11)
281
+ - update sync and fix up (7f9af72)
282
+ - expand test for azure and add new per app tag support (388a168)
283
+ - checksum on update (44005fc)
284
+ - cleanup for later (15e5313)
285
+ - cleanup for later (11c9597)
286
+ - switch branch feature (822fecc)
287
+ - add pull (d1c19ab)
288
+ - Bump hono from 4.11.9 to 4.12.0 in /operator-cli (ad25144)
289
+ - tests (f180a9a)
290
+ - cleanup (39c49a3)
291
+ - registry (7b7126a)
292
+ - reconcile kafka (832d0db)
293
+ - gh login bug (025886c)
294
+ - cleanup (bb96cab)
295
+ - strip envs from process (2421180)
296
+ - force use of gh creds not tokens in envs var (fff7787)
297
+ - resolve import between npm installs and npm link (79522e1)
298
+ - fix gh scope and azure states (afd846c)
299
+ - refactoring (da50352)
300
+ - split fops repo (d447638)
301
+ - aks (b791f8f)
302
+ - refactor azure (67d3bad)
303
+ - wildcard (391f023)
304
+ - azure plugin (c074074)
305
+ - zap (d7e6e7f)
306
+ - fix knock (cf89c05)
307
+ - azure (4adec98)
308
+ - Bump tar from 7.5.7 to 7.5.9 in /operator-cli (e41e98e)
309
+ - azure stack index.js split (de12272)
310
+ - Bump ajv from 8.17.1 to 8.18.0 in /operator-cli (76da21f)
311
+ - packer (9665fbc)
312
+ - remove stack api (db0fd4d)
313
+ - packer cleanup (fe1bf14)
314
+ - force refresh token (3a3d7e2)
315
+ - provision shell (2ad505f)
316
+ - azure vm management (91dcb31)
317
+ - azure specific (2b0cca8)
318
+ - azure packer (12175b8)
319
+ - init hashed pwd (db8523c)
320
+ - packer (5b5c7c4)
321
+ - doctor for azure vm (ed524fa)
322
+ - packer and 1pwd (c6d053e)
323
+ - split big index.js (dc85a1b)
324
+ - kafka volume update (21815ec)
325
+ - fix openai azure tools confirmation and flow (0118cd1)
326
+ - nightly fix, test fix (5e0d04f)
327
+ - open ai training (cdc494a)
328
+ - openai integration in azure (1ca1475)
329
+ - ci (672cea9)
330
+ - refresh ghcr creds (4220c48)
331
+ - cleaned up version (1a0074f)
332
+ - traefik on ghcr and templates (8e31a05)
333
+ - apply fcl (e78911f)
334
+ - demo landscape (dd205fe)
335
+ - smarter login and schema (1af514f)
336
+ - no down before up unless something broke (56b1132)
337
+ - dai, reconcile failed containers (12907fa)
338
+ - reconcile dead container (7da75e4)
339
+ - defensive around storage buckets dir (b98871d)
340
+ - defensive around storage buckets dir (e86e132)
341
+ - gear in for multiarch (bf3fa3e)
342
+ - up autofix (99c7f89)
343
+ - autofix stale containers on up (43c7d0f)
344
+ - shared sessions fix (5de1359)
345
+ - share sessions between ui and tui (8321391)
346
+ - fix chat view display details (e263996)
347
+ - fix chat view display details (9babdda)
348
+ - tui up fixes (86e9f17)
349
+ - fix commands init (442538b)
350
+ - enable k3s profile (b2dcfc8)
351
+ - test up till job creation (656d388)
352
+ - tui fixes (0599779)
353
+ - cleanup (27731f0)
354
+ - train (90bf559)
355
+ - training (f809bf6)
356
+ - training (ba2b836)
357
+ - training (6fc5267)
358
+ - training (4af8ac9)
359
+ - fix build script (bd82836)
360
+ - infra test (5b79815)
361
+ - infra test (3a0ac05)
362
+ - infra test (e5c67b5)
363
+ - tests (ae7b621)
364
+ - tests (c09ae6a)
365
+ - update tui (4784153)
366
+ - training (0a5a330)
367
+ - tui (df4dd4a)
368
+ - pkg builds (4dc9993)
369
+ - also source env for creds (9a17d8f)
370
+ - fcl support (e8a5743)
371
+ - fcl support (8d6b6cd)
372
+ - fcl support (cb76a4a)
373
+ - bump package (df2ee85)
374
+
1
375
  ## [0.1.38] - 2026-03-10
2
376
 
3
377
  - callback url for localhost (821fb94)
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@meshxdata/fops",
3
- "version": "0.1.38",
3
+ "version": "0.1.40",
4
4
  "description": "CLI to install and manage data mesh platforms",
5
5
  "keywords": [
6
6
  "fops",
package/src/doctor.js CHANGED
@@ -11,6 +11,20 @@ import { rootDir } from "./project.js";
11
11
  import { wslExec, wslHomedir, wslFileExists, wslReadFile, wslCmdVersion } from "./wsl.js";
12
12
  import { getInquirer } from "./lazy.js";
13
13
 
14
+ // Check if sudo is available without a password (cached credentials or NOPASSWD)
15
+ let _sudoOk = null;
16
+ async function canSudo() {
17
+ if (_sudoOk !== null) return _sudoOk;
18
+ if (process.platform === "win32") { _sudoOk = false; return false; }
19
+ try {
20
+ const { exitCode } = await execa("sudo", ["-n", "true"], { reject: false, timeout: 5000 });
21
+ _sudoOk = exitCode === 0;
22
+ } catch { _sudoOk = false; }
23
+ return _sudoOk;
24
+ }
25
+
26
+
27
+
14
28
  const KEY_PORTS = {
15
29
  5432: "Postgres",
16
30
  9092: "Kafka",
@@ -246,6 +260,7 @@ export async function runDoctor(opts = {}, registry = null) {
246
260
  console.log(chalk.green(` ✓ Removed ${b}`));
247
261
  } catch (err) {
248
262
  if (err.code === "EACCES") {
263
+ if (!(await canSudo())) throw new Error(`Permission denied removing ${b} — sudo not available`);
249
264
  console.log(chalk.cyan(` ▶ sudo rm ${b}`));
250
265
  await execa("sudo", ["rm", b], { stdio: "inherit", timeout: 10000 });
251
266
  } else {
@@ -379,6 +394,7 @@ export async function runDoctor(opts = {}, registry = null) {
379
394
  console.log(chalk.cyan(' ▶ start "" "Docker Desktop"'));
380
395
  await execa("cmd", ["/c", "start", "", "Docker Desktop"], { timeout: 10000 });
381
396
  } else {
397
+ if (!(await canSudo())) throw new Error("sudo not available — start Docker manually: sudo systemctl start docker");
382
398
  console.log(chalk.cyan(" ▶ sudo systemctl start docker"));
383
399
  await execa("sudo", ["systemctl", "start", "docker"], { stdio: "inherit", timeout: 30000 });
384
400
  return;
@@ -447,6 +463,7 @@ export async function runDoctor(opts = {}, registry = null) {
447
463
  console.log(chalk.cyan(' ▶ start "" "Docker Desktop"'));
448
464
  await execa("cmd", ["/c", "start", "", "Docker Desktop"], { timeout: 10000 });
449
465
  } else {
466
+ if (!(await canSudo())) throw new Error("sudo not available — install Docker manually: curl -fsSL https://get.docker.com | sudo sh");
450
467
  console.log(chalk.cyan(" ▶ curl -fsSL https://get.docker.com | sudo sh"));
451
468
  await execa("sh", ["-c", "curl -fsSL https://get.docker.com | sudo sh"], {
452
469
  stdio: "inherit", timeout: 300_000,
@@ -497,6 +514,7 @@ export async function runDoctor(opts = {}, registry = null) {
497
514
  await execa("winget", ["install", "Git.Git", "--accept-source-agreements", "--accept-package-agreements"], { stdio: "inherit", timeout: 300_000 });
498
515
  }
499
516
  } else {
517
+ if (!(await canSudo())) throw new Error("sudo not available — install git manually");
500
518
  console.log(chalk.cyan(" ▶ sudo apt-get install -y git"));
501
519
  await execa("sudo", ["apt-get", "install", "-y", "git"], { stdio: "inherit", timeout: 300_000 });
502
520
  }
@@ -540,6 +558,7 @@ export async function runDoctor(opts = {}, registry = null) {
540
558
  console.log(chalk.cyan(" ▶ brew install npm"));
541
559
  await execa("brew", ["install", "npm"], { stdio: "inherit", timeout: 300_000 });
542
560
  } else if (process.platform === "linux" || (process.platform === "win32" && useWsl)) {
561
+ if (!(await canSudo())) throw new Error("sudo not available — install npm manually");
543
562
  console.log(chalk.cyan(" ▶ sudo apt-get install -y npm"));
544
563
  await run("sudo", ["apt-get", "install", "-y", "npm"], { stdio: "inherit", timeout: 300_000 });
545
564
  } else {
@@ -573,9 +592,8 @@ export async function runDoctor(opts = {}, registry = null) {
573
592
  await execa("brew", ["install", "--cask", "1password-cli"], { stdio: "inherit", timeout: 300_000 });
574
593
  } else if (process.platform === "win32") {
575
594
  if (useWsl) {
595
+ if (!(await canSudo())) throw new Error("sudo not available — install 1Password CLI manually");
576
596
  console.log(chalk.cyan(" ▶ [WSL] Installing 1Password CLI via apt…"));
577
- // Authenticate sudo upfront so the long install chain doesn't hang at a password prompt
578
- await run("sudo", ["-v"], { stdio: "inherit", timeout: 30_000 });
579
597
  await run("sh", ["-c", [
580
598
  "curl -sS https://downloads.1password.com/linux/keys/1password.asc | sudo gpg --batch --yes --dearmor --output /usr/share/keyrings/1password-archive-keyring.gpg",
581
599
  '&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/1password-archive-keyring.gpg] https://downloads.1password.com/linux/debian/$(dpkg --print-architecture) stable main" | sudo tee /etc/apt/sources.list.d/1password.list',
@@ -591,9 +609,8 @@ export async function runDoctor(opts = {}, registry = null) {
591
609
  }
592
610
  } else {
593
611
  // Linux — install via 1Password apt repository
594
- // Authenticate sudo upfront so the long install chain doesn't hang at a password prompt
612
+ if (!(await canSudo())) throw new Error("sudo not available install 1Password CLI manually");
595
613
  console.log(chalk.cyan(" ▶ Installing 1Password CLI via apt…"));
596
- await execa("sudo", ["-v"], { stdio: "inherit", timeout: 30_000 });
597
614
  await execa("sh", ["-c", [
598
615
  "curl -sS https://downloads.1password.com/linux/keys/1password.asc | sudo gpg --batch --yes --dearmor --output /usr/share/keyrings/1password-archive-keyring.gpg",
599
616
  '&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/1password-archive-keyring.gpg] https://downloads.1password.com/linux/debian/$(dpkg --print-architecture) stable main" | sudo tee /etc/apt/sources.list.d/1password.list',
@@ -607,6 +624,7 @@ export async function runDoctor(opts = {}, registry = null) {
607
624
 
608
625
  // GitHub CLI
609
626
  const installGhLinux = async (runner = execa) => {
627
+ if (!(await canSudo())) throw new Error("sudo not available — install gh manually: https://cli.github.com");
610
628
  console.log(chalk.cyan(" ▶ Installing gh via apt…"));
611
629
  await runner("sh", ["-c", [
612
630
  "type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)",
@@ -855,7 +873,26 @@ export async function runDoctor(opts = {}, registry = null) {
855
873
  console.log(chalk.green(" ✓ write:packages and repo scopes added"));
856
874
  };
857
875
 
876
+ // Double-check: if hasWritePackages came from netrc fallback, verify the gh CLI token too
877
+ let ghCliHasPackages = hasWritePackages;
858
878
  if (hasWritePackages) {
879
+ try {
880
+ const { stdout: tkOut, exitCode: tkExit } = await runGh(["auth", "token"], { timeout: 5000, reject: false });
881
+ if (tkExit === 0 && tkOut?.trim()) {
882
+ const { status, scopes } = await ghApiGetWithScopes(tkOut.trim());
883
+ if (status === 200 && scopes && !scopes.includes("write:packages") && !scopes.includes("read:packages")) {
884
+ ghCliHasPackages = false;
885
+ }
886
+ }
887
+ } catch {}
888
+ }
889
+ if (hasWritePackages && !ghCliHasPackages) {
890
+ warn(
891
+ "gh CLI token missing packages scope",
892
+ "netrc token has it, but gh auth token does not — run: gh auth refresh -h github.com -s write:packages",
893
+ ghcrRefreshFn,
894
+ );
895
+ } else if (hasWritePackages) {
859
896
  ok("gh token has write:packages scope");
860
897
  } else {
861
898
  fail(
@@ -899,32 +936,63 @@ export async function runDoctor(opts = {}, registry = null) {
899
936
  }
900
937
 
901
938
  const ghcrFixFn = async () => {
902
- // Ensure write:packages scope is present (implies read:packages)
903
- if (!hasWritePackages) {
904
- console.log(chalk.cyan(" ▶ gh auth refresh -h github.com -s write:packages -s repo"));
905
- console.log(chalk.dim(" (gh CLI will ask for device code auth, then optionally Git credential setup)"));
906
- await runGh(["auth", "refresh", "-h", "github.com", "-s", "write:packages", "-s", "repo"], {
907
- stdio: "inherit", timeout: 120_000,
908
- });
909
- }
910
-
911
- // Get fresh token and login to ghcr.io
939
+ // Find a token with packages scope prefer gh CLI token, fall back to netrc
912
940
  let ghToken = null;
941
+
942
+ // 1) Try gh CLI token
913
943
  try {
914
944
  const { stdout, exitCode } = await runGh(["auth", "token"], { timeout: 5000, reject: false });
915
- if (exitCode === 0 && stdout?.trim()) ghToken = stdout.trim();
945
+ if (exitCode === 0 && stdout?.trim()) {
946
+ const t = stdout.trim();
947
+ const { status, scopes } = await ghApiGetWithScopes(t);
948
+ if (status === 200 && scopes?.includes("write:packages")) {
949
+ ghToken = t;
950
+ } else if (status === 200) {
951
+ // gh token works but lacks packages scope — refresh it
952
+ console.log(chalk.yellow(" gh CLI token missing write:packages — refreshing…"));
953
+ console.log(chalk.cyan(" ▶ gh auth refresh -h github.com -s write:packages -s repo"));
954
+ const { exitCode: refExit } = await runGh(["auth", "refresh", "-h", "github.com", "-s", "write:packages", "-s", "repo"], {
955
+ stdio: "inherit", timeout: 120_000, reject: false,
956
+ });
957
+ if (refExit === 0) {
958
+ const { stdout: fresh } = await runGh(["auth", "token"], { timeout: 5000, reject: false });
959
+ if (fresh?.trim()) ghToken = fresh.trim();
960
+ }
961
+ }
962
+ }
916
963
  } catch {}
917
964
 
965
+ // 2) Fall back to netrc token if gh CLI token doesn't have the right scopes
966
+ if (!ghToken) {
967
+ try {
968
+ const content = await readFile(netrcPath);
969
+ const netrcToken = readNetrcToken(content, "github.com");
970
+ if (netrcToken) {
971
+ const { status, scopes } = await ghApiGetWithScopes(netrcToken);
972
+ if (status === 200 && (scopes?.includes("write:packages") || scopes?.includes("read:packages"))) {
973
+ console.log(chalk.cyan(" Using netrc token (has packages scope)"));
974
+ ghToken = netrcToken;
975
+ }
976
+ }
977
+ } catch {}
978
+ }
979
+
918
980
  if (ghToken) {
919
- console.log(chalk.cyan(" ▶ Logging into ghcr.io using gh auth token…"));
981
+ console.log(chalk.cyan(" ▶ Logging into ghcr.io…"));
982
+ // Login for both current user and root (Docker daemon runs as root)
920
983
  await run("sh", ["-c", `echo ${ghToken} | docker login ghcr.io -u x-access-token --password-stdin`], {
921
984
  stdio: "inherit", timeout: 15000,
922
985
  });
986
+ try {
987
+ await execa("sh", ["-c", `echo ${ghToken} | sudo docker login ghcr.io -u x-access-token --password-stdin`], {
988
+ timeout: 15000, stdio: "pipe",
989
+ });
990
+ } catch { /* root login best-effort */ }
923
991
  } else {
924
- console.log(chalk.yellow(" gh CLI not authenticated manual login required."));
925
- console.log(chalk.dim(" Create a PAT at https://github.com/settings/tokens with write:packages scope, then run:"));
926
- console.log(chalk.dim(" docker login ghcr.io -u <your-username>"));
927
- throw new Error("Manual GHCR login required");
992
+ console.log(chalk.yellow(" No token with packages scope found."));
993
+ console.log(chalk.dim(" Run: gh auth refresh -h github.com -s write:packages -s repo"));
994
+ console.log(chalk.dim(" Then: echo $(gh auth token) | docker login ghcr.io -u x-access-token --password-stdin"));
995
+ throw new Error("No token with packages scope — refresh gh auth or create a PAT with write:packages");
928
996
  }
929
997
  };
930
998
 
@@ -948,7 +1016,11 @@ export async function runDoctor(opts = {}, registry = null) {
948
1016
  }
949
1017
  }
950
1018
  } else if (ghcrLoggedIn) {
951
- fail("Docker logged into ghcr.io but pull access denied", "token may lack read:packages or write:packages scope", ghcrFixFn);
1019
+ fail(
1020
+ "Docker logged into ghcr.io but pull access denied",
1021
+ "Docker has a stale token — fix: echo $(gh auth token) | docker login ghcr.io -u x-access-token --password-stdin",
1022
+ ghcrFixFn,
1023
+ );
952
1024
  } else {
953
1025
  fail("Docker not logged into ghcr.io", "needed to pull/push private images", ghcrFixFn);
954
1026
  }
@@ -99,6 +99,10 @@ export function register(api) {
99
99
  } else if (synced > 0) {
100
100
  const files = synced === 1 ? ".env" : `${synced} .env files`;
101
101
  console.log(chalk.green(` ✓ ${totalSecrets} secret(s) synced → ${files}`));
102
+ // Update one-shot marker so auto-sync on next `fops up` is skipped
103
+ const markerDir = path.join(root, ".fops");
104
+ if (!fs.existsSync(markerDir)) fs.mkdirSync(markerDir, { recursive: true });
105
+ fs.writeFileSync(path.join(markerDir, ".1p-synced"), new Date().toISOString() + "\n");
102
106
  } else {
103
107
  console.log(chalk.dim(" Nothing to sync."));
104
108
  }
@@ -181,13 +185,18 @@ export function register(api) {
181
185
  },
182
186
  });
183
187
 
184
- // ── Hook: before:up — auto-sync secrets ────────────
188
+ // ── Hook: before:up — one-shot auto-sync secrets ───
185
189
  api.registerHook("before:up", async () => {
186
190
  if (!config.autoSync) return;
187
191
 
188
192
  const root = findRoot();
189
193
  if (!root) return;
190
194
 
195
+ // One-shot: skip if secrets were already synced once
196
+ const markerDir = path.join(root, ".fops");
197
+ const markerPath = path.join(markerDir, ".1p-synced");
198
+ if (fs.existsSync(markerPath)) return;
199
+
191
200
  const templates = discoverTemplates(root);
192
201
  if (templates.length === 0) return;
193
202
 
@@ -212,6 +221,9 @@ export function register(api) {
212
221
  } else if (synced > 0) {
213
222
  const files = synced === 1 ? ".env" : `${synced} .env files`;
214
223
  console.log(chalk.green(` ✓ ${totalSecrets} secret(s) synced → ${files}`));
224
+ // Mark as done so subsequent `fops up` calls skip auto-sync
225
+ if (!fs.existsSync(markerDir)) fs.mkdirSync(markerDir, { recursive: true });
226
+ fs.writeFileSync(markerPath, new Date().toISOString() + "\n");
215
227
  }
216
228
  });
217
229
 
@@ -138,8 +138,10 @@ export async function register(api) {
138
138
  } else if (process.platform === "linux") {
139
139
  const { exitCode } = await execa("which", ["apt-get"], { reject: false });
140
140
  if (exitCode !== 0) throw new Error("apt-get not found — use https://learn.microsoft.com/en-us/cli/azure/install-azure-cli for your distro");
141
- await execa("sudo", ["apt-get", "update"], { stdio: "inherit", timeout: 120000 });
142
- await execa("sudo", ["apt-get", "install", "-y", "azure-cli"], { stdio: "inherit", timeout: 120000 });
141
+ // Check sudo is available before attempting install
142
+ const sudoCheck = await execa("sudo", ["-n", "true"], { reject: false, timeout: 5000 });
143
+ if (sudoCheck.exitCode !== 0) throw new Error("sudo not available — install Azure CLI manually: curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash");
144
+ await execa("sudo", ["sh", "-c", "curl -sL https://aka.ms/InstallAzureCLIDeb | bash"], { stdio: "inherit", timeout: 300000 });
143
145
  } else {
144
146
  throw new Error("Auto-install only on macOS and Linux — use the Microsoft install link");
145
147
  }
@@ -60,12 +60,44 @@ export function suppressTlsWarning() {
60
60
  };
61
61
  }
62
62
 
63
+ /**
64
+ * Resolve Cloudflare Access service-token headers from env or .env files.
65
+ * Returns { "CF-Access-Client-Id": ..., "CF-Access-Client-Secret": ... } or {}.
66
+ */
67
+ let _cfAccessHeaders;
68
+ export function resolveCfAccessHeaders() {
69
+ if (_cfAccessHeaders !== undefined) return _cfAccessHeaders;
70
+ let id = process.env.CF_ACCESS_CLIENT_ID || "";
71
+ let secret = process.env.CF_ACCESS_CLIENT_SECRET || "";
72
+ if (!id) {
73
+ // Try .env files
74
+ const candidates = [pathMod.resolve(".env"), pathMod.resolve("..", ".env")];
75
+ try {
76
+ const raw = JSON.parse(fs.readFileSync(pathMod.join(os.homedir(), ".fops.json"), "utf8"));
77
+ if (raw?.projectRoot) candidates.unshift(pathMod.join(raw.projectRoot, ".env"));
78
+ } catch {}
79
+ for (const ep of candidates) {
80
+ try {
81
+ const lines = fs.readFileSync(ep, "utf8").split("\n");
82
+ const get = (k) => { const ln = lines.find((l) => l.startsWith(`${k}=`)); return ln ? ln.slice(k.length + 1).trim().replace(/^["']|["']$/g, "") : ""; };
83
+ id = id || get("CF_ACCESS_CLIENT_ID");
84
+ secret = secret || get("CF_ACCESS_CLIENT_SECRET");
85
+ if (id && secret) break;
86
+ } catch {}
87
+ }
88
+ }
89
+ _cfAccessHeaders = id && secret ? { "CF-Access-Client-Id": id, "CF-Access-Client-Secret": secret } : {};
90
+ return _cfAccessHeaders;
91
+ }
92
+
63
93
  export async function vmFetch(url, opts = {}) {
64
94
  suppressTlsWarning();
65
95
  const prev = process.env.NODE_TLS_REJECT_UNAUTHORIZED;
66
96
  process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
67
97
  try {
68
- return await fetch(url, { signal: AbortSignal.timeout(10_000), ...opts });
98
+ const cfHeaders = resolveCfAccessHeaders();
99
+ const headers = { ...cfHeaders, ...(opts.headers || {}) };
100
+ return await fetch(url, { signal: AbortSignal.timeout(10_000), ...opts, headers });
69
101
  } finally {
70
102
  if (prev === undefined) delete process.env.NODE_TLS_REJECT_UNAUTHORIZED;
71
103
  else process.env.NODE_TLS_REJECT_UNAUTHORIZED = prev;
@@ -191,6 +223,13 @@ export async function resolveRemoteAuth(opts = {}) {
191
223
  bearerToken = "";
192
224
 
193
225
  // 2) Pre-auth against the backend /iam/login
226
+ const cfHeaders = resolveCfAccessHeaders();
227
+ const cfKeys = Object.keys(cfHeaders);
228
+ if (cfKeys.length) {
229
+ log(chalk.dim(` CF Access headers: ${cfKeys.join(", ")} (id=${cfHeaders["CF-Access-Client-Id"]?.slice(0, 8)}…)`));
230
+ } else {
231
+ log(chalk.yellow(" ⚠ No CF Access service token found (set CF_ACCESS_CLIENT_ID + CF_ACCESS_CLIENT_SECRET)"));
232
+ }
194
233
  if (qaUser && qaPass && apiUrl) {
195
234
  try {
196
235
  if (suppressTls) suppressTls();
@@ -199,8 +238,8 @@ export async function resolveRemoteAuth(opts = {}) {
199
238
  try {
200
239
  const resp = await fetch(`${apiUrl}/iam/login`, {
201
240
  method: "POST",
202
- headers: { "Content-Type": "application/json" },
203
- body: JSON.stringify({ username: qaUser, password: qaPass }),
241
+ headers: { "Content-Type": "application/json", ...cfHeaders },
242
+ body: JSON.stringify({ user: qaUser, password: qaPass }),
204
243
  signal: AbortSignal.timeout(10_000),
205
244
  });
206
245
  if (resp.ok) {
@@ -211,7 +250,9 @@ export async function resolveRemoteAuth(opts = {}) {
211
250
  return { bearerToken, qaUser, qaPass, useTokenMode: true };
212
251
  }
213
252
  } else {
214
- log(chalk.dim(` Local creds rejected: HTTP ${resp.status}`));
253
+ const body = await resp.text().catch(() => "");
254
+ log(chalk.dim(` Local creds rejected: HTTP ${resp.status} (user=${qaUser})`));
255
+ if (body) log(chalk.dim(` Response: ${body.slice(0, 200)}`));
215
256
  }
216
257
  } finally {
217
258
  if (prev === undefined) delete process.env.NODE_TLS_REJECT_UNAUTHORIZED;
@@ -243,32 +284,8 @@ export async function resolveRemoteAuth(opts = {}) {
243
284
  if (resp.ok) {
244
285
  const data = await resp.json();
245
286
  if (data.access_token) {
246
- // Validate the token against the target API before committing to it.
247
- // Local Auth0 config may have a different audience than the remote VM expects.
248
- let tokenValid = true;
249
- if (apiUrl) {
250
- try {
251
- const prev = process.env.NODE_TLS_REJECT_UNAUTHORIZED;
252
- process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
253
- try {
254
- const check = await fetch(`${apiUrl}/iam/me`, {
255
- headers: { Authorization: `Bearer ${data.access_token}` },
256
- signal: AbortSignal.timeout(8_000),
257
- });
258
- if (check.status === 401 || check.status === 403) {
259
- tokenValid = false;
260
- log(chalk.dim(` Auth0 token rejected by API (wrong audience) — trying SSH fallback…`));
261
- }
262
- } finally {
263
- if (prev === undefined) delete process.env.NODE_TLS_REJECT_UNAUTHORIZED;
264
- else process.env.NODE_TLS_REJECT_UNAUTHORIZED = prev;
265
- }
266
- } catch { /* network error — assume token is OK */ }
267
- }
268
- if (tokenValid) {
269
- log(chalk.green(` ✓ Authenticated as ${qaUser} via Auth0`));
270
- return { bearerToken: data.access_token, qaUser, qaPass, useTokenMode: true };
271
- }
287
+ log(chalk.green(` ✓ Authenticated as ${qaUser} via Auth0`));
288
+ return { bearerToken: data.access_token, qaUser, qaPass, useTokenMode: true };
272
289
  }
273
290
  } else {
274
291
  log(chalk.dim(` Auth0 rejected: HTTP ${resp.status}`));
@@ -275,7 +275,15 @@ export async function ensureAzAuth(execa, { subscription, throwOnMissing = false
275
275
  if (subscription) args.push("--subscription", subscription);
276
276
  const { stdout } = await execa("az", args, { timeout: 15000 });
277
277
  return JSON.parse(stdout);
278
- } catch {
278
+ } catch (err) {
279
+ if (isAzSessionExpiredError(err)) {
280
+ const { suggested } = parseAzReloginHint(err);
281
+ const msg = `Azure session expired (MFA). Run:\n ${suggested.replace(/\n/g, "\n ")}`;
282
+ if (throwOnMissing) throw new Error(msg);
283
+ console.error(chalk.yellow(`\n Azure session expired (MFA or token refresh required).`));
284
+ console.error(chalk.cyan(` Run: ${suggested.split("\n")[0]}\n`));
285
+ process.exit(1);
286
+ }
279
287
  const msg = "Not logged in to Azure. Run: az login";
280
288
  if (throwOnMissing) throw new Error(msg);
281
289
  console.error(chalk.red("\n Not logged in to Azure. Run: az login\n"));
@@ -447,7 +455,61 @@ async function refreshTokenViaGh(execa, missingScopes) {
447
455
  }
448
456
 
449
457
  export async function verifyGithubToken(token) {
450
- if (!token) return { token, login: undefined };
458
+ if (!token) {
459
+ // No token anywhere — try gh CLI auth
460
+ const execa = await lazyExeca();
461
+ try {
462
+ const { stdout: ghToken, exitCode } = await execa("gh", ["auth", "token", "-h", "github.com"], { timeout: 10000, reject: false });
463
+ const existing = (ghToken || "").trim();
464
+ if (exitCode === 0 && existing) {
465
+ console.log(chalk.cyan(" No token in env/netrc — using gh CLI token"));
466
+ token = existing;
467
+ }
468
+ } catch { /* gh not installed or not authed */ }
469
+
470
+ if (!token) {
471
+ // Still no token — offer interactive gh auth login
472
+ console.log(chalk.yellow("\n ⚠ No GitHub token found (checked --github-token, $GITHUB_TOKEN, ~/.netrc, gh CLI)"));
473
+ try {
474
+ const { exitCode: ghExists } = await execa("which", ["gh"], { reject: false, timeout: 5000 });
475
+ if (ghExists === 0) {
476
+ console.log(chalk.cyan(" ▶ Running gh auth login…\n"));
477
+ const { exitCode: loginExit } = await execa("gh", ["auth", "login", "-h", "github.com", "-s", "write:packages,repo"], { stdio: "inherit", reject: false, timeout: 300000 });
478
+ if (loginExit === 0) {
479
+ const { stdout: newToken } = await execa("gh", ["auth", "token", "-h", "github.com"], { timeout: 10000 });
480
+ token = (newToken || "").trim();
481
+ if (token) {
482
+ // Sync to .netrc for future use
483
+ const netrcPath = path.join(os.homedir(), ".netrc");
484
+ const entry = `machine github.com login x-access-token password ${token}`;
485
+ try {
486
+ let content = "";
487
+ try { content = fs.readFileSync(netrcPath, "utf8"); } catch {}
488
+ if (/^machine\s+github\.com\b/m.test(content)) {
489
+ content = content.replace(
490
+ /machine\s+github\.com\b[^\n]*(\n\s*(login|password)\s+[^\n]*)*/gm,
491
+ entry,
492
+ );
493
+ } else {
494
+ content = content.trimEnd() + (content ? "\n" : "") + entry + "\n";
495
+ }
496
+ fs.writeFileSync(netrcPath, content, { mode: 0o600 });
497
+ console.log(chalk.green(" ✓ ~/.netrc updated"));
498
+ } catch {}
499
+ }
500
+ }
501
+ } else {
502
+ console.log(chalk.dim(" Install gh CLI to authenticate: https://cli.github.com"));
503
+ }
504
+ } catch {}
505
+
506
+ if (!token) {
507
+ console.error(chalk.red(" ✗ GitHub authentication required — GHCR pulls will fail without a token."));
508
+ console.error(chalk.dim(" Set $GITHUB_TOKEN, run gh auth login, or pass --github-token.\n"));
509
+ process.exit(1);
510
+ }
511
+ }
512
+ }
451
513
  const execa = await lazyExeca();
452
514
  try {
453
515
  let res = await fetch("https://api.github.com/user", {
@@ -3273,7 +3273,7 @@ export async function azureSshWhitelistMe(opts = {}) {
3273
3273
 
3274
3274
  const merged = [...new Set([...currentSources.filter(s => s && s !== "*" && s !== "Internet"), myCidr])];
3275
3275
  console.log(chalk.yellow(` ↻ Adding ${myCidr} to SSH rule on ${nsgName} (${currentSources.length} existing)...`));
3276
- const { exitCode: updateCode } = await execa("az", [
3276
+ const { exitCode: updateCode, stderr: updateStderr } = await execa("az", [
3277
3277
  "network", "nsg", "rule", "create", "-g", rg, "--nsg-name", nsgName,
3278
3278
  "-n", sshRule?.name || "allow-ssh", "--priority", String(sshRule?.priority || 1000),
3279
3279
  "--destination-port-ranges", "22", "--access", "Allow",
@@ -3283,8 +3283,17 @@ export async function azureSshWhitelistMe(opts = {}) {
3283
3283
  ], { reject: false, timeout: 30000 });
3284
3284
 
3285
3285
  if (updateCode !== 0) {
3286
- console.error(ERR(`\n Failed to update NSG rule on ${nsgName}\n`));
3287
- process.exit(1);
3286
+ console.error(ERR(`\n Failed to update NSG rule on ${nsgName}`));
3287
+ const msg = (updateStderr || "").trim();
3288
+ if (msg.includes("AADSTS") || msg.includes("Interactive authentication")) {
3289
+ console.error(ERR(" Azure session expired — run: az login"));
3290
+ } else if (msg.includes("AuthorizationFailed")) {
3291
+ console.error(ERR(" Insufficient permissions to update NSG rules in this subscription."));
3292
+ } else if (msg) {
3293
+ console.error(DIM(` ${msg.split("\n")[0]}`));
3294
+ }
3295
+ console.error("");
3296
+ return;
3288
3297
  }
3289
3298
  console.log(OK(`\n ✓ SSH (22) whitelisted for ${myCidr} on ${vmName} (${nsgName})\n`));
3290
3299
  console.log(` Sources: ${merged.join(", ")}\n`);
@@ -78,6 +78,20 @@ export function registerTestCommands(azure) {
78
78
  : content + `\n${key}=${value}`;
79
79
  };
80
80
 
81
+ // Resolve CF Access service token for Cloudflare-proxied endpoints
82
+ let cfClientId = process.env.CF_ACCESS_CLIENT_ID || "";
83
+ let cfClientSecret = process.env.CF_ACCESS_CLIENT_SECRET || "";
84
+ if (!cfClientId) {
85
+ // Try reading from the compose .env
86
+ try {
87
+ const composeDotEnv = await fsp.readFile(path.join(root, ".env"), "utf8");
88
+ const idMatch = composeDotEnv.match(/^CF_ACCESS_CLIENT_ID=(.+)$/m);
89
+ const secretMatch = composeDotEnv.match(/^CF_ACCESS_CLIENT_SECRET=(.+)$/m);
90
+ if (idMatch) cfClientId = idMatch[1].trim();
91
+ if (secretMatch) cfClientSecret = secretMatch[1].trim();
92
+ } catch { /* .env may not exist */ }
93
+ }
94
+
81
95
  envContent = setVar(envContent, "API_URL", apiUrl);
82
96
  envContent = setVar(envContent, "DEV_API_URL", apiUrl);
83
97
  envContent = setVar(envContent, "LIVE_API_URL", apiUrl);
@@ -93,6 +107,10 @@ export function registerTestCommands(azure) {
93
107
  envContent = setVar(envContent, "BEARER_TOKEN", bearerToken);
94
108
  envContent = setVar(envContent, "TOKEN_AUTH0", bearerToken);
95
109
  }
110
+ if (cfClientId && cfClientSecret) {
111
+ envContent = setVar(envContent, "CF_ACCESS_CLIENT_ID", cfClientId);
112
+ envContent = setVar(envContent, "CF_ACCESS_CLIENT_SECRET", cfClientSecret);
113
+ }
96
114
 
97
115
  await fsp.writeFile(envPath, envContent);
98
116
  console.log(chalk.green(` ✓ Configured QA .env → ${apiUrl}`));
@@ -154,6 +172,10 @@ export function registerTestCommands(azure) {
154
172
  testEnv.BEARER_TOKEN = bearerToken;
155
173
  testEnv.TOKEN_AUTH0 = bearerToken;
156
174
  }
175
+ if (cfClientId && cfClientSecret) {
176
+ testEnv.CF_ACCESS_CLIENT_ID = cfClientId;
177
+ testEnv.CF_ACCESS_CLIENT_SECRET = cfClientSecret;
178
+ }
157
179
 
158
180
  const startMs = Date.now();
159
181
  const proc = execaFn(
@@ -986,11 +986,24 @@ app.run()
986
986
  .option("--url <url>", "Override the backend API URL")
987
987
  .action(async (opts) => {
988
988
  const { spawn } = await import("node:child_process");
989
- const { writeFileSync, existsSync, realpathSync, readFileSync } = await import("node:fs");
989
+ const { writeFileSync, existsSync, realpathSync, readFileSync, unlinkSync, mkdirSync } = await import("node:fs");
990
990
  const { tmpdir, homedir } = await import("node:os");
991
991
  const { join, dirname } = await import("node:path");
992
992
  const { findComposeRoot } = await import("./lib/tools-write.js");
993
993
 
994
+ // ── Singleton: kill any existing tray process ──────────────────────────
995
+ const pidDir = join(homedir(), ".fops");
996
+ const pidFile = join(pidDir, "tray.pid");
997
+ if (existsSync(pidFile)) {
998
+ try {
999
+ const oldPid = parseInt(readFileSync(pidFile, "utf8").trim(), 10);
1000
+ if (oldPid) process.kill(oldPid, "SIGTERM");
1001
+ } catch {
1002
+ // process already gone — ignore
1003
+ }
1004
+ try { unlinkSync(pidFile); } catch {}
1005
+ }
1006
+
994
1007
  const composeRoot = program._fopsRoot || findComposeRoot() || "";
995
1008
 
996
1009
  let apiUrl = opts.url || process.env.FOPS_API_URL || "http://127.0.0.1:9001";
@@ -1235,6 +1248,8 @@ $tray.Visible = $false
1235
1248
  env: trayEnv,
1236
1249
  windowsHide: true,
1237
1250
  });
1251
+ if (!existsSync(pidDir)) mkdirSync(pidDir, { recursive: true });
1252
+ writeFileSync(pidFile, String(winChild.pid));
1238
1253
  winChild.unref();
1239
1254
  return;
1240
1255
  }
@@ -1995,6 +2010,8 @@ app.run()
1995
2010
  detached: true,
1996
2011
  env: trayEnv,
1997
2012
  });
2013
+ if (!existsSync(pidDir)) mkdirSync(pidDir, { recursive: true });
2014
+ writeFileSync(pidFile, String(child.pid));
1998
2015
  child.unref();
1999
2016
  });
2000
2017
  });