@lead-routing/cli 0.1.13 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +468 -315
- package/dist/prisma/migrations/20260308100000_route_match_config/migration.sql +95 -0
- package/dist/prisma/migrations/20260309200000_add_path_label_to_routing_logs/migration.sql +2 -0
- package/dist/prisma/migrations/20260310000000_add_team_to_routing_log/migration.sql +6 -0
- package/dist/prisma/migrations/20260310100000_add_field_type_to_branch_conditions/migration.sql +2 -0
- package/dist/prisma/migrations/20260310200000_analytics_foundation/migration.sql +84 -0
- package/dist/prisma/migrations/20260310300000_make_branch_assignment_type_nullable/migration.sql +2 -0
- package/dist/prisma/schema.prisma +188 -17
- package/dist/sfdc-package/force-app/main/default/classes/AccountTriggerTest.cls +26 -0
- package/dist/sfdc-package/force-app/main/default/classes/LeadTriggerTest.cls +49 -0
- package/dist/sfdc-package/force-app/main/default/classes/RoutingEngineCallout.cls +31 -4
- package/dist/sfdc-package/force-app/main/default/classes/RoutingEngineMock.cls +9 -2
- package/dist/sfdc-package/force-app/main/default/classes/RoutingPayloadBuilder.cls +36 -0
- package/dist/sfdc-package/force-app/main/default/triggers/AccountTrigger.trigger +14 -4
- package/dist/sfdc-package/force-app/main/default/triggers/ContactTrigger.trigger +14 -4
- package/dist/sfdc-package/force-app/main/default/triggers/LeadTrigger.trigger +16 -4
- package/package.json +11 -3
package/dist/index.js
CHANGED
|
@@ -5,29 +5,16 @@ import { Command } from "commander";
|
|
|
5
5
|
|
|
6
6
|
// src/commands/init.ts
|
|
7
7
|
import { promises as dns } from "dns";
|
|
8
|
+
import { readFileSync as readFileSync5, writeFileSync as writeFileSync4 } from "fs";
|
|
9
|
+
import { join as join6 } from "path";
|
|
8
10
|
import { intro, outro, note as note4, log as log9, confirm as confirm2, cancel as cancel3, isCancel as isCancel4, password as promptPassword } from "@clack/prompts";
|
|
9
11
|
import chalk2 from "chalk";
|
|
10
12
|
|
|
11
13
|
// src/steps/prerequisites.ts
|
|
12
14
|
import { log } from "@clack/prompts";
|
|
13
|
-
|
|
14
|
-
// src/utils/exec.ts
|
|
15
|
-
import { execa } from "execa";
|
|
16
|
-
import { spinner } from "@clack/prompts";
|
|
17
|
-
async function runSilent(cmd, args, opts = {}) {
|
|
18
|
-
try {
|
|
19
|
-
const result = await execa(cmd, args, { cwd: opts.cwd, reject: false });
|
|
20
|
-
return result.stdout;
|
|
21
|
-
} catch {
|
|
22
|
-
return "";
|
|
23
|
-
}
|
|
24
|
-
}
|
|
25
|
-
|
|
26
|
-
// src/steps/prerequisites.ts
|
|
27
15
|
async function checkPrerequisites() {
|
|
28
16
|
const results = await Promise.all([
|
|
29
|
-
checkNodeVersion()
|
|
30
|
-
checkSalesforceCLI()
|
|
17
|
+
checkNodeVersion()
|
|
31
18
|
]);
|
|
32
19
|
const failed = results.filter((r) => !r.ok);
|
|
33
20
|
for (const r of results) {
|
|
@@ -54,17 +41,6 @@ async function checkNodeVersion() {
|
|
|
54
41
|
}
|
|
55
42
|
return { ok: true, label: `Node.js ${version}` };
|
|
56
43
|
}
|
|
57
|
-
async function checkSalesforceCLI() {
|
|
58
|
-
const out = await runSilent("sf", ["--version"]);
|
|
59
|
-
if (!out) {
|
|
60
|
-
return {
|
|
61
|
-
ok: false,
|
|
62
|
-
label: "Salesforce CLI (sf) \u2014 not found",
|
|
63
|
-
detail: "install from https://developer.salesforce.com/tools/salesforcecli"
|
|
64
|
-
};
|
|
65
|
-
}
|
|
66
|
-
return { ok: true, label: `Salesforce CLI \u2014 ${out.trim()}` };
|
|
67
|
-
}
|
|
68
44
|
|
|
69
45
|
// src/steps/collect-ssh-config.ts
|
|
70
46
|
import { existsSync } from "fs";
|
|
@@ -196,8 +172,9 @@ async function collectConfig(opts = {}) {
|
|
|
196
172
|
const dbPassword = generateSecret(16);
|
|
197
173
|
const managedDb = !opts.externalDb;
|
|
198
174
|
const databaseUrl = opts.externalDb ?? `postgresql://leadrouting:${dbPassword}@postgres:5432/leadrouting`;
|
|
175
|
+
const redisPassword = generateSecret(16);
|
|
199
176
|
const managedRedis = !opts.externalRedis;
|
|
200
|
-
const redisUrl = opts.externalRedis ??
|
|
177
|
+
const redisUrl = opts.externalRedis ?? `redis://:${redisPassword}@redis:6379`;
|
|
201
178
|
note2("This creates the first admin user for the web app.", "Admin Account");
|
|
202
179
|
const adminEmail = await text2({
|
|
203
180
|
message: "Admin email address",
|
|
@@ -219,6 +196,7 @@ async function collectConfig(opts = {}) {
|
|
|
219
196
|
const sessionSecret = generateSecret(32);
|
|
220
197
|
const engineWebhookSecret = generateSecret(32);
|
|
221
198
|
const adminSecret = generateSecret(16);
|
|
199
|
+
const internalApiKey = generateSecret(32);
|
|
222
200
|
return {
|
|
223
201
|
appUrl: appUrl.trim().replace(/\/+$/, ""),
|
|
224
202
|
engineUrl: engineUrl.trim().replace(/\/+$/, ""),
|
|
@@ -230,13 +208,15 @@ async function collectConfig(opts = {}) {
|
|
|
230
208
|
dbPassword: managedDb ? dbPassword : "",
|
|
231
209
|
managedRedis,
|
|
232
210
|
redisUrl,
|
|
211
|
+
redisPassword: managedRedis ? redisPassword : "",
|
|
233
212
|
adminEmail,
|
|
234
213
|
adminPassword,
|
|
235
214
|
resendApiKey: "",
|
|
236
215
|
feedbackToEmail: "",
|
|
237
216
|
sessionSecret,
|
|
238
217
|
engineWebhookSecret,
|
|
239
|
-
adminSecret
|
|
218
|
+
adminSecret,
|
|
219
|
+
internalApiKey
|
|
240
220
|
};
|
|
241
221
|
}
|
|
242
222
|
|
|
@@ -269,14 +249,16 @@ function renderDockerCompose(c) {
|
|
|
269
249
|
timeout: 5s
|
|
270
250
|
retries: 10
|
|
271
251
|
` : "";
|
|
252
|
+
const redisPassword = c.redisPassword ?? "";
|
|
272
253
|
const redisService = c.managedRedis ? `
|
|
273
254
|
redis:
|
|
274
255
|
image: redis:7-alpine
|
|
275
|
-
restart: unless-stopped
|
|
256
|
+
restart: unless-stopped${redisPassword ? `
|
|
257
|
+
command: redis-server --requirepass ${redisPassword}` : ""}
|
|
276
258
|
volumes:
|
|
277
259
|
- redis_data:/data
|
|
278
260
|
healthcheck:
|
|
279
|
-
test: ["CMD", "redis-cli"
|
|
261
|
+
test: ["CMD-SHELL", "redis-cli${redisPassword ? ` -a ${redisPassword}` : ""} ping | grep PONG"]
|
|
280
262
|
interval: 5s
|
|
281
263
|
timeout: 3s
|
|
282
264
|
retries: 10
|
|
@@ -389,6 +371,11 @@ function renderEnvWeb(c) {
|
|
|
389
371
|
``,
|
|
390
372
|
`# Admin`,
|
|
391
373
|
`ADMIN_SECRET=${c.adminSecret}`,
|
|
374
|
+
`ADMIN_EMAIL=${c.adminEmail}`,
|
|
375
|
+
`ADMIN_PASSWORD=${c.adminPassword}`,
|
|
376
|
+
``,
|
|
377
|
+
`# Internal API key (shared with engine for analytics)`,
|
|
378
|
+
`INTERNAL_API_KEY=${c.internalApiKey}`,
|
|
392
379
|
``,
|
|
393
380
|
`# Email (optional)`,
|
|
394
381
|
`RESEND_API_KEY=${c.resendApiKey ?? ""}`,
|
|
@@ -419,7 +406,10 @@ function renderEnvEngine(c) {
|
|
|
419
406
|
`SFDC_LOGIN_URL=${c.sfdcLoginUrl}`,
|
|
420
407
|
``,
|
|
421
408
|
`# Webhook`,
|
|
422
|
-
`ENGINE_WEBHOOK_SECRET=${c.engineWebhookSecret}
|
|
409
|
+
`ENGINE_WEBHOOK_SECRET=${c.engineWebhookSecret}`,
|
|
410
|
+
``,
|
|
411
|
+
`# Internal API key (Bearer token for analytics endpoints)`,
|
|
412
|
+
`INTERNAL_API_KEY=${c.internalApiKey}`
|
|
423
413
|
].join("\n");
|
|
424
414
|
}
|
|
425
415
|
|
|
@@ -436,7 +426,18 @@ function renderCaddyfile(appUrl, engineUrl) {
|
|
|
436
426
|
`# Generated by lead-routing CLI`,
|
|
437
427
|
`# Caddy auto-provisions SSL certificates via Let's Encrypt`,
|
|
438
428
|
``,
|
|
429
|
+
`(security_headers) {`,
|
|
430
|
+
` header {`,
|
|
431
|
+
` X-Content-Type-Options nosniff`,
|
|
432
|
+
` X-Frame-Options DENY`,
|
|
433
|
+
` Referrer-Policy strict-origin-when-cross-origin`,
|
|
434
|
+
` Permissions-Policy interest-cohort=()`,
|
|
435
|
+
` Strict-Transport-Security "max-age=31536000; includeSubDomains"`,
|
|
436
|
+
` }`,
|
|
437
|
+
`}`,
|
|
438
|
+
``,
|
|
439
439
|
`${appHost} {`,
|
|
440
|
+
` import security_headers`,
|
|
440
441
|
` reverse_proxy web:3000 {`,
|
|
441
442
|
` health_uri /api/health`,
|
|
442
443
|
` health_interval 15s`,
|
|
@@ -444,6 +445,7 @@ function renderCaddyfile(appUrl, engineUrl) {
|
|
|
444
445
|
`}`,
|
|
445
446
|
``,
|
|
446
447
|
`${appHost}:${enginePort} {`,
|
|
448
|
+
` import security_headers`,
|
|
447
449
|
` reverse_proxy engine:3001 {`,
|
|
448
450
|
` health_uri /health`,
|
|
449
451
|
` health_interval 15s`,
|
|
@@ -456,7 +458,18 @@ function renderCaddyfile(appUrl, engineUrl) {
|
|
|
456
458
|
`# Generated by lead-routing CLI`,
|
|
457
459
|
`# Caddy auto-provisions SSL certificates via Let's Encrypt`,
|
|
458
460
|
``,
|
|
461
|
+
`(security_headers) {`,
|
|
462
|
+
` header {`,
|
|
463
|
+
` X-Content-Type-Options nosniff`,
|
|
464
|
+
` X-Frame-Options DENY`,
|
|
465
|
+
` Referrer-Policy strict-origin-when-cross-origin`,
|
|
466
|
+
` Permissions-Policy interest-cohort=()`,
|
|
467
|
+
` Strict-Transport-Security "max-age=31536000; includeSubDomains"`,
|
|
468
|
+
` }`,
|
|
469
|
+
`}`,
|
|
470
|
+
``,
|
|
459
471
|
`${appHost} {`,
|
|
472
|
+
` import security_headers`,
|
|
460
473
|
` reverse_proxy web:3000 {`,
|
|
461
474
|
` health_uri /api/health`,
|
|
462
475
|
` health_interval 15s`,
|
|
@@ -464,6 +477,7 @@ function renderCaddyfile(appUrl, engineUrl) {
|
|
|
464
477
|
`}`,
|
|
465
478
|
``,
|
|
466
479
|
`${engineHost} {`,
|
|
480
|
+
` import security_headers`,
|
|
467
481
|
` reverse_proxy engine:3001 {`,
|
|
468
482
|
` health_uri /health`,
|
|
469
483
|
` health_interval 15s`,
|
|
@@ -479,10 +493,10 @@ function getConfigPath(dir) {
|
|
|
479
493
|
return join(dir, "lead-routing.json");
|
|
480
494
|
}
|
|
481
495
|
function readConfig(dir) {
|
|
482
|
-
const
|
|
483
|
-
if (!existsSync2(
|
|
496
|
+
const path = getConfigPath(dir);
|
|
497
|
+
if (!existsSync2(path)) return null;
|
|
484
498
|
try {
|
|
485
|
-
return JSON.parse(readFileSync(
|
|
499
|
+
return JSON.parse(readFileSync(path, "utf8"));
|
|
486
500
|
} catch {
|
|
487
501
|
return null;
|
|
488
502
|
}
|
|
@@ -515,7 +529,8 @@ function generateFiles(cfg, sshCfg) {
|
|
|
515
529
|
const composeContent = renderDockerCompose({
|
|
516
530
|
managedDb: cfg.managedDb,
|
|
517
531
|
managedRedis: cfg.managedRedis,
|
|
518
|
-
dbPassword: cfg.dbPassword
|
|
532
|
+
dbPassword: cfg.dbPassword,
|
|
533
|
+
redisPassword: cfg.redisPassword
|
|
519
534
|
});
|
|
520
535
|
const composeFile = join2(dir, "docker-compose.yml");
|
|
521
536
|
writeFileSync2(composeFile, composeContent, "utf8");
|
|
@@ -535,6 +550,9 @@ function generateFiles(cfg, sshCfg) {
|
|
|
535
550
|
sessionSecret: cfg.sessionSecret,
|
|
536
551
|
engineWebhookSecret: cfg.engineWebhookSecret,
|
|
537
552
|
adminSecret: cfg.adminSecret,
|
|
553
|
+
adminEmail: cfg.adminEmail,
|
|
554
|
+
adminPassword: cfg.adminPassword,
|
|
555
|
+
internalApiKey: cfg.internalApiKey,
|
|
538
556
|
resendApiKey: cfg.resendApiKey || void 0,
|
|
539
557
|
feedbackToEmail: cfg.feedbackToEmail || void 0
|
|
540
558
|
});
|
|
@@ -547,7 +565,8 @@ function generateFiles(cfg, sshCfg) {
|
|
|
547
565
|
sfdcClientId: cfg.sfdcClientId,
|
|
548
566
|
sfdcClientSecret: cfg.sfdcClientSecret,
|
|
549
567
|
sfdcLoginUrl: cfg.sfdcLoginUrl,
|
|
550
|
-
engineWebhookSecret: cfg.engineWebhookSecret
|
|
568
|
+
engineWebhookSecret: cfg.engineWebhookSecret,
|
|
569
|
+
internalApiKey: cfg.internalApiKey
|
|
551
570
|
});
|
|
552
571
|
const envEngine = join2(dir, ".env.engine");
|
|
553
572
|
writeFileSync2(envEngine, envEngineContent, "utf8");
|
|
@@ -571,6 +590,7 @@ function generateFiles(cfg, sshCfg) {
|
|
|
571
590
|
// Stored so `lead-routing sfdc deploy` can re-authenticate without re-prompting
|
|
572
591
|
sfdcClientId: cfg.sfdcClientId,
|
|
573
592
|
sfdcLoginUrl: cfg.sfdcLoginUrl,
|
|
593
|
+
engineWebhookSecret: cfg.engineWebhookSecret,
|
|
574
594
|
installedAt: (/* @__PURE__ */ new Date()).toISOString(),
|
|
575
595
|
version: getCliVersion()
|
|
576
596
|
});
|
|
@@ -579,7 +599,7 @@ function generateFiles(cfg, sshCfg) {
|
|
|
579
599
|
}
|
|
580
600
|
|
|
581
601
|
// src/steps/check-remote-prerequisites.ts
|
|
582
|
-
import { log as log4, spinner
|
|
602
|
+
import { log as log4, spinner } from "@clack/prompts";
|
|
583
603
|
async function checkRemotePrerequisites(ssh) {
|
|
584
604
|
const dockerResult = await checkOrInstallDocker(ssh);
|
|
585
605
|
const composeResult = await checkRemoteDockerCompose(ssh);
|
|
@@ -620,7 +640,7 @@ async function checkOrInstallDocker(ssh) {
|
|
|
620
640
|
}
|
|
621
641
|
return { ok: true, label: `Docker \u2014 ${stdout.trim()}` };
|
|
622
642
|
}
|
|
623
|
-
const s =
|
|
643
|
+
const s = spinner();
|
|
624
644
|
s.start("Docker not found \u2014 installing via get.docker.com (~2 min)\u2026");
|
|
625
645
|
try {
|
|
626
646
|
const { code: curlCode } = await ssh.execSilent("command -v curl 2>/dev/null");
|
|
@@ -719,9 +739,9 @@ async function checkRemotePort(ssh, port) {
|
|
|
719
739
|
|
|
720
740
|
// src/steps/upload-files.ts
|
|
721
741
|
import { join as join3 } from "path";
|
|
722
|
-
import { spinner as
|
|
742
|
+
import { spinner as spinner2 } from "@clack/prompts";
|
|
723
743
|
async function uploadFiles(ssh, localDir, remoteDir) {
|
|
724
|
-
const s =
|
|
744
|
+
const s = spinner2();
|
|
725
745
|
s.start("Uploading config files to server");
|
|
726
746
|
try {
|
|
727
747
|
await ssh.mkdir(remoteDir);
|
|
@@ -746,7 +766,7 @@ async function uploadFiles(ssh, localDir, remoteDir) {
|
|
|
746
766
|
}
|
|
747
767
|
|
|
748
768
|
// src/steps/start-services.ts
|
|
749
|
-
import { spinner as
|
|
769
|
+
import { spinner as spinner3, log as log5 } from "@clack/prompts";
|
|
750
770
|
async function startServices(ssh, remoteDir) {
|
|
751
771
|
await wipeStalePostgresVolume(ssh, remoteDir);
|
|
752
772
|
await pullImages(ssh, remoteDir);
|
|
@@ -760,7 +780,7 @@ async function wipeStalePostgresVolume(ssh, remoteDir) {
|
|
|
760
780
|
if (code !== 0) {
|
|
761
781
|
return;
|
|
762
782
|
}
|
|
763
|
-
const s =
|
|
783
|
+
const s = spinner3();
|
|
764
784
|
s.start("Removing existing database volume for clean install");
|
|
765
785
|
try {
|
|
766
786
|
await ssh.exec("docker compose down -v --remove-orphans", remoteDir);
|
|
@@ -774,7 +794,7 @@ async function wipeStalePostgresVolume(ssh, remoteDir) {
|
|
|
774
794
|
}
|
|
775
795
|
}
|
|
776
796
|
async function pullImages(ssh, remoteDir) {
|
|
777
|
-
const s =
|
|
797
|
+
const s = spinner3();
|
|
778
798
|
s.start("Pulling Docker images on server (this may take a few minutes)");
|
|
779
799
|
try {
|
|
780
800
|
await ssh.exec("docker compose pull", remoteDir);
|
|
@@ -787,7 +807,7 @@ async function pullImages(ssh, remoteDir) {
|
|
|
787
807
|
}
|
|
788
808
|
}
|
|
789
809
|
async function startContainers(ssh, remoteDir) {
|
|
790
|
-
const s =
|
|
810
|
+
const s = spinner3();
|
|
791
811
|
s.start("Starting services");
|
|
792
812
|
try {
|
|
793
813
|
await ssh.exec("docker compose up -d --remove-orphans", remoteDir);
|
|
@@ -798,7 +818,7 @@ async function startContainers(ssh, remoteDir) {
|
|
|
798
818
|
}
|
|
799
819
|
}
|
|
800
820
|
async function waitForPostgres(ssh, remoteDir) {
|
|
801
|
-
const s =
|
|
821
|
+
const s = spinner3();
|
|
802
822
|
s.start("Waiting for PostgreSQL to be ready");
|
|
803
823
|
const maxAttempts = 24;
|
|
804
824
|
let containerReady = false;
|
|
@@ -834,119 +854,11 @@ async function waitForPostgres(ssh, remoteDir) {
|
|
|
834
854
|
log5.warn("Host TCP port check timed out \u2014 tunnel may have issues. Proceeding anyway.");
|
|
835
855
|
}
|
|
836
856
|
function sleep(ms) {
|
|
837
|
-
return new Promise((
|
|
838
|
-
}
|
|
839
|
-
|
|
840
|
-
// src/steps/run-migrations.ts
|
|
841
|
-
import * as fs from "fs";
|
|
842
|
-
import * as path from "path";
|
|
843
|
-
import * as crypto from "crypto";
|
|
844
|
-
import { fileURLToPath as fileURLToPath2 } from "url";
|
|
845
|
-
import { execa as execa2 } from "execa";
|
|
846
|
-
import { spinner as spinner5 } from "@clack/prompts";
|
|
847
|
-
var __filename = fileURLToPath2(import.meta.url);
|
|
848
|
-
var __dirname2 = path.dirname(__filename);
|
|
849
|
-
function readEnvVar(envFile, key) {
|
|
850
|
-
const content = fs.readFileSync(envFile, "utf8");
|
|
851
|
-
const match = content.match(new RegExp(`^${key}=(.+)$`, "m"));
|
|
852
|
-
if (!match) throw new Error(`${key} not found in ${envFile}`);
|
|
853
|
-
return match[1].trim().replace(/^["']|["']$/g, "");
|
|
854
|
-
}
|
|
855
|
-
function getTunneledDbUrl(localDir, localPort) {
|
|
856
|
-
const rawUrl = readEnvVar(path.join(localDir, ".env.web"), "DATABASE_URL");
|
|
857
|
-
const parsed = new URL(rawUrl);
|
|
858
|
-
parsed.hostname = "localhost";
|
|
859
|
-
parsed.port = String(localPort);
|
|
860
|
-
return parsed.toString();
|
|
861
|
-
}
|
|
862
|
-
function findPrismaBin() {
|
|
863
|
-
const candidates = [
|
|
864
|
-
// npx / npm global install: @lead-routing/cli is nested under the scope dir,
|
|
865
|
-
// so prisma lands 3 levels above dist/ in node_modules/.bin/
|
|
866
|
-
// e.g. ~/.npm/_npx/HASH/node_modules/.bin/prisma
|
|
867
|
-
path.join(__dirname2, "../../../.bin/prisma"),
|
|
868
|
-
path.join(__dirname2, "../../../prisma/bin/prisma.js"),
|
|
869
|
-
// Fallback: prisma nested inside the package's own node_modules (hoisted install)
|
|
870
|
-
path.join(__dirname2, "../node_modules/.bin/prisma"),
|
|
871
|
-
path.join(__dirname2, "../node_modules/prisma/bin/prisma.js"),
|
|
872
|
-
// Monorepo dev paths
|
|
873
|
-
path.resolve("packages/db/node_modules/.bin/prisma"),
|
|
874
|
-
path.resolve("node_modules/.bin/prisma"),
|
|
875
|
-
path.resolve("node_modules/.pnpm/node_modules/.bin/prisma")
|
|
876
|
-
];
|
|
877
|
-
const found = candidates.find(fs.existsSync);
|
|
878
|
-
if (!found) throw new Error("Prisma binary not found \u2014 CLI may need to be reinstalled.");
|
|
879
|
-
return found;
|
|
880
|
-
}
|
|
881
|
-
async function runMigrations(ssh, localDir, adminEmail, adminPassword) {
|
|
882
|
-
const s = spinner5();
|
|
883
|
-
s.start("Opening secure tunnel to database");
|
|
884
|
-
let tunnelClose;
|
|
885
|
-
try {
|
|
886
|
-
const { localPort, close } = await ssh.tunnel(5432);
|
|
887
|
-
tunnelClose = close;
|
|
888
|
-
s.stop(`Database tunnel open (local port ${localPort})`);
|
|
889
|
-
await applyMigrations(localDir, localPort);
|
|
890
|
-
if (adminEmail && adminPassword) {
|
|
891
|
-
await seedAdminUser(localDir, localPort, adminEmail, adminPassword);
|
|
892
|
-
}
|
|
893
|
-
} finally {
|
|
894
|
-
tunnelClose?.();
|
|
895
|
-
}
|
|
896
|
-
}
|
|
897
|
-
async function applyMigrations(localDir, localPort) {
|
|
898
|
-
const s = spinner5();
|
|
899
|
-
s.start("Running database migrations");
|
|
900
|
-
try {
|
|
901
|
-
const DATABASE_URL = getTunneledDbUrl(localDir, localPort);
|
|
902
|
-
const prismaBin = findPrismaBin();
|
|
903
|
-
const bundledSchema = path.join(__dirname2, "prisma/schema.prisma");
|
|
904
|
-
const monoSchema = path.resolve("packages/db/prisma/schema.prisma");
|
|
905
|
-
const schemaPath = fs.existsSync(bundledSchema) ? bundledSchema : monoSchema;
|
|
906
|
-
await execa2(prismaBin, ["migrate", "deploy", "--schema", schemaPath], {
|
|
907
|
-
env: { ...process.env, DATABASE_URL }
|
|
908
|
-
});
|
|
909
|
-
s.stop("Database migrations applied");
|
|
910
|
-
} catch (err) {
|
|
911
|
-
s.stop("Migrations failed");
|
|
912
|
-
throw err;
|
|
913
|
-
}
|
|
914
|
-
}
|
|
915
|
-
async function seedAdminUser(localDir, localPort, adminEmail, adminPassword) {
|
|
916
|
-
const s = spinner5();
|
|
917
|
-
s.start("Creating admin user");
|
|
918
|
-
try {
|
|
919
|
-
const DATABASE_URL = getTunneledDbUrl(localDir, localPort);
|
|
920
|
-
const webhookSecret = readEnvVar(path.join(localDir, ".env.engine"), "ENGINE_WEBHOOK_SECRET");
|
|
921
|
-
const salt = crypto.randomBytes(16).toString("hex");
|
|
922
|
-
const pbkdf2Hash = crypto.pbkdf2Sync(adminPassword, salt, 31e4, 32, "sha256").toString("hex");
|
|
923
|
-
const passwordHash = `${salt}:${pbkdf2Hash}`;
|
|
924
|
-
const safeEmail = adminEmail.replace(/'/g, "''");
|
|
925
|
-
const safeWebhookSecret = webhookSecret.replace(/'/g, "''");
|
|
926
|
-
const sql = `
|
|
927
|
-
-- Create initial organisation if none exists (self-hosted defaults: PAID plan, unlimited seats)
|
|
928
|
-
INSERT INTO organizations (id, "webhookSecret", plan, "seatsPurchased", "isActive", "createdAt", "updatedAt")
|
|
929
|
-
SELECT gen_random_uuid(), '${safeWebhookSecret}', 'PAID', 9999, true, NOW(), NOW()
|
|
930
|
-
WHERE NOT EXISTS (SELECT 1 FROM organizations);
|
|
931
|
-
|
|
932
|
-
-- Create admin AppUser under the first org (idempotent)
|
|
933
|
-
INSERT INTO app_users (id, "orgId", email, name, "passwordHash", role, "isActive", "createdAt", "updatedAt")
|
|
934
|
-
SELECT gen_random_uuid(), o.id, '${safeEmail}', 'Admin', '${passwordHash}', 'ADMIN', true, NOW(), NOW()
|
|
935
|
-
FROM organizations o
|
|
936
|
-
LIMIT 1
|
|
937
|
-
ON CONFLICT ("orgId", email) DO NOTHING;
|
|
938
|
-
`;
|
|
939
|
-
const prismaBin = findPrismaBin();
|
|
940
|
-
await execa2(prismaBin, ["db", "execute", "--stdin", "--url", DATABASE_URL], { input: sql });
|
|
941
|
-
s.stop("Admin user ready");
|
|
942
|
-
} catch (err) {
|
|
943
|
-
s.stop("Seed failed");
|
|
944
|
-
throw err;
|
|
945
|
-
}
|
|
857
|
+
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
946
858
|
}
|
|
947
859
|
|
|
948
860
|
// src/steps/verify-health.ts
|
|
949
|
-
import { spinner as
|
|
861
|
+
import { spinner as spinner4, log as log6 } from "@clack/prompts";
|
|
950
862
|
async function verifyHealth(appUrl, engineUrl, ssh, remoteDir) {
|
|
951
863
|
const checks = [
|
|
952
864
|
{ service: "Web app", url: `${appUrl}/api/health` },
|
|
@@ -995,7 +907,7 @@ To resume once fixed:
|
|
|
995
907
|
);
|
|
996
908
|
}
|
|
997
909
|
async function pollHealth(service, url, maxAttempts = 24, intervalMs = 5e3) {
|
|
998
|
-
const s =
|
|
910
|
+
const s = spinner4();
|
|
999
911
|
s.start(`Waiting for ${service}`);
|
|
1000
912
|
for (let i = 0; i < maxAttempts; i++) {
|
|
1001
913
|
try {
|
|
@@ -1020,44 +932,304 @@ async function pollHealth(service, url, maxAttempts = 24, intervalMs = 5e3) {
|
|
|
1020
932
|
};
|
|
1021
933
|
}
|
|
1022
934
|
function sleep2(ms) {
|
|
1023
|
-
return new Promise((
|
|
935
|
+
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
1024
936
|
}
|
|
1025
937
|
|
|
1026
938
|
// src/steps/sfdc-deploy-inline.ts
|
|
1027
939
|
import { readFileSync as readFileSync4, writeFileSync as writeFileSync3, existsSync as existsSync4, cpSync, rmSync } from "fs";
|
|
1028
|
-
import { join as join5, dirname as
|
|
940
|
+
import { join as join5, dirname as dirname2 } from "path";
|
|
1029
941
|
import { tmpdir } from "os";
|
|
1030
|
-
import { fileURLToPath as
|
|
1031
|
-
import {
|
|
1032
|
-
import {
|
|
1033
|
-
|
|
942
|
+
import { fileURLToPath as fileURLToPath2 } from "url";
|
|
943
|
+
import { execSync } from "child_process";
|
|
944
|
+
import { spinner as spinner5, log as log7 } from "@clack/prompts";
|
|
945
|
+
|
|
946
|
+
// src/utils/sfdc-api.ts
|
|
947
|
+
var API_VERSION = "v59.0";
|
|
948
|
+
var SalesforceApi = class {
|
|
949
|
+
constructor(instanceUrl, accessToken) {
|
|
950
|
+
this.instanceUrl = instanceUrl;
|
|
951
|
+
this.accessToken = accessToken;
|
|
952
|
+
this.baseUrl = `${instanceUrl.replace(/\/+$/, "")}/services/data/${API_VERSION}`;
|
|
953
|
+
}
|
|
954
|
+
baseUrl;
|
|
955
|
+
headers(extra) {
|
|
956
|
+
return {
|
|
957
|
+
Authorization: `Bearer ${this.accessToken}`,
|
|
958
|
+
"Content-Type": "application/json",
|
|
959
|
+
...extra
|
|
960
|
+
};
|
|
961
|
+
}
|
|
962
|
+
/** Execute a SOQL query and return records */
|
|
963
|
+
async query(soql) {
|
|
964
|
+
const url = `${this.baseUrl}/query?q=${encodeURIComponent(soql)}`;
|
|
965
|
+
const res = await fetch(url, { headers: this.headers() });
|
|
966
|
+
if (!res.ok) {
|
|
967
|
+
const body = await res.text();
|
|
968
|
+
throw new Error(`SOQL query failed (${res.status}): ${body}`);
|
|
969
|
+
}
|
|
970
|
+
const data = await res.json();
|
|
971
|
+
return data.records;
|
|
972
|
+
}
|
|
973
|
+
/** Create an sObject record, returns the new record ID */
|
|
974
|
+
async create(sobject, data) {
|
|
975
|
+
const url = `${this.baseUrl}/sobjects/${sobject}`;
|
|
976
|
+
const res = await fetch(url, {
|
|
977
|
+
method: "POST",
|
|
978
|
+
headers: this.headers(),
|
|
979
|
+
body: JSON.stringify(data)
|
|
980
|
+
});
|
|
981
|
+
if (!res.ok) {
|
|
982
|
+
const body = await res.text();
|
|
983
|
+
if (res.status === 400 && body.includes("Duplicate")) {
|
|
984
|
+
throw new DuplicateError(body);
|
|
985
|
+
}
|
|
986
|
+
throw new Error(`Create ${sobject} failed (${res.status}): ${body}`);
|
|
987
|
+
}
|
|
988
|
+
const result = await res.json();
|
|
989
|
+
return result.id;
|
|
990
|
+
}
|
|
991
|
+
/** Update an sObject record */
|
|
992
|
+
async update(sobject, id, data) {
|
|
993
|
+
const url = `${this.baseUrl}/sobjects/${sobject}/${id}`;
|
|
994
|
+
const res = await fetch(url, {
|
|
995
|
+
method: "PATCH",
|
|
996
|
+
headers: this.headers(),
|
|
997
|
+
body: JSON.stringify(data)
|
|
998
|
+
});
|
|
999
|
+
if (!res.ok) {
|
|
1000
|
+
const body = await res.text();
|
|
1001
|
+
throw new Error(`Update ${sobject}/${id} failed (${res.status}): ${body}`);
|
|
1002
|
+
}
|
|
1003
|
+
}
|
|
1004
|
+
/** Get current user info (for permission set assignment) */
|
|
1005
|
+
async getCurrentUserId() {
|
|
1006
|
+
const url = `${this.instanceUrl.replace(/\/+$/, "")}/services/oauth2/userinfo`;
|
|
1007
|
+
const res = await fetch(url, {
|
|
1008
|
+
headers: { Authorization: `Bearer ${this.accessToken}` }
|
|
1009
|
+
});
|
|
1010
|
+
if (!res.ok) {
|
|
1011
|
+
const body = await res.text();
|
|
1012
|
+
throw new Error(`Get current user failed (${res.status}): ${body}`);
|
|
1013
|
+
}
|
|
1014
|
+
const data = await res.json();
|
|
1015
|
+
return data.user_id;
|
|
1016
|
+
}
|
|
1017
|
+
/**
|
|
1018
|
+
* Deploy metadata using the Source Deploy API (same API that `sf project deploy start` uses).
|
|
1019
|
+
* Accepts a ZIP buffer containing the source-format package.
|
|
1020
|
+
* Returns the deploy request ID for polling.
|
|
1021
|
+
*/
|
|
1022
|
+
async deployMetadata(zipBuffer) {
|
|
1023
|
+
const url = `${this.baseUrl}/metadata/deployRequest`;
|
|
1024
|
+
const boundary = `----FormBoundary${Date.now()}`;
|
|
1025
|
+
const deployOptions = JSON.stringify({
|
|
1026
|
+
deployOptions: {
|
|
1027
|
+
rollbackOnError: true,
|
|
1028
|
+
singlePackage: true
|
|
1029
|
+
}
|
|
1030
|
+
});
|
|
1031
|
+
const parts = [];
|
|
1032
|
+
parts.push(
|
|
1033
|
+
Buffer.from(
|
|
1034
|
+
`--${boundary}\r
|
|
1035
|
+
Content-Disposition: form-data; name="json"\r
|
|
1036
|
+
Content-Type: application/json\r
|
|
1037
|
+
\r
|
|
1038
|
+
${deployOptions}\r
|
|
1039
|
+
`
|
|
1040
|
+
)
|
|
1041
|
+
);
|
|
1042
|
+
parts.push(
|
|
1043
|
+
Buffer.from(
|
|
1044
|
+
`--${boundary}\r
|
|
1045
|
+
Content-Disposition: form-data; name="file"; filename="deploy.zip"\r
|
|
1046
|
+
Content-Type: application/zip\r
|
|
1047
|
+
\r
|
|
1048
|
+
`
|
|
1049
|
+
)
|
|
1050
|
+
);
|
|
1051
|
+
parts.push(zipBuffer);
|
|
1052
|
+
parts.push(Buffer.from(`\r
|
|
1053
|
+
--${boundary}--\r
|
|
1054
|
+
`));
|
|
1055
|
+
const body = Buffer.concat(parts);
|
|
1056
|
+
const res = await fetch(url, {
|
|
1057
|
+
method: "POST",
|
|
1058
|
+
headers: {
|
|
1059
|
+
Authorization: `Bearer ${this.accessToken}`,
|
|
1060
|
+
"Content-Type": `multipart/form-data; boundary=${boundary}`
|
|
1061
|
+
},
|
|
1062
|
+
body
|
|
1063
|
+
});
|
|
1064
|
+
if (!res.ok) {
|
|
1065
|
+
const text5 = await res.text();
|
|
1066
|
+
throw new Error(
|
|
1067
|
+
`Metadata deploy request failed (${res.status}): ${text5}`
|
|
1068
|
+
);
|
|
1069
|
+
}
|
|
1070
|
+
const result = await res.json();
|
|
1071
|
+
return result.id;
|
|
1072
|
+
}
|
|
1073
|
+
/**
|
|
1074
|
+
* Poll deploy status until complete.
|
|
1075
|
+
* Returns deploy result with success/failure info.
|
|
1076
|
+
*/
|
|
1077
|
+
async waitForDeploy(deployId, timeoutMs = 3e5) {
|
|
1078
|
+
const startTime = Date.now();
|
|
1079
|
+
const pollInterval = 3e3;
|
|
1080
|
+
while (Date.now() - startTime < timeoutMs) {
|
|
1081
|
+
const url = `${this.baseUrl}/metadata/deployRequest/${deployId}?includeDetails=true`;
|
|
1082
|
+
const res = await fetch(url, { headers: this.headers() });
|
|
1083
|
+
if (!res.ok) {
|
|
1084
|
+
const text5 = await res.text();
|
|
1085
|
+
throw new Error(
|
|
1086
|
+
`Deploy status check failed (${res.status}): ${text5}`
|
|
1087
|
+
);
|
|
1088
|
+
}
|
|
1089
|
+
const data = await res.json();
|
|
1090
|
+
const result = data.deployResult;
|
|
1091
|
+
if (result.done) {
|
|
1092
|
+
return result;
|
|
1093
|
+
}
|
|
1094
|
+
await new Promise((resolve) => setTimeout(resolve, pollInterval));
|
|
1095
|
+
}
|
|
1096
|
+
throw new Error(`Deploy timed out after ${timeoutMs / 1e3}s`);
|
|
1097
|
+
}
|
|
1098
|
+
};
|
|
1099
|
+
var DuplicateError = class extends Error {
|
|
1100
|
+
constructor(message) {
|
|
1101
|
+
super(message);
|
|
1102
|
+
this.name = "DuplicateError";
|
|
1103
|
+
}
|
|
1104
|
+
};
|
|
1105
|
+
|
|
1106
|
+
// src/utils/zip-source.ts
|
|
1107
|
+
import { join as join4 } from "path";
|
|
1108
|
+
import { readdirSync, readFileSync as readFileSync3, existsSync as existsSync3 } from "fs";
|
|
1109
|
+
import archiver from "archiver";
|
|
1110
|
+
var META_TYPE_MAP = {
|
|
1111
|
+
applications: "CustomApplication",
|
|
1112
|
+
classes: "ApexClass",
|
|
1113
|
+
triggers: "ApexTrigger",
|
|
1114
|
+
lwc: "LightningComponentBundle",
|
|
1115
|
+
permissionsets: "PermissionSet",
|
|
1116
|
+
namedCredentials: "NamedCredential",
|
|
1117
|
+
remoteSiteSettings: "RemoteSiteSetting",
|
|
1118
|
+
tabs: "CustomTab"
|
|
1119
|
+
};
|
|
1120
|
+
async function zipSourcePackage(packageDir) {
|
|
1121
|
+
const forceAppDefault = join4(packageDir, "force-app", "main", "default");
|
|
1122
|
+
let apiVersion = "59.0";
|
|
1123
|
+
try {
|
|
1124
|
+
const proj = JSON.parse(readFileSync3(join4(packageDir, "sfdx-project.json"), "utf8"));
|
|
1125
|
+
if (proj.sourceApiVersion) apiVersion = proj.sourceApiVersion;
|
|
1126
|
+
} catch {
|
|
1127
|
+
}
|
|
1128
|
+
const members = /* @__PURE__ */ new Map();
|
|
1129
|
+
const addMember = (type, name) => {
|
|
1130
|
+
if (!members.has(type)) members.set(type, /* @__PURE__ */ new Set());
|
|
1131
|
+
members.get(type).add(name);
|
|
1132
|
+
};
|
|
1133
|
+
return new Promise((resolve, reject) => {
|
|
1134
|
+
const chunks = [];
|
|
1135
|
+
const archive = archiver("zip", { zlib: { level: 9 } });
|
|
1136
|
+
archive.on("data", (chunk) => chunks.push(chunk));
|
|
1137
|
+
archive.on("end", () => resolve(Buffer.concat(chunks)));
|
|
1138
|
+
archive.on("error", reject);
|
|
1139
|
+
for (const [dirName, metaType] of Object.entries(META_TYPE_MAP)) {
|
|
1140
|
+
const srcDir = join4(forceAppDefault, dirName);
|
|
1141
|
+
if (!existsSync3(srcDir)) continue;
|
|
1142
|
+
const entries = readdirSync(srcDir, { withFileTypes: true });
|
|
1143
|
+
for (const entry of entries) {
|
|
1144
|
+
if (dirName === "lwc" && entry.isDirectory()) {
|
|
1145
|
+
addMember(metaType, entry.name);
|
|
1146
|
+
archive.directory(join4(srcDir, entry.name), `${dirName}/${entry.name}`);
|
|
1147
|
+
} else if (entry.isFile()) {
|
|
1148
|
+
archive.file(join4(srcDir, entry.name), { name: `${dirName}/${entry.name}` });
|
|
1149
|
+
if (!entry.name.endsWith("-meta.xml")) {
|
|
1150
|
+
const memberName = entry.name.replace(/\.[^.]+$/, "");
|
|
1151
|
+
addMember(metaType, memberName);
|
|
1152
|
+
}
|
|
1153
|
+
}
|
|
1154
|
+
}
|
|
1155
|
+
}
|
|
1156
|
+
const objectsDir = join4(forceAppDefault, "objects");
|
|
1157
|
+
if (existsSync3(objectsDir)) {
|
|
1158
|
+
for (const objEntry of readdirSync(objectsDir, { withFileTypes: true })) {
|
|
1159
|
+
if (!objEntry.isDirectory()) continue;
|
|
1160
|
+
const objName = objEntry.name;
|
|
1161
|
+
addMember("CustomObject", objName);
|
|
1162
|
+
const objDir = join4(objectsDir, objName);
|
|
1163
|
+
const objectXml = mergeObjectXml(objDir, objName, apiVersion);
|
|
1164
|
+
archive.append(Buffer.from(objectXml, "utf8"), {
|
|
1165
|
+
name: `objects/${objName}.object`
|
|
1166
|
+
});
|
|
1167
|
+
}
|
|
1168
|
+
}
|
|
1169
|
+
const packageXml = generatePackageXml(members, apiVersion);
|
|
1170
|
+
archive.append(Buffer.from(packageXml, "utf8"), { name: "package.xml" });
|
|
1171
|
+
archive.finalize();
|
|
1172
|
+
});
|
|
1173
|
+
}
|
|
1174
|
+
// Merge a source-format CustomObject directory (object-meta.xml plus one
// *.field-meta.xml per field) into a single Metadata-API-format .object XML
// document, returned as a string.
// NOTE(review): apiVersion is accepted but never referenced in this body —
// confirm whether a <apiVersion> element was intended, or drop the parameter
// at the call site.
function mergeObjectXml(objDir, objName, apiVersion) {
  const lines = [
    '<?xml version="1.0" encoding="UTF-8"?>',
    '<CustomObject xmlns="http://soap.sforce.com/2006/04/metadata">'
  ];
  // Object-level metadata: strip the XML prolog and the <CustomObject>
  // wrapper so the inner elements can be re-nested in the merged document.
  const objMetaPath = join4(objDir, `${objName}.object-meta.xml`);
  if (existsSync3(objMetaPath)) {
    const content = readFileSync3(objMetaPath, "utf8");
    const inner = content.replace(/<\?xml[^?]*\?>\s*/g, "").replace(/<CustomObject[^>]*>/g, "").replace(/<\/CustomObject>/g, "").trim();
    if (inner) lines.push(inner);
  }
  // Each field file becomes one <fields> element; sorted for a
  // deterministic output regardless of directory-listing order.
  const fieldsDir = join4(objDir, "fields");
  if (existsSync3(fieldsDir)) {
    for (const fieldFile of readdirSync(fieldsDir).sort()) {
      if (!fieldFile.endsWith(".field-meta.xml")) continue;
      const content = readFileSync3(join4(fieldsDir, fieldFile), "utf8");
      // Same unwrapping as above, but for the <CustomField> wrapper.
      const inner = content.replace(/<\?xml[^?]*\?>\s*/g, "").replace(/<CustomField[^>]*>/g, "").replace(/<\/CustomField>/g, "").trim();
      if (inner) {
        lines.push(" <fields>");
        lines.push(` ${inner}`);
        lines.push(" </fields>");
      }
    }
  }
  lines.push("</CustomObject>");
  return lines.join("\n");
}
|
|
1201
|
+
// Render a Metadata API package.xml manifest.
// `members` is a Map of metadata type name -> Set of member names;
// `apiVersion` is the Salesforce API version string (e.g. "59.0").
// Types and members are emitted in sorted order so output is deterministic.
function generatePackageXml(members, apiVersion) {
  const out = [
    '<?xml version="1.0" encoding="UTF-8"?>',
    '<Package xmlns="http://soap.sforce.com/2006/04/metadata">'
  ];
  const typesInOrder = [...members.entries()].sort(([left], [right]) => left.localeCompare(right));
  for (const [metaType, memberSet] of typesInOrder) {
    out.push(" <types>");
    const sortedMembers = [...memberSet].sort();
    for (const memberName of sortedMembers) {
      out.push(` <members>${memberName}</members>`);
    }
    out.push(` <name>${metaType}</name>`);
    out.push(" </types>");
  }
  out.push(` <version>${apiVersion}</version>`);
  out.push("</Package>");
  return out.join("\n");
}
|
|
1218
|
+
|
|
1219
|
+
// src/steps/sfdc-deploy-inline.ts
|
|
1220
|
+
var __dirname2 = dirname2(fileURLToPath2(import.meta.url));
|
|
1034
1221
|
// Replace the text content of every <tag>…</tag> pair in `content` with
// `value`, returning the patched XML string.
//
// Fix: the previous implementation used the replacement *string*
// `$1${value}$2`, which means String.replace expanded any `$`-sequences
// occurring inside `value` itself ("$&", "$'", "$`", "$1", …) — corrupting
// patched values such as secrets or URLs that contain a dollar sign. Using a
// replacer callback inserts `value` literally.
function patchXml(content, tag, value) {
  // `tag` is an internal, trusted element name (no regex metacharacters).
  const re = new RegExp(`(<${tag}>)[^<]*(</\\s*${tag}>)`, "g");
  return content.replace(re, (_match, openTag, closeTag) => openTag + value + closeTag);
}
|
|
1038
1225
|
async function sfdcDeployInline(params) {
|
|
1039
|
-
const { appUrl, engineUrl,
|
|
1040
|
-
const s =
|
|
1041
|
-
const {
|
|
1042
|
-
|
|
1043
|
-
["org", "display", "--target-org", orgAlias, "--json"],
|
|
1044
|
-
{ reject: false }
|
|
1045
|
-
);
|
|
1046
|
-
const alreadyAuthed = authCheck === 0;
|
|
1047
|
-
let sfCredEnv = {};
|
|
1048
|
-
let targetOrgArgs = ["--target-org", orgAlias];
|
|
1049
|
-
if (alreadyAuthed) {
|
|
1050
|
-
log7.success("Using existing Salesforce authentication");
|
|
1051
|
-
} else {
|
|
1052
|
-
const { accessToken, instanceUrl, aliasStored } = await loginViaAppBridge(appUrl, orgAlias);
|
|
1053
|
-
sfCredEnv = { SF_ACCESS_TOKEN: accessToken, SF_ORG_INSTANCE_URL: instanceUrl };
|
|
1054
|
-
if (!aliasStored) {
|
|
1055
|
-
targetOrgArgs = [];
|
|
1056
|
-
}
|
|
1057
|
-
}
|
|
1226
|
+
const { appUrl, engineUrl, installDir } = params;
|
|
1227
|
+
const s = spinner5();
|
|
1228
|
+
const { accessToken, instanceUrl } = await loginViaAppBridge(appUrl);
|
|
1229
|
+
const sf = new SalesforceApi(instanceUrl, accessToken);
|
|
1058
1230
|
s.start("Copying Salesforce package\u2026");
|
|
1059
|
-
const inDist = join5(
|
|
1060
|
-
const nextToDist = join5(
|
|
1231
|
+
const inDist = join5(__dirname2, "sfdc-package");
|
|
1232
|
+
const nextToDist = join5(__dirname2, "..", "sfdc-package");
|
|
1061
1233
|
const bundledPkg = existsSync4(inDist) ? inDist : nextToDist;
|
|
1062
1234
|
const destPkg = join5(installDir ?? tmpdir(), "lead-routing-sfdc-package");
|
|
1063
1235
|
if (!existsSync4(bundledPkg)) {
|
|
@@ -1111,37 +1283,59 @@ The CLI may need to be reinstalled: npm i -g @lead-routing/cli`
|
|
|
1111
1283
|
log7.success("Remote Site Settings patched");
|
|
1112
1284
|
s.start("Deploying Salesforce package (this may take ~2 min)\u2026");
|
|
1113
1285
|
try {
|
|
1114
|
-
await
|
|
1115
|
-
|
|
1116
|
-
|
|
1117
|
-
|
|
1118
|
-
|
|
1119
|
-
|
|
1286
|
+
const zipBuffer = await zipSourcePackage(destPkg);
|
|
1287
|
+
const deployId = await sf.deployMetadata(zipBuffer);
|
|
1288
|
+
const result = await sf.waitForDeploy(deployId);
|
|
1289
|
+
if (!result.success) {
|
|
1290
|
+
const failures = result.details?.componentFailures ?? [];
|
|
1291
|
+
const failureMsg = failures.map((f) => ` ${f.componentType}/${f.fullName}: ${f.problem}`).join("\n");
|
|
1292
|
+
s.stop("Deployment failed");
|
|
1293
|
+
throw new Error(
|
|
1294
|
+
`Metadata deploy failed (${result.numberComponentErrors} error(s)):
|
|
1295
|
+
${failureMsg || result.errorMessage || "Unknown error"}`
|
|
1296
|
+
);
|
|
1297
|
+
}
|
|
1298
|
+
s.stop(`Package deployed (${result.numberComponentsDeployed} components)`);
|
|
1120
1299
|
} catch (err) {
|
|
1300
|
+
if (err instanceof Error && err.message.startsWith("Metadata deploy failed")) {
|
|
1301
|
+
throw err;
|
|
1302
|
+
}
|
|
1121
1303
|
s.stop("Deployment failed");
|
|
1122
1304
|
throw new Error(
|
|
1123
|
-
`
|
|
1305
|
+
`Metadata deploy failed: ${String(err)}
|
|
1124
1306
|
|
|
1125
|
-
|
|
1126
|
-
|
|
1127
|
-
sf project deploy start --target-org ${orgAlias} --source-dir force-app`
|
|
1307
|
+
The patched package is at: ${destPkg}
|
|
1308
|
+
You can retry with: sf project deploy start --source-dir force-app`
|
|
1128
1309
|
);
|
|
1129
1310
|
}
|
|
1130
1311
|
s.start("Assigning LeadRouterAdmin permission set\u2026");
|
|
1131
1312
|
try {
|
|
1132
|
-
await
|
|
1133
|
-
"
|
|
1134
|
-
["org", "assign", "permset", "--name", "LeadRouterAdmin", ...targetOrgArgs],
|
|
1135
|
-
{ stdio: "inherit", env: { ...process.env, ...sfCredEnv } }
|
|
1313
|
+
const permSets = await sf.query(
|
|
1314
|
+
"SELECT Id FROM PermissionSet WHERE Name = 'LeadRouterAdmin' LIMIT 1"
|
|
1136
1315
|
);
|
|
1137
|
-
|
|
1138
|
-
|
|
1139
|
-
|
|
1140
|
-
if (msg.includes("Duplicate PermissionSetAssignment")) {
|
|
1141
|
-
s.stop("Permission set already assigned");
|
|
1316
|
+
if (permSets.length === 0) {
|
|
1317
|
+
s.stop("LeadRouterAdmin permission set not found (non-fatal)");
|
|
1318
|
+
log7.warn("The permission set may not have been included in the deploy.");
|
|
1142
1319
|
} else {
|
|
1320
|
+
const userId = await sf.getCurrentUserId();
|
|
1321
|
+
try {
|
|
1322
|
+
await sf.create("PermissionSetAssignment", {
|
|
1323
|
+
AssigneeId: userId,
|
|
1324
|
+
PermissionSetId: permSets[0].Id
|
|
1325
|
+
});
|
|
1326
|
+
s.stop("Permission set assigned \u2014 Lead Router Setup will appear in the App Launcher");
|
|
1327
|
+
} catch (err) {
|
|
1328
|
+
if (err instanceof DuplicateError) {
|
|
1329
|
+
s.stop("Permission set already assigned");
|
|
1330
|
+
} else {
|
|
1331
|
+
throw err;
|
|
1332
|
+
}
|
|
1333
|
+
}
|
|
1334
|
+
}
|
|
1335
|
+
} catch (err) {
|
|
1336
|
+
if (!(err instanceof DuplicateError)) {
|
|
1143
1337
|
s.stop("Permission set assignment failed (non-fatal)");
|
|
1144
|
-
log7.warn(
|
|
1338
|
+
log7.warn(String(err));
|
|
1145
1339
|
log7.info(
|
|
1146
1340
|
"Grant access manually:\n Salesforce Setup \u2192 Users \u2192 Permission Sets \u2192 Lead Router Admin \u2192 Manage Assignments"
|
|
1147
1341
|
);
|
|
@@ -1149,44 +1343,20 @@ The CLI may need to be reinstalled: npm i -g @lead-routing/cli`
|
|
|
1149
1343
|
}
|
|
1150
1344
|
s.start("Writing org settings to Routing_Settings__c\u2026");
|
|
1151
1345
|
try {
|
|
1152
|
-
|
|
1153
|
-
|
|
1154
|
-
|
|
1155
|
-
|
|
1156
|
-
|
|
1157
|
-
|
|
1158
|
-
|
|
1159
|
-
|
|
1160
|
-
|
|
1161
|
-
], { env: { ...process.env, ...sfCredEnv } });
|
|
1162
|
-
const parsed = JSON.parse(qr.stdout);
|
|
1163
|
-
existingId = parsed?.result?.records?.[0]?.Id;
|
|
1164
|
-
} catch {
|
|
1346
|
+
const existing = await sf.query(
|
|
1347
|
+
"SELECT Id FROM Routing_Settings__c LIMIT 1"
|
|
1348
|
+
);
|
|
1349
|
+
const settingsData = {
|
|
1350
|
+
App_Url__c: appUrl,
|
|
1351
|
+
Engine_Endpoint__c: engineUrl
|
|
1352
|
+
};
|
|
1353
|
+
if (params.webhookSecret) {
|
|
1354
|
+
settingsData.Webhook_Secret__c = params.webhookSecret;
|
|
1165
1355
|
}
|
|
1166
|
-
if (
|
|
1167
|
-
await
|
|
1168
|
-
"data",
|
|
1169
|
-
"update",
|
|
1170
|
-
"record",
|
|
1171
|
-
...targetOrgArgs,
|
|
1172
|
-
"--sobject",
|
|
1173
|
-
"Routing_Settings__c",
|
|
1174
|
-
"--record-id",
|
|
1175
|
-
existingId,
|
|
1176
|
-
"--values",
|
|
1177
|
-
`App_Url__c='${appUrl}' Engine_Endpoint__c='${engineUrl}'`
|
|
1178
|
-
], { stdio: "inherit", env: { ...process.env, ...sfCredEnv } });
|
|
1356
|
+
if (existing.length > 0) {
|
|
1357
|
+
await sf.update("Routing_Settings__c", existing[0].Id, settingsData);
|
|
1179
1358
|
} else {
|
|
1180
|
-
await
|
|
1181
|
-
"data",
|
|
1182
|
-
"create",
|
|
1183
|
-
"record",
|
|
1184
|
-
...targetOrgArgs,
|
|
1185
|
-
"--sobject",
|
|
1186
|
-
"Routing_Settings__c",
|
|
1187
|
-
"--values",
|
|
1188
|
-
`App_Url__c='${appUrl}' Engine_Endpoint__c='${engineUrl}'`
|
|
1189
|
-
], { stdio: "inherit", env: { ...process.env, ...sfCredEnv } });
|
|
1359
|
+
await sf.create("Routing_Settings__c", settingsData);
|
|
1190
1360
|
}
|
|
1191
1361
|
s.stop("Org settings written");
|
|
1192
1362
|
} catch (err) {
|
|
@@ -1195,9 +1365,9 @@ The CLI may need to be reinstalled: npm i -g @lead-routing/cli`
|
|
|
1195
1365
|
log7.info("Set manually: Salesforce \u2192 Custom Settings \u2192 Routing Settings \u2192 Manage");
|
|
1196
1366
|
}
|
|
1197
1367
|
}
|
|
1198
|
-
async function loginViaAppBridge(rawAppUrl
|
|
1368
|
+
async function loginViaAppBridge(rawAppUrl) {
|
|
1199
1369
|
const appUrl = rawAppUrl.replace(/\/+$/, "");
|
|
1200
|
-
const s =
|
|
1370
|
+
const s = spinner5();
|
|
1201
1371
|
s.start("Starting Salesforce authentication via your Lead Router app\u2026");
|
|
1202
1372
|
let sessionId;
|
|
1203
1373
|
let authUrl;
|
|
@@ -1227,8 +1397,10 @@ Ensure the app is running and the URL is correct.`
|
|
|
1227
1397
|
`);
|
|
1228
1398
|
log7.info('If Chrome shows a "Dangerous site" warning with no proceed option, paste the URL into Safari or Firefox.');
|
|
1229
1399
|
const opener = process.platform === "win32" ? "start" : process.platform === "darwin" ? "open" : "xdg-open";
|
|
1230
|
-
|
|
1231
|
-
|
|
1400
|
+
try {
|
|
1401
|
+
execSync(`${opener} "${authUrl}"`, { stdio: "ignore" });
|
|
1402
|
+
} catch {
|
|
1403
|
+
}
|
|
1232
1404
|
s.start("Waiting for Salesforce authentication in browser\u2026");
|
|
1233
1405
|
const maxPolls = 150;
|
|
1234
1406
|
let accessToken;
|
|
@@ -1258,20 +1430,7 @@ Ensure the app is running and the URL is correct.`
|
|
|
1258
1430
|
);
|
|
1259
1431
|
}
|
|
1260
1432
|
s.stop("Authenticated with Salesforce");
|
|
1261
|
-
|
|
1262
|
-
try {
|
|
1263
|
-
await execa3(
|
|
1264
|
-
"sf",
|
|
1265
|
-
["org", "login", "access-token", "--instance-url", instanceUrl, "--alias", orgAlias, "--no-prompt"],
|
|
1266
|
-
{ env: { ...process.env, SFDX_ACCESS_TOKEN: accessToken } }
|
|
1267
|
-
);
|
|
1268
|
-
log7.success(`Salesforce org saved as "${orgAlias}"`);
|
|
1269
|
-
aliasStored = true;
|
|
1270
|
-
} catch (err) {
|
|
1271
|
-
log7.warn(`Could not persist sf CLI credentials: ${String(err)}`);
|
|
1272
|
-
log7.info("Continuing with direct token auth for this session.");
|
|
1273
|
-
}
|
|
1274
|
-
return { accessToken, instanceUrl, aliasStored };
|
|
1433
|
+
return { accessToken, instanceUrl };
|
|
1275
1434
|
}
|
|
1276
1435
|
|
|
1277
1436
|
// src/steps/app-launcher-guide.ts
|
|
@@ -1412,10 +1571,10 @@ ${result.stderr || result.stdout}`
|
|
|
1412
1571
|
}
|
|
1413
1572
|
);
|
|
1414
1573
|
});
|
|
1415
|
-
return new Promise((
|
|
1574
|
+
return new Promise((resolve, reject) => {
|
|
1416
1575
|
server.listen(0, "127.0.0.1", () => {
|
|
1417
1576
|
const { port } = server.address();
|
|
1418
|
-
|
|
1577
|
+
resolve({
|
|
1419
1578
|
localPort: port,
|
|
1420
1579
|
close: () => server.close()
|
|
1421
1580
|
});
|
|
@@ -1490,16 +1649,17 @@ async function runInit(options = {}) {
|
|
|
1490
1649
|
});
|
|
1491
1650
|
log9.success(`Connected to ${saved.ssh.host}`);
|
|
1492
1651
|
const remoteDir = await ssh.resolveHome(saved.remoteDir);
|
|
1493
|
-
log9.step("Step 8
|
|
1652
|
+
log9.step("Step 7/8 Verifying health");
|
|
1494
1653
|
await verifyHealth(saved.appUrl, saved.engineUrl, ssh, remoteDir);
|
|
1495
|
-
log9.step("Step
|
|
1654
|
+
log9.step("Step 8/8 Deploying Salesforce package");
|
|
1496
1655
|
await sfdcDeployInline({
|
|
1497
1656
|
appUrl: saved.appUrl,
|
|
1498
1657
|
engineUrl: saved.engineUrl,
|
|
1499
1658
|
orgAlias: "lead-routing",
|
|
1500
1659
|
sfdcClientId: saved.sfdcClientId ?? "",
|
|
1501
1660
|
sfdcLoginUrl: saved.sfdcLoginUrl ?? "https://login.salesforce.com",
|
|
1502
|
-
installDir: dir
|
|
1661
|
+
installDir: dir,
|
|
1662
|
+
webhookSecret: saved.engineWebhookSecret
|
|
1503
1663
|
});
|
|
1504
1664
|
await guideAppLauncherSetup(saved.appUrl);
|
|
1505
1665
|
outro(
|
|
@@ -1524,9 +1684,9 @@ async function runInit(options = {}) {
|
|
|
1524
1684
|
return;
|
|
1525
1685
|
}
|
|
1526
1686
|
try {
|
|
1527
|
-
log9.step("Step 1/
|
|
1687
|
+
log9.step("Step 1/8 Checking local prerequisites");
|
|
1528
1688
|
await checkPrerequisites();
|
|
1529
|
-
log9.step("Step 2/
|
|
1689
|
+
log9.step("Step 2/8 SSH connection");
|
|
1530
1690
|
const sshCfg = await collectSshConfig({
|
|
1531
1691
|
sshPort: options.sshPort,
|
|
1532
1692
|
sshUser: options.sshUser,
|
|
@@ -1543,14 +1703,14 @@ async function runInit(options = {}) {
|
|
|
1543
1703
|
process.exit(1);
|
|
1544
1704
|
}
|
|
1545
1705
|
}
|
|
1546
|
-
log9.step("Step 3/
|
|
1706
|
+
log9.step("Step 3/8 Configuration");
|
|
1547
1707
|
const cfg = await collectConfig({
|
|
1548
1708
|
sandbox: options.sandbox,
|
|
1549
1709
|
externalDb: options.externalDb,
|
|
1550
1710
|
externalRedis: options.externalRedis
|
|
1551
1711
|
});
|
|
1552
1712
|
await checkDnsResolvable(cfg.appUrl, cfg.engineUrl);
|
|
1553
|
-
log9.step("Step 4/
|
|
1713
|
+
log9.step("Step 4/8 Generating config files");
|
|
1554
1714
|
const { dir, adminSecret } = generateFiles(cfg, sshCfg);
|
|
1555
1715
|
note4(
|
|
1556
1716
|
`Local config directory: ${chalk2.cyan(dir)}
|
|
@@ -1567,24 +1727,31 @@ Files created: docker-compose.yml, Caddyfile, .env.web, .env.engine, lead-routin
|
|
|
1567
1727
|
);
|
|
1568
1728
|
return;
|
|
1569
1729
|
}
|
|
1570
|
-
log9.step("Step 5/
|
|
1730
|
+
log9.step("Step 5/8 Remote setup");
|
|
1571
1731
|
const remoteDir = await ssh.resolveHome(sshCfg.remoteDir);
|
|
1572
1732
|
await checkRemotePrerequisites(ssh);
|
|
1573
1733
|
await uploadFiles(ssh, dir, remoteDir);
|
|
1574
|
-
log9.step("Step 6/
|
|
1734
|
+
log9.step("Step 6/8 Starting services");
|
|
1575
1735
|
await startServices(ssh, remoteDir);
|
|
1576
|
-
log9.step("Step 7/
|
|
1577
|
-
await runMigrations(ssh, dir, cfg.adminEmail, cfg.adminPassword);
|
|
1578
|
-
log9.step("Step 8/9 Verifying health");
|
|
1736
|
+
log9.step("Step 7/8 Verifying health");
|
|
1579
1737
|
await verifyHealth(cfg.appUrl, cfg.engineUrl, ssh, remoteDir);
|
|
1580
|
-
|
|
1738
|
+
try {
|
|
1739
|
+
const envWebPath = join6(dir, ".env.web");
|
|
1740
|
+
const envContent = readFileSync5(envWebPath, "utf-8");
|
|
1741
|
+
const cleaned = envContent.split("\n").filter((line) => !line.startsWith("ADMIN_PASSWORD=")).join("\n");
|
|
1742
|
+
writeFileSync4(envWebPath, cleaned, "utf-8");
|
|
1743
|
+
log9.success("Removed ADMIN_PASSWORD from .env.web (no longer needed after seed)");
|
|
1744
|
+
} catch {
|
|
1745
|
+
}
|
|
1746
|
+
log9.step("Step 8/8 Deploying Salesforce package");
|
|
1581
1747
|
await sfdcDeployInline({
|
|
1582
1748
|
appUrl: cfg.appUrl,
|
|
1583
1749
|
engineUrl: cfg.engineUrl,
|
|
1584
1750
|
orgAlias: "lead-routing",
|
|
1585
1751
|
sfdcClientId: cfg.sfdcClientId,
|
|
1586
1752
|
sfdcLoginUrl: cfg.sfdcLoginUrl,
|
|
1587
|
-
installDir: dir
|
|
1753
|
+
installDir: dir,
|
|
1754
|
+
webhookSecret: cfg.engineWebhookSecret
|
|
1588
1755
|
});
|
|
1589
1756
|
await guideAppLauncherSetup(cfg.appUrl);
|
|
1590
1757
|
outro(
|
|
@@ -1613,8 +1780,8 @@ Files created: docker-compose.yml, Caddyfile, .env.web, .env.engine, lead-routin
|
|
|
1613
1780
|
}
|
|
1614
1781
|
|
|
1615
1782
|
// src/commands/deploy.ts
|
|
1616
|
-
import { writeFileSync as
|
|
1617
|
-
import { join as
|
|
1783
|
+
import { writeFileSync as writeFileSync5, unlinkSync } from "fs";
|
|
1784
|
+
import { join as join7 } from "path";
|
|
1618
1785
|
import { tmpdir as tmpdir2 } from "os";
|
|
1619
1786
|
import { intro as intro2, outro as outro2, log as log10, password as promptPassword2 } from "@clack/prompts";
|
|
1620
1787
|
import chalk3 from "chalk";
|
|
@@ -1656,8 +1823,8 @@ async function runDeploy() {
|
|
|
1656
1823
|
const remoteDir = await ssh.resolveHome(cfg.remoteDir);
|
|
1657
1824
|
log10.step("Syncing Caddyfile");
|
|
1658
1825
|
const caddyContent = renderCaddyfile(cfg.appUrl, cfg.engineUrl);
|
|
1659
|
-
const tmpCaddy =
|
|
1660
|
-
|
|
1826
|
+
const tmpCaddy = join7(tmpdir2(), "lead-routing-Caddyfile");
|
|
1827
|
+
writeFileSync5(tmpCaddy, caddyContent, "utf8");
|
|
1661
1828
|
await ssh.upload([{ local: tmpCaddy, remote: `${remoteDir}/Caddyfile` }]);
|
|
1662
1829
|
unlinkSync(tmpCaddy);
|
|
1663
1830
|
await ssh.exec("docker compose restart caddy", remoteDir);
|
|
@@ -1668,8 +1835,6 @@ async function runDeploy() {
|
|
|
1668
1835
|
log10.step("Restarting services");
|
|
1669
1836
|
await ssh.exec("docker compose up -d --remove-orphans", remoteDir);
|
|
1670
1837
|
log10.success("Services restarted");
|
|
1671
|
-
log10.step("Running database migrations");
|
|
1672
|
-
await runMigrations(ssh, dir);
|
|
1673
1838
|
outro2(
|
|
1674
1839
|
chalk3.green("\u2714 Deployment complete!") + `
|
|
1675
1840
|
|
|
@@ -1687,7 +1852,7 @@ async function runDeploy() {
|
|
|
1687
1852
|
// src/commands/doctor.ts
|
|
1688
1853
|
import { intro as intro3, outro as outro3, log as log11 } from "@clack/prompts";
|
|
1689
1854
|
import chalk4 from "chalk";
|
|
1690
|
-
import { execa
|
|
1855
|
+
import { execa } from "execa";
|
|
1691
1856
|
async function runDoctor() {
|
|
1692
1857
|
console.log();
|
|
1693
1858
|
intro3(chalk4.bold.cyan("Lead Routing \u2014 Health Check"));
|
|
@@ -1725,7 +1890,7 @@ async function runDoctor() {
|
|
|
1725
1890
|
}
|
|
1726
1891
|
async function checkDockerDaemon() {
|
|
1727
1892
|
try {
|
|
1728
|
-
await
|
|
1893
|
+
await execa("docker", ["info"], { reject: true });
|
|
1729
1894
|
return { label: "Docker daemon", pass: true };
|
|
1730
1895
|
} catch {
|
|
1731
1896
|
return { label: "Docker daemon", pass: false, detail: "not running" };
|
|
@@ -1733,7 +1898,7 @@ async function checkDockerDaemon() {
|
|
|
1733
1898
|
}
|
|
1734
1899
|
async function checkContainer(name, dir) {
|
|
1735
1900
|
try {
|
|
1736
|
-
const result = await
|
|
1901
|
+
const result = await execa(
|
|
1737
1902
|
"docker",
|
|
1738
1903
|
["compose", "ps", "--format", "json", name],
|
|
1739
1904
|
{ cwd: dir, reject: false }
|
|
@@ -1777,7 +1942,7 @@ async function checkEndpoint(label, url) {
|
|
|
1777
1942
|
|
|
1778
1943
|
// src/commands/logs.ts
|
|
1779
1944
|
import { log as log12 } from "@clack/prompts";
|
|
1780
|
-
import { execa as
|
|
1945
|
+
import { execa as execa2 } from "execa";
|
|
1781
1946
|
var VALID_SERVICES = ["web", "engine", "postgres", "redis"];
|
|
1782
1947
|
async function runLogs(service = "engine") {
|
|
1783
1948
|
if (!VALID_SERVICES.includes(service)) {
|
|
@@ -1792,7 +1957,7 @@ async function runLogs(service = "engine") {
|
|
|
1792
1957
|
console.log(`
|
|
1793
1958
|
Streaming logs for ${service} (Ctrl+C to stop)...
|
|
1794
1959
|
`);
|
|
1795
|
-
const child =
|
|
1960
|
+
const child = execa2("docker", ["compose", "logs", "-f", "--tail=100", service], {
|
|
1796
1961
|
cwd: dir,
|
|
1797
1962
|
stdio: "inherit",
|
|
1798
1963
|
reject: false
|
|
@@ -1802,14 +1967,14 @@ Streaming logs for ${service} (Ctrl+C to stop)...
|
|
|
1802
1967
|
|
|
1803
1968
|
// src/commands/status.ts
|
|
1804
1969
|
import { log as log13 } from "@clack/prompts";
|
|
1805
|
-
import { execa as
|
|
1970
|
+
import { execa as execa3 } from "execa";
|
|
1806
1971
|
async function runStatus() {
|
|
1807
1972
|
const dir = findInstallDir();
|
|
1808
1973
|
if (!dir) {
|
|
1809
1974
|
log13.error("No lead-routing.json found. Run `lead-routing init` first.");
|
|
1810
1975
|
process.exit(1);
|
|
1811
1976
|
}
|
|
1812
|
-
const result = await
|
|
1977
|
+
const result = await execa3("docker", ["compose", "ps"], {
|
|
1813
1978
|
cwd: dir,
|
|
1814
1979
|
stdio: "inherit",
|
|
1815
1980
|
reject: false
|
|
@@ -1821,15 +1986,15 @@ async function runStatus() {
|
|
|
1821
1986
|
}
|
|
1822
1987
|
|
|
1823
1988
|
// src/commands/config.ts
|
|
1824
|
-
import { readFileSync as
|
|
1825
|
-
import { join as
|
|
1826
|
-
import { intro as intro4, outro as outro4, text as text3, password as password3, spinner as
|
|
1989
|
+
import { readFileSync as readFileSync6, writeFileSync as writeFileSync6, existsSync as existsSync5 } from "fs";
|
|
1990
|
+
import { join as join8 } from "path";
|
|
1991
|
+
import { intro as intro4, outro as outro4, text as text3, password as password3, spinner as spinner6, log as log14 } from "@clack/prompts";
|
|
1827
1992
|
import chalk5 from "chalk";
|
|
1828
|
-
import { execa as
|
|
1993
|
+
import { execa as execa4 } from "execa";
|
|
1829
1994
|
function parseEnv(filePath) {
|
|
1830
1995
|
const map = /* @__PURE__ */ new Map();
|
|
1831
1996
|
if (!existsSync5(filePath)) return map;
|
|
1832
|
-
for (const line of
|
|
1997
|
+
for (const line of readFileSync6(filePath, "utf8").split("\n")) {
|
|
1833
1998
|
const trimmed = line.trim();
|
|
1834
1999
|
if (!trimmed || trimmed.startsWith("#")) continue;
|
|
1835
2000
|
const eq = trimmed.indexOf("=");
|
|
@@ -1839,7 +2004,7 @@ function parseEnv(filePath) {
|
|
|
1839
2004
|
return map;
|
|
1840
2005
|
}
|
|
1841
2006
|
function writeEnv(filePath, updates) {
|
|
1842
|
-
const lines = existsSync5(filePath) ?
|
|
2007
|
+
const lines = existsSync5(filePath) ? readFileSync6(filePath, "utf8").split("\n") : [];
|
|
1843
2008
|
const updated = /* @__PURE__ */ new Set();
|
|
1844
2009
|
const result = lines.map((line) => {
|
|
1845
2010
|
const trimmed = line.trim();
|
|
@@ -1856,7 +2021,7 @@ function writeEnv(filePath, updates) {
|
|
|
1856
2021
|
for (const [key, val] of Object.entries(updates)) {
|
|
1857
2022
|
if (!updated.has(key)) result.push(`${key}=${val}`);
|
|
1858
2023
|
}
|
|
1859
|
-
|
|
2024
|
+
writeFileSync6(filePath, result.join("\n"), "utf8");
|
|
1860
2025
|
}
|
|
1861
2026
|
async function runConfigSfdc() {
|
|
1862
2027
|
intro4("Lead Routing \u2014 Update Salesforce Credentials");
|
|
@@ -1866,8 +2031,8 @@ async function runConfigSfdc() {
|
|
|
1866
2031
|
log14.info("Run `lead-routing init` first, or cd into your installation directory.");
|
|
1867
2032
|
process.exit(1);
|
|
1868
2033
|
}
|
|
1869
|
-
const envWeb =
|
|
1870
|
-
const envEngine =
|
|
2034
|
+
const envWeb = join8(dir, ".env.web");
|
|
2035
|
+
const envEngine = join8(dir, ".env.engine");
|
|
1871
2036
|
const currentWeb = parseEnv(envWeb);
|
|
1872
2037
|
const currentClientId = currentWeb.get("SFDC_CLIENT_ID") ?? "";
|
|
1873
2038
|
const currentLoginUrl = currentWeb.get("SFDC_LOGIN_URL") ?? "https://login.salesforce.com";
|
|
@@ -1899,10 +2064,10 @@ Callback URL for your Connected App: ${callbackUrl}`
|
|
|
1899
2064
|
writeEnv(envWeb, updates);
|
|
1900
2065
|
writeEnv(envEngine, updates);
|
|
1901
2066
|
log14.success("Updated .env.web and .env.engine");
|
|
1902
|
-
const s =
|
|
2067
|
+
const s = spinner6();
|
|
1903
2068
|
s.start("Restarting web and engine containers\u2026");
|
|
1904
2069
|
try {
|
|
1905
|
-
await
|
|
2070
|
+
await execa4("docker", ["compose", "up", "-d", "--force-recreate", "web", "engine"], {
|
|
1906
2071
|
cwd: dir
|
|
1907
2072
|
});
|
|
1908
2073
|
s.stop("Containers restarted");
|
|
@@ -1920,7 +2085,7 @@ function runConfigShow() {
|
|
|
1920
2085
|
console.error("No lead-routing installation found in the current directory.");
|
|
1921
2086
|
process.exit(1);
|
|
1922
2087
|
}
|
|
1923
|
-
const envWeb =
|
|
2088
|
+
const envWeb = join8(dir, ".env.web");
|
|
1924
2089
|
const cfg = parseEnv(envWeb);
|
|
1925
2090
|
const adminSecret = cfg.get("ADMIN_SECRET") ?? "(not found)";
|
|
1926
2091
|
const appUrl = cfg.get("APP_URL") ?? "(not found)";
|
|
@@ -1936,9 +2101,8 @@ function runConfigShow() {
|
|
|
1936
2101
|
}
|
|
1937
2102
|
|
|
1938
2103
|
// src/commands/sfdc.ts
|
|
1939
|
-
import { intro as intro5, outro as outro5, text as text4,
|
|
2104
|
+
import { intro as intro5, outro as outro5, text as text4, log as log15 } from "@clack/prompts";
|
|
1940
2105
|
import chalk6 from "chalk";
|
|
1941
|
-
import { execa as execa8 } from "execa";
|
|
1942
2106
|
async function runSfdcDeploy() {
|
|
1943
2107
|
intro5("Lead Routing \u2014 Deploy Salesforce Package");
|
|
1944
2108
|
let appUrl;
|
|
@@ -1964,18 +2128,6 @@ async function runSfdcDeploy() {
|
|
|
1964
2128
|
if (typeof rawEngine === "symbol") process.exit(0);
|
|
1965
2129
|
engineUrl = rawEngine.trim();
|
|
1966
2130
|
}
|
|
1967
|
-
const s = spinner9();
|
|
1968
|
-
s.start("Checking Salesforce CLI\u2026");
|
|
1969
|
-
try {
|
|
1970
|
-
await execa8("sf", ["--version"], { all: true });
|
|
1971
|
-
s.stop("Salesforce CLI found");
|
|
1972
|
-
} catch {
|
|
1973
|
-
s.stop("Salesforce CLI (sf) not found");
|
|
1974
|
-
log15.error(
|
|
1975
|
-
"Install the Salesforce CLI and re-run this command:\n https://developer.salesforce.com/tools/salesforcecli"
|
|
1976
|
-
);
|
|
1977
|
-
process.exit(1);
|
|
1978
|
-
}
|
|
1979
2131
|
const alias = await text4({
|
|
1980
2132
|
message: "Salesforce org alias (used to log in)",
|
|
1981
2133
|
placeholder: "lead-routing",
|
|
@@ -1988,10 +2140,11 @@ async function runSfdcDeploy() {
|
|
|
1988
2140
|
appUrl,
|
|
1989
2141
|
engineUrl,
|
|
1990
2142
|
orgAlias: alias,
|
|
1991
|
-
// Read from config if available
|
|
2143
|
+
// Read from config if available
|
|
1992
2144
|
sfdcClientId: config2?.sfdcClientId ?? "",
|
|
1993
2145
|
sfdcLoginUrl: config2?.sfdcLoginUrl ?? "https://login.salesforce.com",
|
|
1994
|
-
installDir: dir ?? void 0
|
|
2146
|
+
installDir: dir ?? void 0,
|
|
2147
|
+
webhookSecret: config2?.engineWebhookSecret
|
|
1995
2148
|
});
|
|
1996
2149
|
} catch (err) {
|
|
1997
2150
|
log15.error(err instanceof Error ? err.message : String(err));
|