@lead-routing/cli 0.1.2 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +115 -46
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -189,7 +189,8 @@ async function collectConfig() {
189
189
  validate: (v) => {
190
190
  if (!v) return "Required";
191
191
  try {
192
- new URL(v);
192
+ const u = new URL(v);
193
+ if (u.protocol !== "https:") return "Must be an HTTPS URL (required for Salesforce OAuth)";
193
194
  } catch {
194
195
  return "Must be a valid URL (e.g. https://routing.acme.com)";
195
196
  }
@@ -211,16 +212,16 @@ async function collectConfig() {
211
212
  }
212
213
  });
213
214
  if (isCancel2(engineUrl)) bail2(engineUrl);
214
- const callbackUrl = `${appUrl}/api/auth/callback`;
215
+ const callbackUrl = `${appUrl.trim().replace(/\/+$/, "")}/api/auth/sfdc/callback`;
215
216
  note2(
216
217
  `You need a Salesforce Connected App. If you haven't created one yet:
217
218
 
218
219
  1. Go to Salesforce Setup \u2192 App Manager \u2192 New Connected App
219
220
  2. Connected App Name: Lead Routing
220
221
  3. Check "Enable OAuth Settings"
221
- 4. Callback URL:
222
+ 4. Callback URL (copy exactly \u2014 must match):
222
223
  ${callbackUrl}
223
- 5. Selected Scopes: api \u2022 refresh_token, offline_access \u2022 openid
224
+ 5. Selected Scopes: api \u2022 refresh_token, offline_access
224
225
  6. Check "Require Secret for Web Server Flow"
225
226
  7. Save \u2014 wait ~2 min, then click "Manage Consumer Details"
226
227
  8. Copy the Consumer Key (Client ID) and Consumer Secret below`,
@@ -342,8 +343,8 @@ async function collectConfig() {
342
343
  return {
343
344
  appUrl: appUrl.trim().replace(/\/+$/, ""),
344
345
  engineUrl: engineUrl.trim().replace(/\/+$/, ""),
345
- sfdcClientId,
346
- sfdcClientSecret,
346
+ sfdcClientId: sfdcClientId.trim(),
347
+ sfdcClientSecret: sfdcClientSecret.trim(),
347
348
  sfdcLoginUrl,
348
349
  orgAlias,
349
350
  managedDb,
@@ -362,8 +363,9 @@ async function collectConfig() {
362
363
  }
363
364
 
364
365
  // src/steps/generate-files.ts
365
- import { mkdirSync, writeFileSync as writeFileSync2 } from "fs";
366
- import { join as join2 } from "path";
366
+ import { mkdirSync, writeFileSync as writeFileSync2, readFileSync as readFileSync2 } from "fs";
367
+ import { join as join2, dirname } from "path";
368
+ import { fileURLToPath } from "url";
367
369
  import { log as log2 } from "@clack/prompts";
368
370
 
369
371
  // src/templates/docker-compose.ts
@@ -618,6 +620,15 @@ function findInstallDir(startDir = process.cwd()) {
618
620
  }
619
621
 
620
622
  // src/steps/generate-files.ts
623
+ var __dirname = dirname(fileURLToPath(import.meta.url));
624
+ function getCliVersion() {
625
+ try {
626
+ const pkg = JSON.parse(readFileSync2(join2(__dirname, "../../package.json"), "utf8"));
627
+ return pkg.version ?? "0.1.0";
628
+ } catch {
629
+ return "0.1.0";
630
+ }
631
+ }
621
632
  function generateFiles(cfg, sshCfg) {
622
633
  const dir = join2(process.cwd(), "lead-routing");
623
634
  mkdirSync(dir, { recursive: true });
@@ -681,7 +692,7 @@ function generateFiles(cfg, sshCfg) {
681
692
  sfdcClientId: cfg.sfdcClientId,
682
693
  sfdcLoginUrl: cfg.sfdcLoginUrl,
683
694
  installedAt: (/* @__PURE__ */ new Date()).toISOString(),
684
- version: "0.1.0"
695
+ version: getCliVersion()
685
696
  });
686
697
  log2.success("Generated lead-routing.json");
687
698
  return { dir, composeFile, envWeb, envEngine, adminSecret: cfg.adminSecret };
@@ -744,18 +755,41 @@ async function checkRemoteDockerCompose(ssh) {
744
755
  return { ok: true, label: `Docker Compose \u2014 ${stdout.trim()}` };
745
756
  }
746
757
  async function checkRemotePort(ssh, port) {
747
- const { stdout } = await ssh.execSilent(
748
- `ss -tlnp 2>/dev/null | grep ':${port} ' || netstat -tlnp 2>/dev/null | grep ':${port} ' || echo "free"`
749
- );
750
- const isBound = stdout.trim() !== "free" && stdout.trim() !== "";
751
- if (isBound) {
752
- return {
753
- ok: false,
754
- warn: true,
755
- label: `Port ${port} \u2014 already in use on server (Caddy needs it for HTTPS \u2014 ensure nothing else is binding it)`
756
- };
758
+ const portCheckCmd = `ss -tlnp 2>/dev/null | grep ':${port} ' || netstat -tlnp 2>/dev/null | grep ':${port} ' || echo "free"`;
759
+ const { stdout: initial } = await ssh.execSilent(portCheckCmd);
760
+ const isBound = initial.trim() !== "free" && initial.trim() !== "";
761
+ if (!isBound) {
762
+ return { ok: true, label: `Port ${port} \u2014 available` };
763
+ }
764
+ const knownServices = ["nginx", "apache2", "httpd", "lighttpd", "caddy"];
765
+ for (const svc of knownServices) {
766
+ const { code: activeCode } = await ssh.execSilent(
767
+ `systemctl is-active --quiet ${svc} 2>/dev/null`
768
+ );
769
+ if (activeCode === 0) {
770
+ await ssh.execSilent(
771
+ `systemctl stop ${svc} 2>/dev/null; systemctl disable ${svc} 2>/dev/null`
772
+ );
773
+ const { stdout: recheck } = await ssh.execSilent(portCheckCmd);
774
+ if (recheck.trim() === "free" || !recheck.trim()) {
775
+ return {
776
+ ok: true,
777
+ label: `Port ${port} \u2014 freed (stopped and disabled system ${svc} service)`
778
+ };
779
+ }
780
+ }
757
781
  }
758
- return { ok: true, label: `Port ${port} \u2014 available` };
782
+ const { stdout: occupant } = await ssh.execSilent(
783
+ `ss -tlnp 2>/dev/null | grep ':${port} ' | head -1 || echo "unknown process"`
784
+ );
785
+ return {
786
+ ok: false,
787
+ // Hard error — Caddy cannot get TLS certs without these ports
788
+ label: `Port ${port} is occupied and could not be freed automatically.
789
+ Occupant: ${occupant.trim()}
790
+ Stop the conflicting process on the server, then re-run:
791
+ lead-routing init`
792
+ };
759
793
  }
760
794
 
761
795
  // src/steps/upload-files.ts
@@ -882,11 +916,11 @@ function sleep(ms) {
882
916
  import * as fs from "fs";
883
917
  import * as path from "path";
884
918
  import * as crypto from "crypto";
885
- import { fileURLToPath } from "url";
919
+ import { fileURLToPath as fileURLToPath2 } from "url";
886
920
  import { execa as execa2 } from "execa";
887
921
  import { spinner as spinner4 } from "@clack/prompts";
888
- var __filename = fileURLToPath(import.meta.url);
889
- var __dirname = path.dirname(__filename);
922
+ var __filename = fileURLToPath2(import.meta.url);
923
+ var __dirname2 = path.dirname(__filename);
890
924
  function readEnvVar(envFile, key) {
891
925
  const content = fs.readFileSync(envFile, "utf8");
892
926
  const match = content.match(new RegExp(`^${key}=(.+)$`, "m"));
@@ -905,11 +939,11 @@ function findPrismaBin() {
905
939
  // npx / npm global install: @lead-routing/cli is nested under the scope dir,
906
940
  // so prisma lands 3 levels above dist/ in node_modules/.bin/
907
941
  // e.g. ~/.npm/_npx/HASH/node_modules/.bin/prisma
908
- path.join(__dirname, "../../../.bin/prisma"),
909
- path.join(__dirname, "../../../prisma/bin/prisma.js"),
942
+ path.join(__dirname2, "../../../.bin/prisma"),
943
+ path.join(__dirname2, "../../../prisma/bin/prisma.js"),
910
944
  // Fallback: prisma nested inside the package's own node_modules (hoisted install)
911
- path.join(__dirname, "../node_modules/.bin/prisma"),
912
- path.join(__dirname, "../node_modules/prisma/bin/prisma.js"),
945
+ path.join(__dirname2, "../node_modules/.bin/prisma"),
946
+ path.join(__dirname2, "../node_modules/prisma/bin/prisma.js"),
913
947
  // Monorepo dev paths
914
948
  path.resolve("packages/db/node_modules/.bin/prisma"),
915
949
  path.resolve("node_modules/.bin/prisma"),
@@ -928,7 +962,9 @@ async function runMigrations(ssh, localDir, adminEmail, adminPassword) {
928
962
  tunnelClose = close;
929
963
  s.stop(`Database tunnel open (local port ${localPort})`);
930
964
  await applyMigrations(localDir, localPort);
931
- await seedAdminUser(localDir, localPort, adminEmail, adminPassword);
965
+ if (adminEmail && adminPassword) {
966
+ await seedAdminUser(localDir, localPort, adminEmail, adminPassword);
967
+ }
932
968
  } finally {
933
969
  tunnelClose?.();
934
970
  }
@@ -939,7 +975,7 @@ async function applyMigrations(localDir, localPort) {
939
975
  try {
940
976
  const DATABASE_URL = getTunneledDbUrl(localDir, localPort);
941
977
  const prismaBin = findPrismaBin();
942
- const bundledSchema = path.join(__dirname, "prisma/schema.prisma");
978
+ const bundledSchema = path.join(__dirname2, "prisma/schema.prisma");
943
979
  const monoSchema = path.resolve("packages/db/prisma/schema.prisma");
944
980
  const schemaPath = fs.existsSync(bundledSchema) ? bundledSchema : monoSchema;
945
981
  await execa2(prismaBin, ["migrate", "deploy", "--schema", schemaPath], {
@@ -986,7 +1022,7 @@ ON CONFLICT ("orgId", email) DO NOTHING;
986
1022
 
987
1023
  // src/steps/verify-health.ts
988
1024
  import { spinner as spinner5, log as log5 } from "@clack/prompts";
989
- async function verifyHealth(appUrl, engineUrl) {
1025
+ async function verifyHealth(appUrl, engineUrl, ssh, remoteDir) {
990
1026
  const checks = [
991
1027
  { service: "Web app", url: `${appUrl}/api/health` },
992
1028
  { service: "Routing engine", url: `${engineUrl}/health` }
@@ -996,9 +1032,42 @@ async function verifyHealth(appUrl, engineUrl) {
996
1032
  if (r.ok) {
997
1033
  log5.success(`${r.service} \u2014 ${r.url}`);
998
1034
  } else {
999
- log5.warn(`${r.service} not responding yet \u2014 ${r.detail}`);
1035
+ log5.warn(`${r.service} \u2014 did not respond after ${r.detail}`);
1000
1036
  }
1001
1037
  }
1038
+ const failed = results.filter((r) => !r.ok);
1039
+ if (failed.length === 0) return;
1040
+ log5.info("Fetching remote diagnostics\u2026");
1041
+ try {
1042
+ const { stdout: ps } = await ssh.execSilent("docker compose ps --format table", remoteDir);
1043
+ if (ps.trim()) log5.info(`Container status:
1044
+ ${ps.trim()}`);
1045
+ } catch {
1046
+ }
1047
+ try {
1048
+ const { stdout: caddyLogs } = await ssh.execSilent(
1049
+ "docker compose logs caddy --tail 30 --no-color 2>&1",
1050
+ remoteDir
1051
+ );
1052
+ if (caddyLogs.trim()) log5.info(`Caddy logs (last 30 lines):
1053
+ ${caddyLogs.trim()}`);
1054
+ } catch {
1055
+ }
1056
+ const failedNames = failed.map((r) => r.service).join(" and ");
1057
+ throw new Error(
1058
+ `${failedNames} did not respond after 2 minutes.
1059
+
1060
+ Common causes (check Caddy logs above):
1061
+ \u2022 Let's Encrypt rate limit \u2014 wait until tomorrow and re-run
1062
+ \u2022 Port 80/443 still blocked by another process
1063
+ \u2022 Container crashed \u2014 check container status above
1064
+
1065
+ To resume once fixed:
1066
+ 1. SSH into your server:
1067
+ cd ${remoteDir} && docker compose restart caddy
1068
+ 2. Then re-run Salesforce setup:
1069
+ lead-routing sfdc deploy`
1070
+ );
1002
1071
  }
1003
1072
  async function pollHealth(service, url, maxAttempts = 24, intervalMs = 5e3) {
1004
1073
  const s = spinner5();
@@ -1022,7 +1091,7 @@ async function pollHealth(service, url, maxAttempts = 24, intervalMs = 5e3) {
1022
1091
  service,
1023
1092
  url,
1024
1093
  ok: false,
1025
- detail: `timed out after ${maxAttempts * intervalMs / 1e3}s`
1094
+ detail: `${maxAttempts * intervalMs / 1e3}s`
1026
1095
  };
1027
1096
  }
1028
1097
  function sleep2(ms) {
@@ -1030,13 +1099,13 @@ function sleep2(ms) {
1030
1099
  }
1031
1100
 
1032
1101
  // src/steps/sfdc-deploy-inline.ts
1033
- import { readFileSync as readFileSync3, writeFileSync as writeFileSync3, existsSync as existsSync4, cpSync, rmSync } from "fs";
1034
- import { join as join5, dirname as dirname2 } from "path";
1102
+ import { readFileSync as readFileSync4, writeFileSync as writeFileSync3, existsSync as existsSync4, cpSync, rmSync } from "fs";
1103
+ import { join as join5, dirname as dirname3 } from "path";
1035
1104
  import { tmpdir } from "os";
1036
- import { fileURLToPath as fileURLToPath2 } from "url";
1105
+ import { fileURLToPath as fileURLToPath3 } from "url";
1037
1106
  import { spinner as spinner6, log as log6 } from "@clack/prompts";
1038
1107
  import { execa as execa3 } from "execa";
1039
- var __dirname2 = dirname2(fileURLToPath2(import.meta.url));
1108
+ var __dirname3 = dirname3(fileURLToPath3(import.meta.url));
1040
1109
  function patchXml(content, tag, value) {
1041
1110
  const re = new RegExp(`(<${tag}>)[^<]*(</\\s*${tag}>)`, "g");
1042
1111
  return content.replace(re, `$1${value}$2`);
@@ -1062,8 +1131,8 @@ async function sfdcDeployInline(params) {
1062
1131
  }
1063
1132
  }
1064
1133
  s.start("Copying Salesforce package\u2026");
1065
- const inDist = join5(__dirname2, "sfdc-package");
1066
- const nextToDist = join5(__dirname2, "..", "sfdc-package");
1134
+ const inDist = join5(__dirname3, "sfdc-package");
1135
+ const nextToDist = join5(__dirname3, "..", "sfdc-package");
1067
1136
  const bundledPkg = existsSync4(inDist) ? inDist : nextToDist;
1068
1137
  const destPkg = join5(installDir ?? tmpdir(), "lead-routing-sfdc-package");
1069
1138
  if (!existsSync4(bundledPkg)) {
@@ -1085,7 +1154,7 @@ The CLI may need to be reinstalled: npm i -g @lead-routing/cli`
1085
1154
  "RoutingEngine.namedCredential-meta.xml"
1086
1155
  );
1087
1156
  if (existsSync4(ncPath)) {
1088
- const nc = patchXml(readFileSync3(ncPath, "utf8"), "endpoint", engineUrl);
1157
+ const nc = patchXml(readFileSync4(ncPath, "utf8"), "endpoint", engineUrl);
1089
1158
  writeFileSync3(ncPath, nc, "utf8");
1090
1159
  }
1091
1160
  const rssEnginePath = join5(
@@ -1097,7 +1166,7 @@ The CLI may need to be reinstalled: npm i -g @lead-routing/cli`
1097
1166
  "LeadRouterEngine.remoteSite-meta.xml"
1098
1167
  );
1099
1168
  if (existsSync4(rssEnginePath)) {
1100
- let rss = patchXml(readFileSync3(rssEnginePath, "utf8"), "url", engineUrl);
1169
+ let rss = patchXml(readFileSync4(rssEnginePath, "utf8"), "url", engineUrl);
1101
1170
  rss = patchXml(rss, "description", "Lead Router Engine endpoint");
1102
1171
  writeFileSync3(rssEnginePath, rss, "utf8");
1103
1172
  }
@@ -1110,7 +1179,7 @@ The CLI may need to be reinstalled: npm i -g @lead-routing/cli`
1110
1179
  "LeadRouterApp.remoteSite-meta.xml"
1111
1180
  );
1112
1181
  if (existsSync4(rssAppPath)) {
1113
- let rss = patchXml(readFileSync3(rssAppPath, "utf8"), "url", appUrl);
1182
+ let rss = patchXml(readFileSync4(rssAppPath, "utf8"), "url", appUrl);
1114
1183
  rss = patchXml(rss, "description", "Lead Router App URL");
1115
1184
  writeFileSync3(rssAppPath, rss, "utf8");
1116
1185
  }
@@ -1479,7 +1548,7 @@ Files created: docker-compose.yml, Caddyfile, .env.web, .env.engine, lead-routin
1479
1548
  log8.step("Step 7/9 Database migrations");
1480
1549
  await runMigrations(ssh, dir, cfg.adminEmail, cfg.adminPassword);
1481
1550
  log8.step("Step 8/9 Verifying health");
1482
- await verifyHealth(cfg.appUrl, cfg.engineUrl);
1551
+ await verifyHealth(cfg.appUrl, cfg.engineUrl, ssh, remoteDir);
1483
1552
  log8.step("Step 9/9 Deploying Salesforce package");
1484
1553
  await sfdcDeployInline({
1485
1554
  appUrl: cfg.appUrl,
@@ -1572,7 +1641,7 @@ async function runDeploy() {
1572
1641
  await ssh.exec("docker compose up -d --remove-orphans", remoteDir);
1573
1642
  log9.success("Services restarted");
1574
1643
  log9.step("Running database migrations");
1575
- await runMigrations(ssh, dir, "", "");
1644
+ await runMigrations(ssh, dir);
1576
1645
  outro2(
1577
1646
  chalk3.green("\u2714 Deployment complete!") + `
1578
1647
 
@@ -1724,7 +1793,7 @@ async function runStatus() {
1724
1793
  }
1725
1794
 
1726
1795
  // src/commands/config.ts
1727
- import { readFileSync as readFileSync4, writeFileSync as writeFileSync5, existsSync as existsSync5 } from "fs";
1796
+ import { readFileSync as readFileSync5, writeFileSync as writeFileSync5, existsSync as existsSync5 } from "fs";
1728
1797
  import { join as join7 } from "path";
1729
1798
  import { intro as intro4, outro as outro4, text as text3, password as password3, spinner as spinner7, log as log13 } from "@clack/prompts";
1730
1799
  import chalk5 from "chalk";
@@ -1732,7 +1801,7 @@ import { execa as execa7 } from "execa";
1732
1801
  function parseEnv(filePath) {
1733
1802
  const map = /* @__PURE__ */ new Map();
1734
1803
  if (!existsSync5(filePath)) return map;
1735
- for (const line of readFileSync4(filePath, "utf8").split("\n")) {
1804
+ for (const line of readFileSync5(filePath, "utf8").split("\n")) {
1736
1805
  const trimmed = line.trim();
1737
1806
  if (!trimmed || trimmed.startsWith("#")) continue;
1738
1807
  const eq = trimmed.indexOf("=");
@@ -1742,7 +1811,7 @@ function parseEnv(filePath) {
1742
1811
  return map;
1743
1812
  }
1744
1813
  function writeEnv(filePath, updates) {
1745
- const lines = existsSync5(filePath) ? readFileSync4(filePath, "utf8").split("\n") : [];
1814
+ const lines = existsSync5(filePath) ? readFileSync5(filePath, "utf8").split("\n") : [];
1746
1815
  const updated = /* @__PURE__ */ new Set();
1747
1816
  const result = lines.map((line) => {
1748
1817
  const trimmed = line.trim();
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@lead-routing/cli",
3
- "version": "0.1.2",
3
+ "version": "0.1.4",
4
4
  "description": "Self-hosted deployment CLI for Lead Routing",
5
5
  "homepage": "https://github.com/lead-routing/lead-routing",
6
6
  "keywords": ["salesforce", "lead-routing", "self-hosted", "deployment", "cli"],