@meshxdata/fops 0.1.26 → 0.1.28

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of @meshxdata/fops might be problematic. See the package's advisory page on the registry for more details.

package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@meshxdata/fops",
3
- "version": "0.1.26",
3
+ "version": "0.1.28",
4
4
  "description": "CLI to install and manage data mesh platforms",
5
5
  "keywords": [
6
6
  "fops",
@@ -32,6 +32,7 @@
32
32
  "changelog:bootstrap": "node scripts/generate-changelog.js --bootstrap",
33
33
  "prepublishOnly": "npm run changelog",
34
34
  "postpublish": "node scripts/pin-checksum.js",
35
+ "preinstall": "node -e \"if(+process.versions.node.split('.')[0]<20){console.error('\\n\\x1b[31mError: @meshxdata/fops requires Node.js >= 20.\\nYou are running Node.js '+process.version+'.\\nPlease upgrade: https://nodejs.org\\x1b[0m\\n');process.exit(1)}\"",
35
36
  "prepare": "cd .. && husky operator-cli/.husky"
36
37
  },
37
38
  "dependencies": {
@@ -72,7 +73,7 @@
72
73
  "access": "public"
73
74
  },
74
75
  "engines": {
75
- "node": ">=18"
76
+ "node": ">=20"
76
77
  },
77
78
  "devDependencies": {
78
79
  "@tailwindcss/vite": "^4.1.18",
@@ -22,8 +22,8 @@ function resolveServiceName(root, input) {
22
22
  export function registerLifecycleCommands(program, registry) {
23
23
  program
24
24
  .command("up")
25
- .description("Start all Foundation services (docker compose up)")
26
- .argument("[component]", "Component to run from a specific branch (backend, frontend, watcher, processor, scheduler, storage, hive, traefik, data)")
25
+ .description("Start Foundation services (all or specific component)")
26
+ .argument("[component]", "Component to start (backend, frontend, watcher, processor, scheduler, storage, hive, traefik, data). Add [branch] to checkout first.")
27
27
  .argument("[branch]", "Git branch to checkout in that component (e.g. FOU-2072)")
28
28
  .option("-d, --detach", "Run in background", true)
29
29
  .option("--k3s", "Include k3s Kubernetes services (default: true for local)", true)
@@ -51,13 +51,28 @@ export function registerLifecycleCommands(program, registry) {
51
51
 
52
52
  program
53
53
  .command("down")
54
- .description("Stop all Foundation services")
54
+ .description("Stop Foundation services (all or specific)")
55
+ .argument("[services...]", "Optional: stop only these services (e.g. watcher, backend)")
55
56
  .option("--clean", "Remove volumes and orphans (make clean)")
56
- .action(async (opts) => {
57
+ .action(async (services, opts) => {
57
58
  const root = requireRoot(program);
58
59
  const { execa } = await import("execa");
59
60
  await runHook(registry, "before:down", { root });
60
- if (opts.clean) {
61
+
62
+ // Resolve short service names to docker compose service names
63
+ const serviceNames = (services || []).filter(Boolean).flatMap((s) => {
64
+ const sub = COMPONENT_SUBMODULES[s];
65
+ if (sub) return sub.restart;
66
+ return [s.startsWith("foundation-") ? s : `foundation-${s}`];
67
+ });
68
+
69
+ if (serviceNames.length) {
70
+ // Stop specific services only
71
+ console.log(chalk.cyan(` Stopping ${serviceNames.join(", ")}...`));
72
+ await dockerCompose(root, ["stop", "-t", "5", ...serviceNames], { timeout: 60000 });
73
+ // Remove the stopped containers so they don't show as "exited"
74
+ await dockerCompose(root, ["rm", "-f", ...serviceNames], { timeout: 30000 });
75
+ } else if (opts.clean) {
61
76
  const result = await dockerCompose(root, ["down", "-v", "--remove-orphans", "-t", "5"], { timeout: 120000 });
62
77
  if (result?.timedOut) {
63
78
  console.log(chalk.yellow(" Compose down timed out — force-killing containers..."));
@@ -403,6 +418,11 @@ async function runUp(program, registry, opts) {
403
418
 
404
419
  // Optional: checkout a component submodule to a specific branch before starting (e.g. fops up backend FOU-2072)
405
420
  const comp = opts.component && COMPONENT_SUBMODULES[opts.component];
421
+ if (opts.component && !comp) {
422
+ console.error(chalk.red(`\n Unknown component: ${opts.component}`));
423
+ console.error(chalk.dim(` Available: ${Object.keys(COMPONENT_SUBMODULES).join(", ")}\n`));
424
+ process.exit(1);
425
+ }
406
426
  if (comp && opts.branch) {
407
427
  const componentDir = path.join(root, comp.dir);
408
428
  if (!fs.existsSync(path.join(componentDir, ".git"))) {
@@ -902,8 +922,8 @@ async function runUp(program, registry, opts) {
902
922
  startSpinner();
903
923
  }
904
924
 
905
- // When fops up <component> <branch>, only pull and up that component (and deps) — avoid full stack rebuild
906
- const componentOnlyUp = comp && opts.branch;
925
+ // When fops up <component> [branch], only pull and up that component — avoid full stack rebuild
926
+ const componentOnlyUp = !!comp;
907
927
 
908
928
  // Pull images only when --pull is passed (or use `fops pull` separately)
909
929
  if (opts.pull) {
package/src/doctor.js CHANGED
@@ -558,7 +558,7 @@ export async function runDoctor(opts = {}, registry = null) {
558
558
  } else {
559
559
  fail("Claude CLI not found", "install globally: npm install -g @anthropic-ai/claude-code", async () => {
560
560
  console.log(chalk.cyan(" ▶ npm install -g @anthropic-ai/claude-code"));
561
- await execa("npm", ["install", "-g", "@anthropic-ai/claude-code"], { stdio: "inherit", timeout: 300_000 });
561
+ await execa("npm", ["install", "-g", "--loglevel=error", "@anthropic-ai/claude-code"], { stdio: "inherit", timeout: 300_000 });
562
562
  });
563
563
  }
564
564
  }
@@ -483,6 +483,7 @@ export default {
483
483
  let landscapeResults;
484
484
  let trinoTables = [];
485
485
  let trinoTableInfo = new Map(); // table name → { catalog, schema }
486
+ const productTableMap = new Map(); // data_product identifier → trino table name (explicit from API)
486
487
 
487
488
  // Helper: load trino table info from cache into trinoTables + trinoTableInfo
488
489
  const loadTrinoFromCache = () => {
@@ -657,6 +658,43 @@ export default {
657
658
  const tEdges = Date.now();
658
659
  if (meshes.length > 0) process.stderr.write(`\r${chalk.dim(`Fetching edges... ${landscapeDone}/${meshes.length} (${((tEdges - tEdgesStart) / 1000).toFixed(1)}s)`)}\n`);
659
660
 
661
+ // Fetch explicit trino table mappings from data product details.
662
+ // The landscape endpoint doesn't include table info, so we fetch each product's detail.
663
+ {
664
+ const dpIdentifiers = new Set();
665
+ for (const { edges } of landscapeResults) {
666
+ for (const e of edges) {
667
+ if (e.parent?.entity_type === "data_product") dpIdentifiers.add(e.parent.identifier);
668
+ if (e.child?.entity_type === "data_product") dpIdentifiers.add(e.child.identifier);
669
+ }
670
+ }
671
+ if (dpIdentifiers.size > 0) {
672
+ const DP_CONCURRENCY = 10;
673
+ const dpIds = [...dpIdentifiers];
674
+ for (let i = 0; i < dpIds.length; i += DP_CONCURRENCY) {
675
+ const batch = dpIds.slice(i, i + DP_CONCURRENCY);
676
+ const results = await Promise.all(
677
+ batch.map((id) =>
678
+ client.get(`/data/data_product/${id}`)
679
+ .then((res) => ({ id, table: res?.table }))
680
+ .catch(() => ({ id, table: null })),
681
+ ),
682
+ );
683
+ for (const { id, table } of results) {
684
+ if (table?.table_name) {
685
+ // table_name from API is the fully-qualified name (schema.table)
686
+ const parts = table.table_name.split(".");
687
+ const shortName = parts[parts.length - 1];
688
+ productTableMap.set(id, shortName);
689
+ }
690
+ }
691
+ }
692
+ if (productTableMap.size > 0) {
693
+ process.stderr.write(chalk.dim(`Resolved ${productTableMap.size}/${dpIdentifiers.size} product→table mappings\n`));
694
+ }
695
+ }
696
+ }
697
+
660
698
  // Best-effort Trino table discovery so landscape can show product->table relationships.
661
699
  // Supports --trino-url / TRINO_URL for HTTP-based discovery, falls back to docker compose exec.
662
700
  // Auto-discovers ALL schemas in the catalog unless TRINO_SCHEMA is explicitly set.
@@ -787,8 +825,20 @@ export default {
787
825
  .replace(/[^a-z0-9]+/g, "_")
788
826
  .replace(/^_+|_+$/g, "");
789
827
  const preMatchedTrino = new Set();
828
+ // Build reverse map: normalized trino table name → original name (for explicit matching)
829
+ const explicitlyLinked = new Map(); // entity identifier → trino table name
790
830
  if (trinoTables.length > 0) {
791
- // Collect entity names from landscape edges (products, objects, sources)
831
+ // 1. Explicit matches from API-fetched product→table mappings
832
+ for (const [dpId, tableName] of productTableMap) {
833
+ const tn = normName(tableName);
834
+ const match = trinoTables.find((t) => normName(t) === tn);
835
+ if (match) {
836
+ preMatchedTrino.add(match);
837
+ explicitlyLinked.set(dpId, match);
838
+ }
839
+ }
840
+
841
+ // 2. Name-based fallback for entities without explicit mapping
792
842
  const PRE_LINKABLE = new Set(["data_product", "data_object", "data_source"]);
793
843
  const allEntityNames = new Set();
794
844
  for (const { edges } of landscapeResults) {
@@ -958,15 +1008,40 @@ export default {
958
1008
  return true;
959
1009
  });
960
1010
 
961
- // Add synthetic entity -> trino_table edges by name similarity.
962
- // Match against data products, data objects, and data sources.
1011
+ // Add synthetic entity -> trino_table edges.
1012
+ // 1. Explicit mappings from API (productTableMap) take priority.
1013
+ // 2. Fall back to name-based similarity for remaining entities.
963
1014
  const LINKABLE_TYPES = new Set(["data_product", "data_object", "data_source"]);
964
1015
  const linkableNodes = new Map();
965
1016
  for (const e of unique) {
966
1017
  if (LINKABLE_TYPES.has(e.parent?.entity_type)) linkableNodes.set(e.parent.identifier, e.parent);
967
1018
  if (LINKABLE_TYPES.has(e.child?.entity_type)) linkableNodes.set(e.child.identifier, e.child);
968
1019
  }
1020
+ const addTrinoEdge = (node, table) => {
1021
+ matchedTrinoTables.add(table);
1022
+ const tNode = {
1023
+ identifier: `trino:${table}`,
1024
+ name: table,
1025
+ entity_type: "trino_table",
1026
+ state: { healthy: true },
1027
+ };
1028
+ const edge = { parent: node, child: tNode };
1029
+ const key = `${edge.parent.identifier}:${edge.child.identifier}`;
1030
+ if (!seen.has(key)) {
1031
+ seen.add(key);
1032
+ unique.push(edge);
1033
+ }
1034
+ };
1035
+ const explicitlyLinkedNodes = new Set();
969
1036
  for (const node of linkableNodes.values()) {
1037
+ // Check explicit API mapping first
1038
+ const explicitTable = explicitlyLinked.get(node.identifier);
1039
+ if (explicitTable) {
1040
+ addTrinoEdge(node, explicitTable);
1041
+ explicitlyLinkedNodes.add(node.identifier);
1042
+ continue;
1043
+ }
1044
+ // Fall back to name-based matching
970
1045
  const nNorm = normName(node.name);
971
1046
  if (!nNorm) continue;
972
1047
  const matches = trinoTables.filter((t) => {
@@ -974,19 +1049,7 @@ export default {
974
1049
  return tn === nNorm || tn.startsWith(`${nNorm}_`) || nNorm.startsWith(`${tn}_`);
975
1050
  });
976
1051
  for (const table of matches) {
977
- matchedTrinoTables.add(table);
978
- const tNode = {
979
- identifier: `trino:${table}`,
980
- name: table,
981
- entity_type: "trino_table",
982
- state: { healthy: true },
983
- };
984
- const edge = { parent: node, child: tNode };
985
- const key = `${edge.parent.identifier}:${edge.child.identifier}`;
986
- if (!seen.has(key)) {
987
- seen.add(key);
988
- unique.push(edge);
989
- }
1052
+ addTrinoEdge(node, table);
990
1053
  }
991
1054
  }
992
1055
  totalEdges += unique.length;
@@ -1037,13 +1100,32 @@ export default {
1037
1100
 
1038
1101
  // --deep: show columns and sample under trino_table nodes
1039
1102
  if (meta && meta.columns.length > 0) {
1040
- const colLine = meta.columns.map((c) => `${c.name}${chalk.dim(`:${c.type}`)}`).join(", ");
1041
- console.log(`${childPrefix}${chalk.dim("columns: " + colLine)}`);
1042
- if (meta.sample && meta.sample.rows.length > 0) {
1043
- for (let r = 0; r < meta.sample.rows.length; r++) {
1044
- const row = meta.sample.rows[r];
1045
- const pairs = meta.sample.columns.map((col, ci) => `${col}=${chalk.white(row[ci] ?? "NULL")}`).join(", ");
1046
- console.log(`${childPrefix}${chalk.dim(` row ${r + 1}: ${pairs}`)}`);
1103
+ // Detect Foundation meta tables (_*_expectations, _*_profiling, _*_validations)
1104
+ const isMeta = e.entity_type === "trino_table" &&
1105
+ /_(expectations|profiling|validations)$/.test(e.name);
1106
+
1107
+ if (isMeta) {
1108
+ // Meta tables: just show column count inline, no sample data
1109
+ const colNames = meta.columns.slice(0, 4).map((c) => c.name).join(", ");
1110
+ const more = meta.columns.length > 4 ? `, +${meta.columns.length - 4}` : "";
1111
+ console.log(`${childPrefix}${chalk.dim(`${colNames}${more}`)}`);
1112
+ } else {
1113
+ // Main data tables: show columns and sample rows
1114
+ const shortType = (t) => t.replace(/timestamp\(\d+\) with time zone/g, "timestamp(tz)")
1115
+ .replace(/character varying/g, "varchar");
1116
+ const colLine = meta.columns.map((c) => `${c.name}${chalk.dim(`:${shortType(c.type)}`)}`).join(", ");
1117
+ console.log(`${childPrefix}${chalk.dim("cols: " + colLine)}`);
1118
+ if (meta.sample && meta.sample.rows.length > 0) {
1119
+ const truncVal = (v, max = 40) => {
1120
+ if (v == null) return "NULL";
1121
+ const s = String(v);
1122
+ return s.length > max ? s.slice(0, max - 1) + "…" : s;
1123
+ };
1124
+ for (let r = 0; r < meta.sample.rows.length; r++) {
1125
+ const row = meta.sample.rows[r];
1126
+ const pairs = meta.sample.columns.map((col, ci) => `${col}=${chalk.white(truncVal(row[ci]))}`).join(" ");
1127
+ console.log(`${childPrefix}${chalk.dim(` row ${r + 1}:`)} ${pairs}`);
1128
+ }
1047
1129
  }
1048
1130
  }
1049
1131
  }
@@ -67,9 +67,9 @@ async function installTool(name, { brew, brewCask, winget, apt, npm: npmPkg } =
67
67
  const cmd = needsSudo ? `sudo npm install -g ${pkg}` : `npm install -g ${pkg}`;
68
68
  console.log(ACCENT(` ▶ ${cmd}`));
69
69
  if (needsSudo) {
70
- await execa("sudo", ["npm", "install", "-g", pkg], { stdio: "inherit", timeout: 300_000 });
70
+ await execa("sudo", ["npm", "install", "-g", "--loglevel=error", pkg], { stdio: "inherit", timeout: 300_000 });
71
71
  } else {
72
- await execa("npm", ["install", "-g", pkg], { stdio: "inherit", timeout: 300_000 });
72
+ await execa("npm", ["install", "-g", "--loglevel=error", pkg], { stdio: "inherit", timeout: 300_000 });
73
73
  }
74
74
  return true;
75
75
  }