@aifabrix/builder 2.44.0 → 2.44.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. package/.cursor/rules/cli-layout.mdc +75 -0
  2. package/.cursor/rules/project-rules.mdc +8 -0
  3. package/.npmrc.token +1 -0
  4. package/.nyc_output/55e9d034-ddab-4579-a706-e02a91d75c91.json +1 -0
  5. package/.nyc_output/processinfo/55e9d034-ddab-4579-a706-e02a91d75c91.json +1 -0
  6. package/.nyc_output/processinfo/index.json +1 -0
  7. package/jest.projects.js +15 -2
  8. package/lib/api/certificates.api.js +62 -0
  9. package/lib/api/index.js +11 -2
  10. package/lib/api/types/certificates.types.js +48 -0
  11. package/lib/api/validation-run.api.js +16 -4
  12. package/lib/api/validation-runner.js +13 -3
  13. package/lib/app/certification-show-enrich.js +129 -0
  14. package/lib/app/certification-verify-rows.js +60 -0
  15. package/lib/app/show-display.js +43 -0
  16. package/lib/app/show.js +92 -8
  17. package/lib/certification/cli-cert-sync-skip.js +21 -0
  18. package/lib/certification/merge-certification-from-artifact.js +185 -0
  19. package/lib/certification/post-unified-cert-sync.js +33 -0
  20. package/lib/certification/sync-after-external-command.js +52 -0
  21. package/lib/certification/sync-system-certification.js +197 -0
  22. package/lib/cli/setup-app.js +4 -0
  23. package/lib/cli/setup-app.test-commands.js +24 -8
  24. package/lib/cli/setup-external-system.js +22 -1
  25. package/lib/cli/setup-secrets.js +34 -13
  26. package/lib/cli/setup-utility.js +18 -2
  27. package/lib/commands/app.js +10 -1
  28. package/lib/commands/datasource-unified-test-cli.js +50 -117
  29. package/lib/commands/datasource-unified-test-cli.options.js +44 -2
  30. package/lib/commands/datasource-unified-test-e2e-cli-helpers.js +106 -0
  31. package/lib/commands/datasource-validation-cli.js +15 -1
  32. package/lib/commands/datasource.js +25 -2
  33. package/lib/commands/upload.js +17 -6
  34. package/lib/datasource/log-viewer.js +105 -14
  35. package/lib/datasource/test-e2e.js +35 -17
  36. package/lib/datasource/unified-validation-run-body.js +3 -0
  37. package/lib/datasource/unified-validation-run.js +2 -1
  38. package/lib/external-system/deploy.js +53 -18
  39. package/lib/infrastructure/compose.js +12 -3
  40. package/lib/infrastructure/helpers-docker-check.js +67 -0
  41. package/lib/infrastructure/helpers.js +47 -58
  42. package/lib/infrastructure/index.js +3 -1
  43. package/lib/infrastructure/services.js +4 -56
  44. package/lib/schema/external-system.schema.json +25 -3
  45. package/lib/schema/type/document-storage.json +15 -2
  46. package/lib/utils/api.js +28 -3
  47. package/lib/utils/configuration-env-resolver.js +11 -8
  48. package/lib/utils/credential-secrets-env.js +5 -5
  49. package/lib/utils/datasource-test-run-certificate-tty.js +82 -0
  50. package/lib/utils/datasource-test-run-display.js +19 -2
  51. package/lib/utils/datasource-test-run-exit.js +25 -0
  52. package/lib/utils/external-system-display.js +8 -0
  53. package/lib/utils/external-system-system-test-tty-overview.js +120 -0
  54. package/lib/utils/external-system-system-test-tty.js +417 -0
  55. package/lib/utils/paths.js +14 -0
  56. package/lib/utils/validation-run-poll.js +28 -5
  57. package/lib/utils/validation-run-post-retry.js +20 -8
  58. package/lib/utils/validation-run-request.js +18 -0
  59. package/lib/validation/validate-external-cert-sync.js +23 -0
  60. package/lib/validation/validate.js +4 -1
  61. package/package.json +4 -3
  62. package/scripts/install-local.js +4 -1
  63. package/scripts/pnpm-global-remove.js +48 -0
  64. package/templates/applications/dataplane/env.template +4 -0
  65. package/templates/infra/compose.yaml.hbs +15 -14
  66. package/templates/infra/servers.json.hbs +3 -1
@@ -0,0 +1,67 @@
1
+ /**
2
+ * @fileoverview Docker / Compose availability check and user-facing failure text for infra helpers.
3
+ * @author AI Fabrix Team
4
+ * @version 2.0.0
5
+ */
6
+
7
+ 'use strict';
8
+
9
+ const dockerUtils = require('../utils/docker');
10
+ const { ensureDevCertsIfNeededForRemoteDocker } = require('../utils/ensure-dev-certs-for-remote-docker');
11
+
12
/**
 * Build a user-facing error message for a failed Docker/Compose availability
 * check, tailored to the underlying failure text.
 * @param {string} detail - Error message from ensureDockerAndCompose / Docker CLI
 * @returns {string} Multi-paragraph guidance including the original cause
 */
function formatDockerInfrastructureFailure(detail) {
  const reason = (detail || '').trim() || 'unknown error';
  // Shared "Cause:" paragraph embedded in every variant of the message.
  const causeParagraph = `Cause: ${reason}\n\n`;

  // Compose plugin/check failure: point at TLS setup, Compose v2 install, or the override env var.
  if (/Docker Compose is not available/i.test(reason)) {
    return (
      'Cannot use Docker for infrastructure: Docker Compose check failed (see Cause below).\n\n' +
      causeParagraph +
      'If Cause mentions TLS, certificate, or handshake, fix client TLS for docker-endpoint (cert.pem, key.pem, ca.pem under ~/.aifabrix/certs/<developer-id>/) or docker-tls-skip-verify when appropriate. ' +
      'If Cause suggests a missing plugin, install Docker Compose v2 for your user (docker CLI + plugin; no unix socket needed when using tcp:// docker-endpoint). ' +
      'Or set AIFABRIX_COMPOSE_CMD. Run `aifabrix doctor` for diagnostics.'
    );
  }

  // Explicit AIFABRIX_COMPOSE_CMD override is present but broken.
  if (/AIFABRIX_COMPOSE_CMD/i.test(reason) && /is set but failed/i.test(reason)) {
    return (
      'Cannot use Docker for infrastructure: AIFABRIX_COMPOSE_CMD failed.\n\n' +
      causeParagraph +
      'Unset or fix AIFABRIX_COMPOSE_CMD, or install a working Compose. Run `aifabrix doctor` for diagnostics.'
    );
  }

  // Generic fallback: Docker CLI missing, Compose missing, or remote endpoint misconfigured.
  return (
    'Cannot use Docker for infrastructure (Docker CLI missing, Compose missing, or remote Docker misconfigured).\n\n' +
    causeParagraph +
    'Install Docker Engine and Compose on this machine (or set AIFABRIX_COMPOSE_CMD). ' +
    'If you use docker-endpoint in dev config: install cert.pem, key.pem, and ca.pem for full TLS verify; use `aifabrix dev pin` / ' +
    '`dev init --pin` as needed; or enable TLS skip-verify (config or AIFABRIX_DOCKER_TLS_SKIP_VERIFY) when appropriate. ' +
    'Run `aifabrix doctor` for diagnostics.'
  );
}
47
+
48
+ /**
49
+ * Check Docker availability (local daemon or remote via docker-endpoint + TLS).
50
+ * @async
51
+ * @returns {Promise<void>}
52
+ * @throws {Error} If Docker/Compose cannot be used (includes underlying cause)
53
+ */
54
+ async function checkDockerAvailability() {
55
+ await ensureDevCertsIfNeededForRemoteDocker();
56
+ try {
57
+ await dockerUtils.ensureDockerAndCompose();
58
+ } catch (error) {
59
+ const detail = (error && error.message) || String(error);
60
+ throw new Error(formatDockerInfrastructureFailure(detail));
61
+ }
62
+ }
63
+
64
+ module.exports = {
65
+ formatDockerInfrastructureFailure,
66
+ checkDockerAvailability
67
+ };
@@ -17,7 +17,6 @@ const chalk = require('chalk');
17
17
  const handlebars = require('handlebars');
18
18
  const adminSecrets = require('../core/admin-secrets');
19
19
  const logger = require('../utils/logger');
20
- const dockerUtils = require('../utils/docker');
21
20
  const paths = require('../utils/paths');
22
21
  const secretsEnsure = require('../core/secrets-ensure');
23
22
  const {
@@ -25,7 +24,7 @@ const {
25
24
  getInfraParameterCatalog,
26
25
  readRelaxedCatalogDefaults
27
26
  } = require('../parameters/infra-parameter-catalog');
28
- const { ensureDevCertsIfNeededForRemoteDocker } = require('../utils/ensure-dev-certs-for-remote-docker');
27
+ const { checkDockerAvailability } = require('./helpers-docker-check');
29
28
 
30
29
  /**
31
30
  * Lazy-load core/secrets at call time. A top-level require creates a circular dependency:
@@ -59,58 +58,6 @@ function getInfraProjectName(devId) {
59
58
  return idNum === 0 ? 'infra' : `infra-dev${devId}`;
60
59
  }
61
60
 
62
- /**
63
- * User-facing error when Docker/Compose checks fail (tailored by underlying message).
64
- * @param {string} detail - Error message from ensureDockerAndCompose / Docker CLI
65
- * @returns {string}
66
- */
67
- function formatDockerInfrastructureFailure(detail) {
68
- const cause = (detail || '').trim() || 'unknown error';
69
-
70
- if (/Docker Compose is not available/i.test(cause)) {
71
- return (
72
- 'Cannot use Docker for infrastructure: Docker Compose check failed (see Cause below).\n\n' +
73
- `Cause: ${cause}\n\n` +
74
- 'If Cause mentions TLS, certificate, or handshake, fix client TLS for docker-endpoint (cert.pem, key.pem, ca.pem under ~/.aifabrix/certs/<developer-id>/) or docker-tls-skip-verify when appropriate. ' +
75
- 'If Cause suggests a missing plugin, install Docker Compose v2 for your user (docker CLI + plugin; no unix socket needed when using tcp:// docker-endpoint). ' +
76
- 'Or set AIFABRIX_COMPOSE_CMD. Run `aifabrix doctor` for diagnostics.'
77
- );
78
- }
79
-
80
- if (/AIFABRIX_COMPOSE_CMD/i.test(cause) && /is set but failed/i.test(cause)) {
81
- return (
82
- 'Cannot use Docker for infrastructure: AIFABRIX_COMPOSE_CMD failed.\n\n' +
83
- `Cause: ${cause}\n\n` +
84
- 'Unset or fix AIFABRIX_COMPOSE_CMD, or install a working Compose. Run `aifabrix doctor` for diagnostics.'
85
- );
86
- }
87
-
88
- return (
89
- 'Cannot use Docker for infrastructure (Docker CLI missing, Compose missing, or remote Docker misconfigured).\n\n' +
90
- `Cause: ${cause}\n\n` +
91
- 'Install Docker Engine and Compose on this machine (or set AIFABRIX_COMPOSE_CMD). ' +
92
- 'If you use docker-endpoint in dev config: install cert.pem, key.pem, and ca.pem for full TLS verify; use `aifabrix dev pin` / ' +
93
- '`dev init --pin` as needed; or enable TLS skip-verify (config or AIFABRIX_DOCKER_TLS_SKIP_VERIFY) when appropriate. ' +
94
- 'Run `aifabrix doctor` for diagnostics.'
95
- );
96
- }
97
-
98
- /**
99
- * Check Docker availability (local daemon or remote via docker-endpoint + TLS).
100
- * @async
101
- * @returns {Promise<void>}
102
- * @throws {Error} If Docker/Compose cannot be used (includes underlying cause)
103
- */
104
- async function checkDockerAvailability() {
105
- await ensureDevCertsIfNeededForRemoteDocker();
106
- try {
107
- await dockerUtils.ensureDockerAndCompose();
108
- } catch (error) {
109
- const detail = (error && error.message) || String(error);
110
- throw new Error(formatDockerInfrastructureFailure(detail));
111
- }
112
- }
113
-
114
61
  /**
115
62
  * Fallback for admin password/email when validated catalog load failed but YAML is still readable.
116
63
  * @returns {Record<string, string>}
@@ -289,12 +236,37 @@ async function ensureAdminSecrets(options = {}) {
289
236
  return adminSecretsPath;
290
237
  }
291
238
 
239
/** Host-side pgpass for pgAdmin bind mount (must exist before container starts so servers.json import succeeds). */
const PGPASS_BOOTSTRAP_BASENAME = '.pgpass.bootstrap';

/**
 * Writes pgpass next to servers.json for Docker bind mount into /pgadmin4 (not under /var/lib/pgadmin volume).
 * Prefer chown to pgAdmin UID so mode 600 is readable in the container; fall back to 644 if not root.
 *
 * @param {string} infraDir - Infrastructure directory path
 * @param {string} postgresPassword - PostgreSQL password for the pgpass line
 */
function writePgpassBootstrap(infraDir, postgresPassword) {
  const target = path.join(infraDir, PGPASS_BOOTSTRAP_BASENAME);
  const pgpassLine = `postgres:5432:postgres:pgadmin:${postgresPassword}\n`;
  fs.writeFileSync(target, pgpassLine, { mode: 0o600 });
  try {
    // pgAdmin container runs as UID/GID 5050 — chown keeps mode 600 readable in-container.
    fs.chownSync(target, 5050, 5050);
  } catch {
    // Not root (or platform without chown): relax to 644 so the container user can read it.
    try {
      fs.chmodSync(target, 0o644);
    } catch {
      // Ignore — container may still read depending on daemon / user namespace
    }
  }
}
263
+
292
264
  /**
293
- * Generates pgAdmin4 servers.json only. pgpass is not written to disk (ISO 27K);
294
- * it is created temporarily in startDockerServicesAndConfigure and deleted after copy to container.
265
+ * Generates pgAdmin4 servers.json only. pgpass for the container is supplied via bind-mounted
266
+ * `.pgpass.bootstrap` (written by writePgpassBootstrap); not embedded in servers.json.
295
267
  *
296
268
  * @param {string} infraDir - Infrastructure directory path
297
- * @param {string} postgresPassword - PostgreSQL password (for servers.json PassFile reference only; password not stored in file)
269
+ * @param {string} postgresPassword - Used only for consistency / future template fields (password is not written into servers.json)
298
270
  */
299
271
  function generatePgAdminConfig(infraDir, postgresPassword) {
300
272
  const serversJsonTemplatePath = path.join(__dirname, '..', '..', 'templates', 'infra', 'servers.json.hbs');
@@ -401,9 +373,10 @@ psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "miso" -c "GRANT AL
401
373
  * @async
402
374
  * @param {string} devId - Developer ID
403
375
  * @param {string} adminSecretsPath - Path to admin secrets file
376
+ * @param {{ pgpassBootstrap?: boolean }} [prepOptions] - When pgpassBootstrap is false, skip/remove host pgpass bootstrap file (pgAdmin disabled)
404
377
  * @returns {Promise<Object>} Object with infraDir and postgresPassword
405
378
  */
406
- async function prepareInfraDirectory(devId, adminSecretsPath) {
379
+ async function prepareInfraDirectory(devId, adminSecretsPath, prepOptions = {}) {
407
380
  const aifabrixDir = paths.getAifabrixSystemDir();
408
381
  const infraDirName = getInfraDirName(devId);
409
382
  const infraDir = path.join(aifabrixDir, infraDirName);
@@ -427,6 +400,20 @@ async function prepareInfraDirectory(devId, adminSecretsPath) {
427
400
  '';
428
401
  generatePgAdminConfig(infraDir, postgresPassword);
429
402
 
403
+ const pgpassBootstrap = prepOptions.pgpassBootstrap !== false;
404
+ const bootstrapPath = path.join(infraDir, PGPASS_BOOTSTRAP_BASENAME);
405
+ if (pgpassBootstrap) {
406
+ writePgpassBootstrap(infraDir, postgresPassword);
407
+ } else {
408
+ try {
409
+ if (fs.existsSync(bootstrapPath)) {
410
+ fs.unlinkSync(bootstrapPath);
411
+ }
412
+ } catch {
413
+ // Ignore
414
+ }
415
+ }
416
+
430
417
  return { infraDir, postgresPassword };
431
418
  }
432
419
 
@@ -483,6 +470,8 @@ module.exports = {
483
470
  checkDockerAvailability,
484
471
  ensureAdminSecrets,
485
472
  generatePgAdminConfig,
473
+ writePgpassBootstrap,
474
+ PGPASS_BOOTSTRAP_BASENAME,
486
475
  prepareInfraDirectory,
487
476
  resolveInfraStatePaths,
488
477
  ensureMisoInitScript,
@@ -107,7 +107,9 @@ async function prepareInfrastructureEnvironment(developerId, options = {}) {
107
107
  }
108
108
 
109
109
  // Prepare infrastructure directory
110
- const { infraDir } = await prepareInfraDirectory(devId, adminSecretsPath);
110
+ const { infraDir } = await prepareInfraDirectory(devId, adminSecretsPath, {
111
+ pgpassBootstrap: options.pgadmin !== false
112
+ });
111
113
  await ensureMisoInitScript(infraDir);
112
114
 
113
115
  return { devId, idNum, ports, templatePath, infraDir, adminSecretsPath, trustForwardedHeaders };
@@ -50,30 +50,6 @@ async function startDockerServices(composePath, projectName, adminSecretsPath, i
50
50
  logger.log('Infrastructure services started successfully');
51
51
  }
52
52
 
53
- /**
54
- * Copy pgAdmin4 configuration files into container
55
- * @async
56
- * @param {string} pgadminContainerName - pgAdmin container name
57
- * @param {string} serversJsonPath - Path to servers.json file
58
- * @param {string} pgpassPath - Path to pgpass file
59
- */
60
- async function copyPgAdminConfig(pgadminContainerName, serversJsonPath, pgpassPath) {
61
- const { execWithDockerEnv } = require('../utils/docker-exec');
62
- try {
63
- await new Promise(resolve => setTimeout(resolve, 2000)); // Wait for container to be ready
64
- if (fs.existsSync(serversJsonPath)) {
65
- await execWithDockerEnv(`docker cp "${serversJsonPath}" ${pgadminContainerName}:/pgadmin4/servers.json`);
66
- }
67
- if (fs.existsSync(pgpassPath)) {
68
- await execWithDockerEnv(`docker cp "${pgpassPath}" ${pgadminContainerName}:/pgpass`);
69
- await execWithDockerEnv(`docker exec ${pgadminContainerName} chmod 600 /pgpass`);
70
- }
71
- } catch (error) {
72
- // Ignore copy errors - files might already be there or container not ready
73
- logger.log('Note: Could not copy pgAdmin4 config files (this is OK if container was just restarted)');
74
- }
75
- }
76
-
77
53
  /**
78
54
  * Prepare run env file from decrypted admin secrets.
79
55
  * @async
@@ -89,34 +65,12 @@ async function prepareRunEnv(infraDir) {
89
65
  }
90
66
 
91
67
  /**
92
- * Write pgpass file and copy pgAdmin config into container.
93
- * @async
94
- * @param {string} infraDir - Infrastructure directory
95
- * @param {Object} adminObj - Decrypted admin secrets object
96
- * @param {string} devId - Developer ID
97
- * @param {number} idNum - Developer ID number
98
- * @returns {Promise<string>} Path to pgpass run file
99
- */
100
- async function writePgpassAndCopyPgAdminConfig(infraDir, adminObj, devId, idNum) {
101
- const pgpassRunPath = path.join(infraDir, '.pgpass.run');
102
- const pgadminContainerName = idNum === 0 ? 'aifabrix-pgadmin' : `aifabrix-dev${devId}-pgadmin`;
103
- const serversJsonPath = path.join(infraDir, 'servers.json');
104
- const postgresPassword = adminObj.POSTGRES_PASSWORD || '';
105
- const pgpassContent = `postgres:5432:postgres:pgadmin:${postgresPassword}\n`;
106
- fs.writeFileSync(pgpassRunPath, pgpassContent, { mode: 0o600 });
107
- await copyPgAdminConfig(pgadminContainerName, serversJsonPath, pgpassRunPath);
108
- return pgpassRunPath;
109
- }
110
-
111
- /**
112
- * Remove temporary run files (env and pgpass) if they exist.
68
+ * Remove temporary run files (env) if they exist.
113
69
  * @param {string} runEnvPath - Path to .env.run
114
- * @param {string} [pgpassRunPath] - Path to .pgpass.run
115
70
  */
116
- function cleanupRunFiles(runEnvPath, pgpassRunPath) {
71
+ function cleanupRunFiles(runEnvPath) {
117
72
  try {
118
73
  if (fs.existsSync(runEnvPath)) fs.unlinkSync(runEnvPath);
119
- if (pgpassRunPath && fs.existsSync(pgpassRunPath)) fs.unlinkSync(pgpassRunPath);
120
74
  } catch {
121
75
  // Ignore unlink errors
122
76
  }
@@ -136,11 +90,9 @@ function cleanupRunFiles(runEnvPath, pgpassRunPath) {
136
90
  */
137
91
  async function startDockerServicesAndConfigure(composePath, devId, idNum, infraDir, opts = {}) {
138
92
  let runEnvPath;
139
- let pgpassRunPath;
140
- let adminObj;
141
93
  const { pgadmin = true, redisCommander = true, traefik = false } = opts;
142
94
  try {
143
- ({ adminObj, runEnvPath } = await prepareRunEnv(infraDir));
95
+ ({ runEnvPath } = await prepareRunEnv(infraDir));
144
96
  } catch (err) {
145
97
  throw new Error(`Failed to prepare infra env: ${err.message}`);
146
98
  }
@@ -148,13 +100,10 @@ async function startDockerServicesAndConfigure(composePath, devId, idNum, infraD
148
100
  try {
149
101
  const projectName = getInfraProjectName(devId);
150
102
  await startDockerServices(composePath, projectName, runEnvPath, infraDir);
151
- if (pgadmin) {
152
- pgpassRunPath = await writePgpassAndCopyPgAdminConfig(infraDir, adminObj, devId, idNum);
153
- }
154
103
  await waitForServices(devId, { pgadmin, redisCommander, traefik });
155
104
  logger.log('All services are healthy and ready');
156
105
  } finally {
157
- cleanupRunFiles(runEnvPath, pgpassRunPath);
106
+ cleanupRunFiles(runEnvPath);
158
107
  }
159
108
  }
160
109
 
@@ -236,7 +185,6 @@ async function checkInfraHealth(devId = null, options = {}) {
236
185
  module.exports = {
237
186
  execAsyncWithCwd,
238
187
  startDockerServices,
239
- copyPgAdminConfig,
240
188
  startDockerServicesAndConfigure,
241
189
  waitForServices,
242
190
  checkInfraHealth
@@ -679,12 +679,15 @@
679
679
  "publicKey":{
680
680
  "type":"string",
681
681
  "minLength":1,
682
- "description":"Public key used to verify RS256 signatures. Private key must never appear in schema."
682
+ "description":"Public verification material (SPKI PEM for RS256; dev placeholder for HS256). Private keys must never appear in config."
683
683
  },
684
684
  "algorithm":{
685
685
  "type":"string",
686
- "const":"RS256",
687
- "description":"Signing algorithm. Must be RS256."
686
+ "enum":[
687
+ "RS256",
688
+ "HS256"
689
+ ],
690
+ "description":"Signing algorithm for certificate verification (RS256 in production; HS256 only for local dev when the dataplane uses the dev HMAC signer)."
688
691
  },
689
692
  "issuer":{
690
693
  "type":"string",
@@ -695,6 +698,25 @@
695
698
  "type":"string",
696
699
  "minLength":1,
697
700
  "description":"Certification version identifier; must align with external system versioning."
701
+ },
702
+ "status":{
703
+ "type":"string",
704
+ "enum":[
705
+ "passed",
706
+ "not_passed",
707
+ "pending"
708
+ ],
709
+ "description":"Outcome of certification evaluation for this block (aligns with DatasourceTestRun certificate.status)."
710
+ },
711
+ "level":{
712
+ "type":"string",
713
+ "enum":[
714
+ "BRONZE",
715
+ "SILVER",
716
+ "GOLD",
717
+ "PLATINUM"
718
+ ],
719
+ "description":"Achieved certification tier (aligns with integration certificate certificationLevel)."
698
720
  }
699
721
  },
700
722
  "additionalProperties":false
@@ -7,12 +7,12 @@
7
7
  "key": "document-storage-schema",
8
8
  "name": "Document Storage Configuration Schema",
9
9
  "description": "JSON schema for validating document storage configurations",
10
- "version": "1.2.0",
10
+ "version": "1.3.0",
11
11
  "type": "schema",
12
12
  "category": "document-storage",
13
13
  "author": "AI Fabrix Team",
14
14
  "createdAt": "2026-01-02T00:00:00Z",
15
- "updatedAt": "2026-03-31T00:00:00Z",
15
+ "updatedAt": "2026-04-22T00:00:00Z",
16
16
  "compatibility": {
17
17
  "minVersion": "1.0.0",
18
18
  "maxVersion": "2.0.0",
@@ -63,6 +63,14 @@
63
63
  "Added optional securityLevel classification field (public/internal/restricted/confidential)"
64
64
  ],
65
65
  "breaking": false
66
+ },
67
+ {
68
+ "version": "1.3.0",
69
+ "date": "2026-04-22T00:00:00Z",
70
+ "changes": [
71
+ "Added optional parameterLookupCoalesceNestedItemScope (boolean, default true) for manifest-controlled binary parameter lookup enrichment"
72
+ ],
73
+ "breaking": false
66
74
  }
67
75
  ]
68
76
  },
@@ -120,6 +128,11 @@
120
128
  "default": false,
121
129
  "description": "If true, removes fetch.query when applying binary retrieval path override."
122
130
  },
131
+ "parameterLookupCoalesceNestedItemScope": {
132
+ "type": "boolean",
133
+ "default": true,
134
+ "description": "When true, binary parameterMapping and HTTP path templates use a lookup view that merges metadata and coalesces storage-scope ids from a nested item parentReference when the row's parentReference omits them. Set false for strict manifest-only paths."
135
+ },
123
136
  "processing": {
124
137
  "type": "object",
125
138
  "properties": {
package/lib/utils/api.js CHANGED
@@ -16,6 +16,26 @@ const auditLogger = require('../core/audit-logger');
16
16
/** Default timeout for HTTP requests (ms). Prevents hanging when the controller is unreachable. 30s allows Azure Web App cold start to complete. */
const DEFAULT_REQUEST_TIMEOUT_MS = 30000;

/** Cap for optional per-request ``timeoutMs`` (validation run E2E can block on one POST). */
const MAX_SINGLE_REQUEST_TIMEOUT_MS = 45 * 60 * 1000;

/**
 * Resolve the per-request AbortSignal timeout from fetch options.
 * ``requestTimeoutMs`` wins over ``timeoutMs``; invalid or non-positive values
 * fall back to the default, and the result is capped at 45 minutes.
 * @param {Object} options - Same object passed to fetch (may include timeoutMs / requestTimeoutMs)
 * @returns {number} Timeout in milliseconds
 */
function resolveSingleRequestTimeoutMs(options) {
  const raw = options?.requestTimeoutMs ?? options?.timeoutMs;
  if (raw === undefined || raw === null || raw === '') {
    return DEFAULT_REQUEST_TIMEOUT_MS;
  }
  const parsed = typeof raw === 'number' ? raw : Number.parseInt(String(raw), 10);
  if (!Number.isFinite(parsed) || parsed <= 0) {
    return DEFAULT_REQUEST_TIMEOUT_MS;
  }
  return Math.min(parsed, MAX_SINGLE_REQUEST_TIMEOUT_MS);
}
38
+
19
39
  /**
20
40
  * Logs API request performance metrics and errors to audit log
21
41
  * @param {Object} params - Performance logging parameters
@@ -276,9 +296,11 @@ async function handleNetworkError(error, url, options, duration) {
276
296
 
277
297
  /**
278
298
  * Make an API call with proper error handling
279
- * Uses a 30s timeout to avoid hanging when the controller is unreachable (Azure cold start can exceed 5s).
299
+ * Uses a 30s timeout by default. Pass ``options.timeoutMs`` or ``options.requestTimeoutMs`` for
300
+ * longer single requests (e.g. dataplane validation run E2E POST); capped at 45 minutes.
280
301
  * @param {string} url - API endpoint URL
281
302
  * @param {Object} options - Fetch options (signal, method, headers, body, etc.)
303
+ * @param {number} [options.timeoutMs] - Optional per-request timeout (ms) when ``signal`` omitted
282
304
  * @returns {Promise<Object>} Response object with success flag
283
305
  */
284
306
  async function makeApiCall(url, options = {}) {
@@ -292,9 +314,12 @@ async function makeApiCall(url, options = {}) {
292
314
 
293
315
  const startTime = Date.now();
294
316
  const fetchOptions = { ...options };
317
+ const singleRequestTimeoutMs = resolveSingleRequestTimeoutMs(fetchOptions);
295
318
  if (!fetchOptions.signal) {
296
- fetchOptions.signal = AbortSignal.timeout(DEFAULT_REQUEST_TIMEOUT_MS);
319
+ fetchOptions.signal = AbortSignal.timeout(singleRequestTimeoutMs);
297
320
  }
321
+ delete fetchOptions.timeoutMs;
322
+ delete fetchOptions.requestTimeoutMs;
298
323
 
299
324
  try {
300
325
  const response = await fetch(url, fetchOptions);
@@ -309,7 +334,7 @@ async function makeApiCall(url, options = {}) {
309
334
  const duration = Date.now() - startTime;
310
335
  const error = err?.name === 'AbortError'
311
336
  ? new Error(
312
- `Request timed out after ${DEFAULT_REQUEST_TIMEOUT_MS / 1000} seconds. The controller may be unreachable. Check the URL and network.`
337
+ `Request timed out after ${Math.round(singleRequestTimeoutMs / 1000)} seconds. The controller may be unreachable. Check the URL and network.`
313
338
  )
314
339
  : err;
315
340
  return await handleNetworkError(error, url, options, duration);
@@ -10,7 +10,7 @@
10
10
  const path = require('path');
11
11
  const fs = require('fs');
12
12
  const { getIntegrationPath } = require('./paths');
13
- const { parseEnvToMap, resolveKvValue } = require('./credential-secrets-env');
13
+ const { parseEnvToMap } = require('./credential-secrets-env');
14
14
  const { loadSecrets, resolveKvReferences } = require('../core/secrets');
15
15
  const { loadEnvTemplate } = require('./secrets-helpers');
16
16
  const { getActualSecretsPath } = require('./secrets-path');
@@ -81,13 +81,13 @@ function substituteVarPlaceholders(value, envMap, systemKey) {
81
81
 
82
82
  /**
83
83
  * Resolves configuration array values in place by location: variable → {{VAR}} from envMap;
84
- * keyvault → kv:// from secrets. Does not log or expose secret values.
84
+ * keyvault → leaves kv:// as-is (secret value pushed separately by CLI). Does not log or expose secret values.
85
85
  *
86
86
  * @param {Array<{ name?: string, value?: string, location?: string }>} configArray - Configuration array (mutated)
87
87
  * @param {Object.<string, string>} envMap - Resolved env map from buildResolvedEnvMapForIntegration
88
- * @param {Object} secrets - Loaded secrets for kv:// resolution
88
+ * @param {Object} secrets - Loaded secrets (unused for keyvault config values; kept for backward compatibility)
89
89
  * @param {string} [systemKey] - System key for error messages
90
- * @throws {Error} If variable env is missing or keyvault secret unresolved (message never contains secret values)
90
+ * @throws {Error} If variable env is missing or keyvault value is not a kv:// reference (message never contains secret values)
91
91
  */
92
92
  function resolveConfigurationValues(configArray, envMap, secrets, systemKey) {
93
93
  if (!Array.isArray(configArray)) return;
@@ -101,11 +101,14 @@ function resolveConfigurationValues(configArray, envMap, secrets, systemKey) {
101
101
  }
102
102
  item.value = substituteVarPlaceholders(item.value, envMap, systemKey);
103
103
  } else if (location === 'keyvault') {
104
- const resolved = resolveKvValue(secrets, item.value);
105
- if (resolved === null || resolved === undefined) {
106
- throw new Error(`Unresolved keyvault reference for configuration '${item.name || 'unknown'}'.${hint}`);
104
+ if (!item.value.trim().startsWith('kv://')) {
105
+ throw new Error(
106
+ `Configuration entry '${item.name || 'unknown'}' has location 'keyvault' but value is not kv://. ` +
107
+ `Set value to a kv:// reference and push the secret with KV_* env vars.${hint}`
108
+ );
107
109
  }
108
- item.value = resolved;
110
+ // Intentionally do not resolve kv:// here. Upload keeps kv:// references in config and
111
+ // pushes secret values separately via the credential secret API.
109
112
  }
110
113
  }
111
114
  }
@@ -132,7 +132,7 @@ function kvEnvKeyToPath(envKey, systemKey) {
132
132
  * @param {Object.<string, string>} envMap - Key-value map from .env
133
133
  * @returns {Array<{ key: string, value: string }>} Items (key = kv://..., value = raw)
134
134
  */
135
- function collectKvEnvVarsAsSecretItems(envMap) {
135
+ function collectKvEnvVarsAsSecretItems(envMap, systemKey) {
136
136
  if (!envMap || typeof envMap !== 'object') {
137
137
  return [];
138
138
  }
@@ -144,7 +144,7 @@ function collectKvEnvVarsAsSecretItems(envMap) {
144
144
  if (value.startsWith('kv://') && isValidKvPath(value)) {
145
145
  kvPath = value;
146
146
  }
147
- if (!kvPath) kvPath = kvEnvKeyToPath(envKey);
147
+ if (!kvPath) kvPath = kvEnvKeyToPath(envKey, systemKey) || kvEnvKeyToPath(envKey);
148
148
  if (!kvPath) continue;
149
149
  items.push({ key: kvPath, value });
150
150
  }
@@ -223,12 +223,12 @@ function isValidKvPath(key) {
223
223
  * @param {Object} secrets - Loaded secrets
224
224
  * @param {Map<string, string>} itemsByKey - Mutable map to add items to
225
225
  */
226
- function buildItemsFromEnv(envFilePath, secrets, itemsByKey) {
226
+ function buildItemsFromEnv(envFilePath, secrets, itemsByKey, systemKey) {
227
227
  if (!envFilePath || typeof envFilePath !== 'string' || !fs.existsSync(envFilePath)) return;
228
228
  try {
229
229
  const content = fs.readFileSync(envFilePath, 'utf8');
230
230
  const envMap = parseEnvToMap(content);
231
- const fromEnv = collectKvEnvVarsAsSecretItems(envMap);
231
+ const fromEnv = collectKvEnvVarsAsSecretItems(envMap, systemKey);
232
232
  for (const { key, value } of fromEnv) {
233
233
  const resolved = resolveKvValue(secrets, value);
234
234
  // Skip placeholder: value that equals the kv path (e.g. from env.template) must not be pushed as the secret
@@ -320,7 +320,7 @@ async function pushCredentialSecrets(dataplaneUrl, authConfig, options = {}) {
320
320
  secrets = {};
321
321
  }
322
322
  const itemsByKey = new Map();
323
- buildItemsFromEnv(envFilePath, secrets, itemsByKey);
323
+ buildItemsFromEnv(envFilePath, secrets, itemsByKey, appName);
324
324
  buildItemsFromPayload(payload, secrets, itemsByKey);
325
325
 
326
326
  const items = Array.from(itemsByKey.entries())
@@ -0,0 +1,82 @@
1
+ /**
2
+ * @fileoverview Certificate / certification tier lines for DatasourceTestRun TTY output.
3
+ * @author AI Fabrix Team
4
+ * @version 2.0.0
5
+ */
6
+
7
+ 'use strict';
8
+
9
+ const chalk = require('chalk');
10
+ const { sectionTitle } = require('./cli-test-layout-chalk');
11
+
12
/** Normalize a certificate field to a trimmed string; '' for null/undefined. */
function trimCertificateField(v) {
  return v == null ? '' : String(v).trim();
}
16
+
17
/** Map an envelope certificate status to its one-character TTY glyph. */
function certificateEnvelopeGlyph(statusRaw) {
  switch (statusRaw) {
    case 'passed':
      return '✔';
    case 'not_passed':
      return '✖';
    default:
      // Any other non-empty status is a warning; empty renders as a blank slot.
      return statusRaw ? '⚠' : ' ';
  }
}
22
+
23
/** True when any certificate field (level/status/summary) or blocker exists to render. */
function certificateTTYHasContent(levelRaw, stRaw, summaryRaw, blockerCount) {
  if (blockerCount > 0) return true;
  return Boolean(levelRaw || stRaw || summaryRaw);
}
26
+
27
/**
 * Push the status/tier line and the summary line onto the TTY output buffer.
 * @param {string[]} lines - Output buffer (mutated)
 * @param {string} stRaw - Trimmed certificate status ('' when absent)
 * @param {string} levelRaw - Trimmed certification level ('' when absent)
 * @param {string} summaryRaw - Trimmed summary ('' when absent)
 */
function appendCertificateStatusAndSummaryLines(lines, stRaw, levelRaw, summaryRaw) {
  if (stRaw || levelRaw) {
    const glyph = certificateEnvelopeGlyph(stRaw);
    const tierSuffix = levelRaw ? ` — tier ${levelRaw}` : '';
    lines.push(chalk.white(` ${glyph} ${stRaw || 'unknown'}${tierSuffix}`));
  }
  if (summaryRaw) {
    lines.push(chalk.gray(` ${summaryRaw}`));
  }
}
43
+
44
/**
 * Push up to maxVisible blocker messages, then a "… and N more" overflow line.
 * @param {string[]} lines - Output buffer (mutated)
 * @param {Object[]} blockers - Blocker entries; only truthy .message values are rendered
 * @param {number} maxVisible - Maximum number of blocker lines to show
 */
function appendCertificateBlockerLines(lines, blockers, maxVisible) {
  const visible = Math.min(maxVisible, blockers.length);
  for (const blocker of blockers.slice(0, visible)) {
    const message = blocker && blocker.message ? String(blocker.message) : '';
    if (message) lines.push(chalk.yellow(` • ${message}`));
  }
  const hidden = blockers.length - visible;
  if (hidden > 0) {
    lines.push(chalk.gray(` … and ${hidden} more`));
  }
}
60
+
61
+ /**
62
+ * Certification / certificate tier (integration engine or E2E envelope after active cert attach).
63
+ * @param {string[]} lines
64
+ * @param {Object} envelope
65
+ */
66
+ function appendCertificateTTY(lines, envelope) {
67
+ const cert = envelope && envelope.certificate;
68
+ if (!cert || typeof cert !== 'object') return;
69
+ const levelRaw = trimCertificateField(cert.level);
70
+ const stRaw = trimCertificateField(cert.status);
71
+ const summaryRaw = trimCertificateField(cert.summary);
72
+ const blockers = Array.isArray(cert.blockers) ? cert.blockers : [];
73
+ if (!certificateTTYHasContent(levelRaw, stRaw, summaryRaw, blockers.length)) return;
74
+ lines.push('');
75
+ lines.push(sectionTitle('Certification:'));
76
+ appendCertificateStatusAndSummaryLines(lines, stRaw, levelRaw, summaryRaw);
77
+ appendCertificateBlockerLines(lines, blockers, 5);
78
+ }
79
+
80
+ module.exports = {
81
+ appendCertificateTTY
82
+ };