@tailor-platform/sdk 1.20.0 → 1.22.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78) hide show
  1. package/CHANGELOG.md +56 -0
  2. package/dist/application-CTQe2HSB.mjs +5723 -0
  3. package/dist/application-CTQe2HSB.mjs.map +1 -0
  4. package/dist/application-DdSu3baZ.mjs +8 -0
  5. package/dist/{brand-BZJCv6UY.mjs → brand-DyPrAzpM.mjs} +1 -1
  6. package/dist/{brand-BZJCv6UY.mjs.map → brand-DyPrAzpM.mjs.map} +1 -1
  7. package/dist/cli/index.mjs +544 -57
  8. package/dist/cli/index.mjs.map +1 -1
  9. package/dist/cli/lib.d.mts +50 -8
  10. package/dist/cli/lib.mjs +15 -16
  11. package/dist/cli/lib.mjs.map +1 -1
  12. package/dist/configure/index.d.mts +4 -4
  13. package/dist/configure/index.mjs +20 -4
  14. package/dist/configure/index.mjs.map +1 -1
  15. package/dist/{enum-constants-CGVvu3dd.mjs → enum-constants-B5Nl-yzx.mjs} +1 -1
  16. package/dist/{enum-constants-CGVvu3dd.mjs.map → enum-constants-B5Nl-yzx.mjs.map} +1 -1
  17. package/dist/{file-utils-GX_tGWl4.mjs → file-utils-sEOwAdJ4.mjs} +1 -1
  18. package/dist/{file-utils-GX_tGWl4.mjs.map → file-utils-sEOwAdJ4.mjs.map} +1 -1
  19. package/dist/{index-i6QUsr5p.d.mts → index-BGPX26_D.d.mts} +2 -2
  20. package/dist/{index-DcY0e3S5.d.mts → index-BiutQT7m.d.mts} +9 -10
  21. package/dist/{index-CGjiOz_W.d.mts → index-ClS0NClx.d.mts} +2 -2
  22. package/dist/{index-B86CIKCW.d.mts → index-Cwi86SUR.d.mts} +2 -2
  23. package/dist/{index-CIXOwe6g.d.mts → index-DPN_P0w3.d.mts} +2 -2
  24. package/dist/{interceptor-D8MeZOxX.mjs → interceptor-DiARwPfw.mjs} +1 -1
  25. package/dist/{interceptor-D8MeZOxX.mjs.map → interceptor-DiARwPfw.mjs.map} +1 -1
  26. package/dist/{job-2Q82qQ6N.mjs → job-CRavYLLk.mjs} +4 -24
  27. package/dist/job-CRavYLLk.mjs.map +1 -0
  28. package/dist/kysely/index.d.mts +2 -2
  29. package/dist/kysely/index.mjs +2 -2
  30. package/dist/kysely/index.mjs.map +1 -1
  31. package/dist/{kysely-type-Cpq5TNGY.mjs → kysely-type-CSlcwNFH.mjs} +1 -1
  32. package/dist/{kysely-type-Cpq5TNGY.mjs.map → kysely-type-CSlcwNFH.mjs.map} +1 -1
  33. package/dist/package-json-BI0ng3_5.mjs +3 -0
  34. package/dist/{package-json-3H5gfhA4.mjs → package-json-iVBhE5Ef.mjs} +1 -1
  35. package/dist/{package-json-3H5gfhA4.mjs.map → package-json-iVBhE5Ef.mjs.map} +1 -1
  36. package/dist/plugin/builtin/enum-constants/index.d.mts +2 -2
  37. package/dist/plugin/builtin/enum-constants/index.mjs +1 -1
  38. package/dist/plugin/builtin/file-utils/index.d.mts +2 -2
  39. package/dist/plugin/builtin/file-utils/index.mjs +1 -1
  40. package/dist/plugin/builtin/kysely-type/index.d.mts +2 -2
  41. package/dist/plugin/builtin/kysely-type/index.mjs +1 -1
  42. package/dist/plugin/builtin/seed/index.d.mts +2 -2
  43. package/dist/plugin/builtin/seed/index.mjs +1 -1
  44. package/dist/plugin/index.d.mts +1 -1
  45. package/dist/plugin/index.mjs +3 -3
  46. package/dist/plugin/index.mjs.map +1 -1
  47. package/dist/{update-DkpWgrzL.mjs → query-Bz2oDGhw.mjs} +1033 -258
  48. package/dist/query-Bz2oDGhw.mjs.map +1 -0
  49. package/dist/{schema-D5Cpd8fQ.mjs → schema-Cjm-OvPF.mjs} +2 -2
  50. package/dist/{schema-D5Cpd8fQ.mjs.map → schema-Cjm-OvPF.mjs.map} +1 -1
  51. package/dist/{seed-CeUEANfQ.mjs → seed-CXvCW3Xc.mjs} +3 -3
  52. package/dist/{seed-CeUEANfQ.mjs.map → seed-CXvCW3Xc.mjs.map} +1 -1
  53. package/dist/telemetry-BAxP8-PR.mjs +3 -0
  54. package/dist/{telemetry-DuBhnd0X.mjs → telemetry-C46fds1l.mjs} +2 -2
  55. package/dist/{telemetry-DuBhnd0X.mjs.map → telemetry-C46fds1l.mjs.map} +1 -1
  56. package/dist/{types-ClK_HJ0G.mjs → types-CBTSg-LK.mjs} +1 -1
  57. package/dist/{types-ClK_HJ0G.mjs.map → types-CBTSg-LK.mjs.map} +1 -1
  58. package/dist/{types-C14GuyPI.d.mts → types-DVMQNdTs.d.mts} +6 -2
  59. package/dist/{types-CNw4p8V7.d.mts → types-bcuNRo1Y.d.mts} +12 -1
  60. package/dist/utils/test/index.d.mts +42 -4
  61. package/dist/utils/test/index.mjs +78 -3
  62. package/dist/utils/test/index.mjs.map +1 -1
  63. package/docs/cli/function.md +83 -3
  64. package/docs/services/resolver.md +61 -0
  65. package/docs/services/tailordb.md +55 -0
  66. package/package.json +7 -5
  67. package/dist/application-Bli2ieqY.mjs +0 -102205
  68. package/dist/application-Bli2ieqY.mjs.map +0 -1
  69. package/dist/application-CZdieD3K.mjs +0 -9
  70. package/dist/chunk-CqAI0b6X.mjs +0 -47
  71. package/dist/jiti-DfS9jItj.mjs +0 -4482
  72. package/dist/jiti-DfS9jItj.mjs.map +0 -1
  73. package/dist/job-2Q82qQ6N.mjs.map +0 -1
  74. package/dist/package-json-DTDAqRRJ.mjs +0 -3
  75. package/dist/src-Bb1UVstT.mjs +0 -1038
  76. package/dist/src-Bb1UVstT.mjs.map +0 -1
  77. package/dist/telemetry-Dhzj9Ncm.mjs +0 -3
  78. package/dist/update-DkpWgrzL.mjs.map +0 -1
@@ -1,11 +1,11 @@
1
- import { t as db } from "./schema-D5Cpd8fQ.mjs";
2
- import { $ as TenantProviderConfig_TenantProviderType, A as WorkflowJobExecution_Status, D as userAgent, E as resolveStaticWebsiteUrls, F as TailorDBType_Permission_Permit, G as AuthInvokerSchema, H as ExecutorTargetType, I as TailorDBType_PermitAction, J as AuthSCIMAttribute_Mutability, K as AuthOAuth2Client_ClientType, L as PipelineResolver_OperationType, M as TailorDBGQLPermission_Operator, N as TailorDBGQLPermission_Permit, O as WorkspacePlatformUserRole, P as TailorDBType_Permission_Operator, R as IdPLang, T as platformBaseUrl, U as ExecutorTriggerType, V as ExecutorJobStatus, W as AuthIDPConfig_AuthType, X as AuthSCIMAttribute_Uniqueness, Y as AuthSCIMAttribute_Type, Z as AuthSCIMConfig_AuthorizationType, _ as loadWorkspaceId, a as buildExecutorArgsExpr, at as PageDirection, b as fetchAll, c as OAuth2ClientSchema, ct as logger, d as getDistDir, et as UserProfileProviderConfig_UserProfileProviderType, f as hashFile, g as loadOrganizationId, h as loadFolderId, i as loadConfig, it as FilterSchema, j as TailorDBGQLPermission_Action, k as WorkflowExecution_Status, l as stringifyFunction, lt as styles, m as loadAccessToken, n as generatePluginFilesIfNeeded, nt as ConditionSchema, o as buildResolverOperationHookExpr, ot as ApplicationSchemaUpdateAttemptStatus, q as AuthOAuth2Client_GrantType, r as loadApplication, rt as Condition_Operator, s as createExecutorService, st as Subgraph_ServiceType, t as defineApplication, tt as GetApplicationSchemaHealthResponse_ApplicationSchemaHealthStatus, u as createBundleCache, ut as symbols, v as readPlatformConfig, w as initOperatorClient, x as fetchMachineUserToken, y as writePlatformConfig, z as FunctionExecution_Status } from "./application-Bli2ieqY.mjs";
3
- import { t as readPackageJson } from "./package-json-3H5gfhA4.mjs";
4
- import { r as withSpan } from "./telemetry-DuBhnd0X.mjs";
1
+ import { t as db } from "./schema-Cjm-OvPF.mjs";
2
+ import { $ as AuthSCIMAttribute_Mutability, A as platformBaseUrl, B as TailorDBType_Permission_Permit, C as readPlatformConfig, E as fetchMachineUserToken, F as WorkflowJobExecution_Status, H as PipelineResolver_OperationType, I as TailorDBGQLPermission_Action, J as ExecutorTriggerType, K as ExecutorJobStatus, L as TailorDBGQLPermission_Operator, M as userAgent, N as WorkspacePlatformUserRole, P as WorkflowExecution_Status, Q as AuthOAuth2Client_GrantType, R as TailorDBGQLPermission_Permit, S as loadWorkspaceId, T as fetchAll, U as IdPLang, V as TailorDBType_PermitAction, W as FunctionExecution_Status, X as AuthInvokerSchema, Y as AuthIDPConfig_AuthType, Z as AuthOAuth2Client_ClientType, _ as hashFile, a as loadConfig, at as UserProfileProviderConfig_UserProfileProviderType, b as loadFolderId, ct as Condition_Operator, d as TailorDBTypeSchema, dt as ApplicationSchemaUpdateAttemptStatus, et as AuthSCIMAttribute_Type, f as stringifyFunction, ft as Subgraph_ServiceType, g as getDistDir, h as createBundleCache, ht as symbols, it as TenantProviderConfig_TenantProviderType, j as resolveStaticWebsiteUrls, k as initOperatorClient, l as OAuth2ClientSchema, lt as FilterSchema, m as loadFilesWithIgnores, mt as styles, n as generatePluginFilesIfNeeded, nt as AuthSCIMConfig_AuthorizationType, ot as GetApplicationSchemaHealthResponse_ApplicationSchemaHealthStatus, p as tailorUserMap, pt as logger, q as ExecutorTargetType, r as loadApplication, s as createExecutorService, st as ConditionSchema, t as defineApplication, tt as AuthSCIMAttribute_Uniqueness, ut as PageDirection, w as writePlatformConfig, x as loadOrganizationId, y as loadAccessToken, z as TailorDBType_Permission_Operator } from "./application-CTQe2HSB.mjs";
3
+ import { t as readPackageJson } from "./package-json-iVBhE5Ef.mjs";
4
+ import { r as withSpan } from "./telemetry-C46fds1l.mjs";
5
5
  import { createRequire } from "node:module";
6
6
  import { arg, defineCommand, runCommand } from "politty";
7
7
  import { z } from "zod";
8
- import * as fs$2 from "node:fs";
8
+ import * as fs$1 from "node:fs";
9
9
  import { parseEnv } from "node:util";
10
10
  import * as path from "pathe";
11
11
  import chalk from "chalk";
@@ -17,19 +17,48 @@ import { resolveTSConfig } from "pkg-types";
17
17
  import { findUpSync } from "find-up-simple";
18
18
  import ml from "multiline-ts";
19
19
  import * as crypto from "node:crypto";
20
+ import { pathToFileURL } from "node:url";
20
21
  import * as inflection from "inflection";
21
- import * as fs$1 from "node:fs/promises";
22
- import { glob } from "node:fs/promises";
23
22
  import * as rolldown from "rolldown";
23
+ import * as fs from "node:fs/promises";
24
+ import { glob } from "node:fs/promises";
24
25
  import { create, fromJson, toJson } from "@bufbuild/protobuf";
25
26
  import ora from "ora";
26
27
  import { setTimeout as setTimeout$1 } from "timers/promises";
27
28
  import { spawn } from "node:child_process";
28
29
  import { watch } from "chokidar";
29
30
  import * as madgeModule from "madge";
31
+ import { astVisitor, parse, toSql } from "pgsql-ast-parser";
30
32
 
31
33
  //#region src/cli/shared/errors.ts
32
34
  /**
35
+ * Format CLI error for output
36
+ * @param error - CLIError instance to format
37
+ * @returns Formatted error message
38
+ */
39
+ function formatError(error) {
40
+ const parts = [chalk.red(`Error${error.code ? ` [${error.code}]` : ""}: ${error.message}`)];
41
+ if (error.details) parts.push(`\n ${chalk.gray("Details:")} ${error.details}`);
42
+ if (error.suggestion) parts.push(`\n ${chalk.cyan("Suggestion:")} ${error.suggestion}`);
43
+ if (error.command) parts.push(`\n ${chalk.gray("Help:")} Run \`tailor-sdk ${error.command} --help\` for usage information.`);
44
+ return parts.join("");
45
+ }
46
+ /**
47
+ * Create a CLI error with formatted output
48
+ * @param options - Options to construct a CLIError
49
+ * @returns Constructed CLIError instance
50
+ */
51
+ function createCLIError(options) {
52
+ const error = new Error(options.message);
53
+ error.name = "CLIError";
54
+ error.code = options.code;
55
+ error.details = options.details;
56
+ error.suggestion = options.suggestion;
57
+ error.command = options.command;
58
+ error.format = () => formatError(error);
59
+ return error;
60
+ }
61
+ /**
33
62
  * Type guard to check if an error is a CLIError
34
63
  * @param error - Error to check
35
64
  * @returns True if the error is a CLIError
@@ -83,11 +112,11 @@ function loadEnvFiles(envFiles, envFilesIfExists) {
83
112
  const load = (files, required) => {
84
113
  for (const file of [files ?? []].flat()) {
85
114
  const envPath = path.resolve(process.cwd(), file);
86
- if (!fs$2.existsSync(envPath)) {
115
+ if (!fs$1.existsSync(envPath)) {
87
116
  if (required) throw new Error(`Environment file not found: ${envPath}`);
88
117
  continue;
89
118
  }
90
- const parsed = parseEnv(fs$2.readFileSync(envPath, "utf-8"));
119
+ const parsed = parseEnv(fs$1.readFileSync(envPath, "utf-8"));
91
120
  for (const [key, value] of Object.entries(parsed)) {
92
121
  if (originalEnvKeys.has(key)) continue;
93
122
  process.env[key] = value;
@@ -107,9 +136,19 @@ function loadEnvFiles(envFiles, envFilesIfExists) {
107
136
  const commonArgs = {
108
137
  "env-file": arg(z.string().optional(), {
109
138
  alias: "e",
110
- description: "Path to the environment file (error if not found)"
139
+ description: "Path to the environment file (error if not found)",
140
+ completion: {
141
+ type: "file",
142
+ matcher: [".env.*", ".env"]
143
+ }
144
+ }),
145
+ "env-file-if-exists": arg(z.string().optional(), {
146
+ description: "Path to the environment file (ignored if not found)",
147
+ completion: {
148
+ type: "file",
149
+ matcher: [".env.*", ".env"]
150
+ }
111
151
  }),
112
- "env-file-if-exists": arg(z.string().optional(), { description: "Path to the environment file (ignored if not found)" }),
113
152
  verbose: arg(z.boolean().default(false), { description: "Enable verbose logging" })
114
153
  };
115
154
  /**
@@ -118,22 +157,32 @@ const commonArgs = {
118
157
  const workspaceArgs = {
119
158
  "workspace-id": arg(z.string().optional(), {
120
159
  alias: "w",
121
- description: "Workspace ID"
160
+ description: "Workspace ID",
161
+ completion: { type: "none" }
122
162
  }),
123
163
  profile: arg(z.string().optional(), {
124
164
  alias: "p",
125
- description: "Workspace profile"
165
+ description: "Workspace profile",
166
+ completion: { type: "none" }
126
167
  })
127
168
  };
128
169
  /**
170
+ * Shared config arg for commands that accept a config file path
171
+ */
172
+ const configArg = { config: arg(z.string().default("tailor.config.ts"), {
173
+ alias: "c",
174
+ description: "Path to SDK config file",
175
+ completion: {
176
+ type: "file",
177
+ extensions: ["ts"]
178
+ }
179
+ }) };
180
+ /**
129
181
  * Arguments for commands that interact with deployed resources (includes config)
130
182
  */
131
183
  const deploymentArgs = {
132
184
  ...workspaceArgs,
133
- config: arg(z.string().default("tailor.config.ts"), {
134
- alias: "c",
135
- description: "Path to SDK config file"
136
- })
185
+ ...configArg
137
186
  };
138
187
  /**
139
188
  * Arguments for commands that require confirmation
@@ -162,7 +211,7 @@ const withCommonArgs = (handler) => async (args) => {
162
211
  try {
163
212
  if ("json" in args && typeof args.json === "boolean") logger.jsonMode = args.json;
164
213
  loadEnvFiles(args["env-file"], args["env-file-if-exists"]);
165
- const { initTelemetry } = await import("./telemetry-Dhzj9Ncm.mjs");
214
+ const { initTelemetry } = await import("./telemetry-BAxP8-PR.mjs");
166
215
  await initTelemetry();
167
216
  await handler(args);
168
217
  } catch (error) {
@@ -175,7 +224,7 @@ const withCommonArgs = (handler) => async (args) => {
175
224
  } else logger.error(`Unknown error: ${error}`);
176
225
  process.exit(1);
177
226
  } finally {
178
- const { shutdownTelemetry } = await import("./telemetry-Dhzj9Ncm.mjs");
227
+ const { shutdownTelemetry } = await import("./telemetry-BAxP8-PR.mjs");
179
228
  await shutdownTelemetry();
180
229
  }
181
230
  process.exit(0);
@@ -229,7 +278,7 @@ const apiCommand = defineCommand({
229
278
  positional: true,
230
279
  description: "API endpoint to call (e.g., 'GetApplication' or 'tailor.v1.OperatorService/GetApplication')"
231
280
  })
232
- }),
281
+ }).strict(),
233
282
  run: withCommonArgs(async (args) => {
234
283
  const result = await apiCall({
235
284
  profile: args.profile,
@@ -283,7 +332,7 @@ function createCacheStore(config) {
283
332
  }
284
333
  function loadManifest() {
285
334
  try {
286
- const raw = fs$2.readFileSync(manifestPath(), "utf-8");
335
+ const raw = fs$1.readFileSync(manifestPath(), "utf-8");
287
336
  const result = cacheManifestSchema.safeParse(JSON.parse(raw));
288
337
  if (!result.success) {
289
338
  cachedManifest = void 0;
@@ -310,15 +359,15 @@ function createCacheStore(config) {
310
359
  return cachedManifest;
311
360
  }
312
361
  function saveManifest(manifest) {
313
- fs$2.mkdirSync(config.cacheDir, { recursive: true });
362
+ fs$1.mkdirSync(config.cacheDir, { recursive: true });
314
363
  const target = manifestPath();
315
364
  const tmpFile = path.join(config.cacheDir, `.manifest.${process.pid}.tmp`);
316
365
  try {
317
- fs$2.writeFileSync(tmpFile, JSON.stringify(manifest, null, 2), "utf-8");
318
- fs$2.renameSync(tmpFile, target);
366
+ fs$1.writeFileSync(tmpFile, JSON.stringify(manifest, null, 2), "utf-8");
367
+ fs$1.renameSync(tmpFile, target);
319
368
  } catch (e) {
320
369
  try {
321
- fs$2.rmSync(tmpFile, { force: true });
370
+ fs$1.rmSync(tmpFile, { force: true });
322
371
  } catch {}
323
372
  throw e;
324
373
  }
@@ -337,29 +386,29 @@ function createCacheStore(config) {
337
386
  }
338
387
  function storeBundleOutput(cacheKey, sourcePath) {
339
388
  const dir = bundlesDir();
340
- fs$2.mkdirSync(dir, { recursive: true });
341
- fs$2.copyFileSync(sourcePath, bundlePath(cacheKey));
389
+ fs$1.mkdirSync(dir, { recursive: true });
390
+ fs$1.copyFileSync(sourcePath, bundlePath(cacheKey));
342
391
  const mapSource = `${sourcePath}.map`;
343
392
  const cachedMapPath = `${bundlePath(cacheKey)}.map`;
344
- if (fs$2.existsSync(mapSource)) fs$2.copyFileSync(mapSource, cachedMapPath);
345
- else fs$2.rmSync(cachedMapPath, { force: true });
393
+ if (fs$1.existsSync(mapSource)) fs$1.copyFileSync(mapSource, cachedMapPath);
394
+ else fs$1.rmSync(cachedMapPath, { force: true });
346
395
  }
347
396
  function restoreBundleOutput(cacheKey, targetPath) {
348
397
  const cached = bundlePath(cacheKey);
349
398
  const targetDir = path.dirname(targetPath);
350
- fs$2.mkdirSync(targetDir, { recursive: true });
399
+ fs$1.mkdirSync(targetDir, { recursive: true });
351
400
  try {
352
- fs$2.copyFileSync(cached, targetPath);
401
+ fs$1.copyFileSync(cached, targetPath);
353
402
  } catch (e) {
354
403
  if (e.code === "ENOENT") return false;
355
404
  throw e;
356
405
  }
357
406
  const cachedMap = `${cached}.map`;
358
- if (fs$2.existsSync(cachedMap)) fs$2.copyFileSync(cachedMap, `${targetPath}.map`);
407
+ if (fs$1.existsSync(cachedMap)) fs$1.copyFileSync(cachedMap, `${targetPath}.map`);
359
408
  return true;
360
409
  }
361
410
  function clean() {
362
- fs$2.rmSync(config.cacheDir, {
411
+ fs$1.rmSync(config.cacheDir, {
363
412
  recursive: true,
364
413
  force: true
365
414
  });
@@ -536,8 +585,8 @@ async function generateUserTypes(options) {
536
585
  if (env) logger.debug(`Extracted Env: ${JSON.stringify(env)}`);
537
586
  const typeDefContent = generateTypeDefinition(attributeMap, attributeList, env);
538
587
  const outputPath = resolveTypeDefinitionPath(configPath);
539
- fs$2.mkdirSync(path.dirname(outputPath), { recursive: true });
540
- fs$2.writeFileSync(outputPath, typeDefContent);
588
+ fs$1.mkdirSync(path.dirname(outputPath), { recursive: true });
589
+ fs$1.writeFileSync(outputPath, typeDefContent);
541
590
  const relativePath = path.relative(process.cwd(), outputPath);
542
591
  logger.newline();
543
592
  logger.success(`Generated type definitions: ${relativePath}`, { mode: "plain" });
@@ -552,7 +601,7 @@ function resolvePackageDirectory(startDir) {
552
601
  while (true) {
553
602
  const candidate = path.join(currentDir, "node_modules", "@tailor-platform", "sdk");
554
603
  const packageJsonPath = path.join(candidate, "package.json");
555
- if (fs$2.existsSync(packageJsonPath)) return candidate;
604
+ if (fs$1.existsSync(packageJsonPath)) return candidate;
556
605
  if (currentDir === root) break;
557
606
  currentDir = path.dirname(currentDir);
558
607
  }
@@ -999,6 +1048,10 @@ async function planApplication(context) {
999
1048
  changeSet.print();
1000
1049
  return changeSet;
1001
1050
  }
1051
+ if (application.subgraphs.length === 0) {
1052
+ changeSet.print();
1053
+ return changeSet;
1054
+ }
1002
1055
  let authNamespace;
1003
1056
  let authIdpConfigName;
1004
1057
  if (application.authService && application.authService.config) {
@@ -2352,6 +2405,73 @@ async function confirmImportantResourceDeletion(resources, yes) {
2352
2405
  `);
2353
2406
  }
2354
2407
 
2408
+ //#endregion
2409
+ //#region src/cli/shared/runtime-args.ts
2410
+ /**
2411
+ * Runtime args transformation for all services.
2412
+ *
2413
+ * Each service transforms server-side args/context into SDK-friendly format:
2414
+ * - Executor: server-side expression evaluated by platform before calling function
2415
+ * - Resolver: operationHook expression evaluated by platform before calling function
2416
+ *
2417
+ * The user field mapping (server → SDK) shared across services is defined in
2418
+ * `@/parser/service/tailordb` as `tailorUserMap`.
2419
+ */
2420
+ /**
2421
+ * Actor field transformation expression.
2422
+ *
2423
+ * Transforms the server's actor object to match the SDK's TailorActor type:
2424
+ * server `attributeMap` → SDK `attributes`
2425
+ * server `attributes` → SDK `attributeList`
2426
+ * other fields → passed through
2427
+ * null/undefined actor → null
2428
+ */
2429
+ const ACTOR_TRANSFORM_EXPR = "actor: args.actor ? (({ attributeMap, attributes: attrList, ...rest }) => ({ ...rest, attributes: attributeMap, attributeList: attrList }))(args.actor) : null";
2430
+ /**
2431
+ * Build the JavaScript expression that transforms server-format executor event
2432
+ * args into SDK-format args at runtime.
2433
+ *
2434
+ * The Tailor Platform server delivers event args with server-side field names.
2435
+ * The SDK exposes different field names to user code. This function produces a
2436
+ * JavaScript expression string that performs the mapping when evaluated
2437
+ * server-side.
2438
+ * @param triggerKind - The trigger kind discriminant from the parsed executor
2439
+ * @param env - Application env record to embed in the expression
2440
+ * @returns A JavaScript expression string, e.g. `({ ...args, ... })`
2441
+ */
2442
+ function buildExecutorArgsExpr(triggerKind, env) {
2443
+ const envExpr = `env: ${JSON.stringify(env)}`;
2444
+ switch (triggerKind) {
2445
+ case "schedule":
2446
+ case "recordCreated":
2447
+ case "recordUpdated":
2448
+ case "recordDeleted":
2449
+ case "idpUserCreated":
2450
+ case "idpUserUpdated":
2451
+ case "idpUserDeleted":
2452
+ case "authAccessTokenIssued":
2453
+ case "authAccessTokenRefreshed":
2454
+ case "authAccessTokenRevoked": return `({ ...args, appNamespace: args.namespaceName, ${ACTOR_TRANSFORM_EXPR}, ${envExpr} })`;
2455
+ case "resolverExecuted": return `({ ...args, appNamespace: args.namespaceName, ${ACTOR_TRANSFORM_EXPR}, success: !!args.succeeded, result: args.succeeded?.result.resolver, error: args.failed?.error, ${envExpr} })`;
2456
+ case "incomingWebhook": return `({ ...args, appNamespace: args.namespaceName, rawBody: args.raw_body, ${envExpr} })`;
2457
+ default: throw new Error(`Unknown trigger kind for args expression: ${triggerKind}`);
2458
+ }
2459
+ }
2460
+ /**
2461
+ * Build the operationHook expression for resolver pipelines.
2462
+ *
2463
+ * Transforms server context to SDK resolver context:
2464
+ * context.args → input
2465
+ * context.pipeline → spread into result
2466
+ * user (global var) → TailorUser (via tailorUserMap: workspace_id→workspaceId, attribute_map→attributes, attributes→attributeList)
2467
+ * env → injected as JSON
2468
+ * @param env - Application env record to embed in the expression
2469
+ * @returns A JavaScript expression string for the operationHook
2470
+ */
2471
+ function buildResolverOperationHookExpr(env) {
2472
+ return `({ ...context.pipeline, input: context.args, user: ${tailorUserMap}, env: ${JSON.stringify(env)} });`;
2473
+ }
2474
+
2355
2475
  //#endregion
2356
2476
  //#region src/cli/commands/apply/function-registry.ts
2357
2477
  const CHUNK_SIZE = 64 * 1024;
@@ -2403,7 +2523,7 @@ function collectFunctionEntries(application, workflowJobs) {
2403
2523
  for (const app of application.applications) for (const pipeline of app.resolverServices) for (const resolver of Object.values(pipeline.resolvers)) {
2404
2524
  const scriptPath = path.join(distDir, "resolvers", `${resolver.name}.js`);
2405
2525
  try {
2406
- const content = fs$2.readFileSync(scriptPath, "utf-8");
2526
+ const content = fs$1.readFileSync(scriptPath, "utf-8");
2407
2527
  entries.push({
2408
2528
  name: resolverFunctionName(pipeline.namespace, resolver.name),
2409
2529
  scriptContent: content,
@@ -2419,7 +2539,7 @@ function collectFunctionEntries(application, workflowJobs) {
2419
2539
  for (const executor of Object.values(executors)) if (executor.operation.kind === "function" || executor.operation.kind === "jobFunction") {
2420
2540
  const scriptPath = path.join(distDir, "executors", `${executor.name}.js`);
2421
2541
  try {
2422
- const content = fs$2.readFileSync(scriptPath, "utf-8");
2542
+ const content = fs$1.readFileSync(scriptPath, "utf-8");
2423
2543
  entries.push({
2424
2544
  name: executorFunctionName(executor.name),
2425
2545
  scriptContent: content,
@@ -2434,7 +2554,7 @@ function collectFunctionEntries(application, workflowJobs) {
2434
2554
  for (const job of workflowJobs) {
2435
2555
  const scriptPath = path.join(distDir, "workflow-jobs", `${job.name}.js`);
2436
2556
  try {
2437
- const content = fs$2.readFileSync(scriptPath, "utf-8");
2557
+ const content = fs$1.readFileSync(scriptPath, "utf-8");
2438
2558
  entries.push({
2439
2559
  name: workflowJobFunctionName(job.name),
2440
2560
  scriptContent: content,
@@ -3040,6 +3160,7 @@ async function planResolvers(client, workspaceId, pipelines, executors, deletedS
3040
3160
  };
3041
3161
  const executorUsedResolvers = /* @__PURE__ */ new Set();
3042
3162
  for (const executor of executors) if (executor.trigger.kind === "resolverExecuted") executorUsedResolvers.add(executor.trigger.resolverName);
3163
+ for (const pipeline of pipelines) for (const resolver of Object.values(pipeline.resolvers)) if (executorUsedResolvers.has(resolver.name) && resolver.publishEvents === false) throw new Error(`Resolver "${resolver.name}" has publishEvents set to false, but it is used by an executor with a resolverExecuted trigger. Either remove the publishEvents: false setting or remove the executor trigger for this resolver.`);
3043
3164
  for (const pipeline of pipelines) {
3044
3165
  const existingResolvers = await fetchResolvers(pipeline.namespace);
3045
3166
  const existingNameSet = /* @__PURE__ */ new Set();
@@ -3102,15 +3223,19 @@ function processResolver(namespace, resolver, executorUsedResolvers, env) {
3102
3223
  const response = protoFields({ "": resolver.output }, `${typeBaseName}Output`, false)[0];
3103
3224
  const resolverDescription = resolver.description || `${resolver.name} resolver`;
3104
3225
  const outputDescription = resolver.output.metadata.description;
3226
+ const combinedDescription = outputDescription ? `${resolverDescription}\n\nReturns:\n${outputDescription}` : resolverDescription;
3227
+ let publishExecutionEvents = false;
3228
+ if (resolver.publishEvents !== void 0) publishExecutionEvents = resolver.publishEvents;
3229
+ else if (executorUsedResolvers.has(resolver.name)) publishExecutionEvents = true;
3105
3230
  return {
3106
3231
  authorization: "true==true",
3107
- description: outputDescription ? `${resolverDescription}\n\nReturns:\n${outputDescription}` : resolverDescription,
3232
+ description: combinedDescription,
3108
3233
  inputs,
3109
3234
  name: resolver.name,
3110
3235
  operationType: resolver.operation,
3111
3236
  response,
3112
3237
  pipelines,
3113
- publishExecutionEvents: executorUsedResolvers.has(resolver.name)
3238
+ publishExecutionEvents
3114
3239
  };
3115
3240
  }
3116
3241
  function protoFields(fields, baseName, isInput) {
@@ -3639,6 +3764,7 @@ function createSnapshotType(type) {
3639
3764
  ...ops.read !== void 0 && { read: ops.read }
3640
3765
  };
3641
3766
  }
3767
+ if (type.settings.publishEvents !== void 0) snapshotType.settings.publishEvents = type.settings.publishEvents;
3642
3768
  }
3643
3769
  if (type.indexes && Object.keys(type.indexes).length > 0) {
3644
3770
  snapshotType.indexes = {};
@@ -3719,7 +3845,7 @@ function createSnapshotFromLocalTypes(types, namespace) {
3719
3845
  * @returns {SchemaSnapshot} Loaded schema snapshot
3720
3846
  */
3721
3847
  function loadSnapshot(filePath) {
3722
- const content = fs$2.readFileSync(filePath, "utf-8");
3848
+ const content = fs$1.readFileSync(filePath, "utf-8");
3723
3849
  return JSON.parse(content);
3724
3850
  }
3725
3851
  /**
@@ -3728,7 +3854,7 @@ function loadSnapshot(filePath) {
3728
3854
  * @returns {MigrationDiff} Loaded migration diff
3729
3855
  */
3730
3856
  function loadDiff(filePath) {
3731
- const content = fs$2.readFileSync(filePath, "utf-8");
3857
+ const content = fs$1.readFileSync(filePath, "utf-8");
3732
3858
  return JSON.parse(content);
3733
3859
  }
3734
3860
  /**
@@ -3737,8 +3863,8 @@ function loadDiff(filePath) {
3737
3863
  * @returns {Array<{number: number, type: "schema" | "diff", path: string}>} Migration files sorted by number
3738
3864
  */
3739
3865
  function getMigrationFiles(migrationsDir) {
3740
- if (!fs$2.existsSync(migrationsDir)) return [];
3741
- const entries = fs$2.readdirSync(migrationsDir, { withFileTypes: true });
3866
+ if (!fs$1.existsSync(migrationsDir)) return [];
3867
+ const entries = fs$1.readdirSync(migrationsDir, { withFileTypes: true });
3742
3868
  const migrations = [];
3743
3869
  for (const entry of entries) {
3744
3870
  if (!entry.isDirectory()) continue;
@@ -3746,13 +3872,13 @@ function getMigrationFiles(migrationsDir) {
3746
3872
  const num = parseInt(entry.name, 10);
3747
3873
  const migrationDir = path.join(migrationsDir, entry.name);
3748
3874
  const schemaPath = path.join(migrationDir, SCHEMA_FILE_NAME);
3749
- if (fs$2.existsSync(schemaPath)) migrations.push({
3875
+ if (fs$1.existsSync(schemaPath)) migrations.push({
3750
3876
  number: num,
3751
3877
  type: "schema",
3752
3878
  path: schemaPath
3753
3879
  });
3754
3880
  const diffPath = path.join(migrationDir, DIFF_FILE_NAME);
3755
- if (fs$2.existsSync(diffPath)) migrations.push({
3881
+ if (fs$1.existsSync(diffPath)) migrations.push({
3756
3882
  number: num,
3757
3883
  type: "diff",
3758
3884
  path: diffPath
@@ -4324,7 +4450,7 @@ function compareLocalTypesWithSnapshot(snapshot, localTypes, namespace) {
4324
4450
  */
4325
4451
  function validateMigrationFiles(migrationsDir) {
4326
4452
  const errors = [];
4327
- if (!fs$2.existsSync(migrationsDir)) return errors;
4453
+ if (!fs$1.existsSync(migrationsDir)) return errors;
4328
4454
  const migrationFiles = getMigrationFiles(migrationsDir);
4329
4455
  if (migrationFiles.length === 0) return errors;
4330
4456
  const schemaFiles = [];
@@ -4567,7 +4693,7 @@ function formatSchemaDrifts(drifts) {
4567
4693
  */
4568
4694
  async function bundleMigrationScript(sourceFile, namespace, migrationNumber) {
4569
4695
  const outputDir = path.resolve(getDistDir(), "migrations");
4570
- fs$2.mkdirSync(outputDir, { recursive: true });
4696
+ fs$1.mkdirSync(outputDir, { recursive: true });
4571
4697
  const entryPath = path.join(outputDir, `migration_${namespace}_${migrationNumber}.entry.js`);
4572
4698
  const outputPath = path.join(outputDir, `migration_${namespace}_${migrationNumber}.js`);
4573
4699
  const entryContent = ml`
@@ -4589,7 +4715,7 @@ async function bundleMigrationScript(sourceFile, namespace, migrationNumber) {
4589
4715
  return { success: true };
4590
4716
  }
4591
4717
  `;
4592
- fs$2.writeFileSync(entryPath, entryContent);
4718
+ fs$1.writeFileSync(entryPath, entryContent);
4593
4719
  let tsconfig;
4594
4720
  try {
4595
4721
  tsconfig = await resolveTSConfig();
@@ -4619,7 +4745,7 @@ async function bundleMigrationScript(sourceFile, namespace, migrationNumber) {
4619
4745
  return {
4620
4746
  namespace,
4621
4747
  migrationNumber,
4622
- bundledCode: fs$2.readFileSync(outputPath, "utf-8")
4748
+ bundledCode: fs$1.readFileSync(outputPath, "utf-8")
4623
4749
  };
4624
4750
  }
4625
4751
 
@@ -4681,7 +4807,7 @@ async function waitForExecution$1(client, workspaceId, executionId, pollInterval
4681
4807
  logs: execution.logs,
4682
4808
  result: execution.result
4683
4809
  };
4684
- await new Promise((resolve) => setTimeout(resolve, pollInterval));
4810
+ await new Promise((resolve$1) => setTimeout(resolve$1, pollInterval));
4685
4811
  }
4686
4812
  }
4687
4813
  /**
@@ -4696,28 +4822,26 @@ async function waitForExecution$1(client, workspaceId, executionId, pollInterval
4696
4822
  */
4697
4823
  async function executeScript(options) {
4698
4824
  const { client, workspaceId, name, code, arg: arg$1, invoker, pollInterval } = options;
4699
- const executionId = (await client.testExecScript({
4825
+ const response = await client.testExecScript({
4700
4826
  workspaceId,
4701
4827
  name,
4702
4828
  code,
4703
4829
  arg: arg$1 ?? JSON.stringify({}),
4704
4830
  invoker
4705
- })).executionId;
4831
+ });
4832
+ const executionId = response.executionId;
4706
4833
  const result = await waitForExecution$1(client, workspaceId, executionId, pollInterval);
4707
4834
  if (result.status === FunctionExecution_Status.SUCCESS) return {
4708
4835
  success: true,
4709
4836
  logs: result.logs,
4710
4837
  result: result.result
4711
4838
  };
4712
- else {
4713
- const errorDetails = [result.logs, result.result].filter(Boolean).join("\n");
4714
- return {
4715
- success: false,
4716
- logs: result.logs,
4717
- result: result.result,
4718
- error: errorDetails || "Script execution failed with unknown error"
4719
- };
4720
- }
4839
+ else return {
4840
+ success: false,
4841
+ logs: result.logs,
4842
+ result: result.result || response.result,
4843
+ error: result.result || response.result || "Script execution failed with unknown error"
4844
+ };
4721
4845
  }
4722
4846
 
4723
4847
  //#endregion
@@ -4760,10 +4884,10 @@ async function detectPendingMigrations(client, workspaceId, namespacesWithMigrat
4760
4884
  for (const file of migrationFiles) {
4761
4885
  if (file.number <= currentMigration) continue;
4762
4886
  const diffPath = getMigrationFilePath(migrationsDir, file.number, "diff");
4763
- if (!fs$2.existsSync(diffPath)) continue;
4887
+ if (!fs$1.existsSync(diffPath)) continue;
4764
4888
  const diff = loadDiff(diffPath);
4765
4889
  const scriptPath = getMigrationFilePath(migrationsDir, file.number, "migrate");
4766
- if (diff.requiresMigrationScript && !fs$2.existsSync(scriptPath)) {
4890
+ if (diff.requiresMigrationScript && !fs$1.existsSync(scriptPath)) {
4767
4891
  logger.warn(`Migration ${namespace}/${file.number} requires a script but migrate.ts not found`);
4768
4892
  continue;
4769
4893
  }
@@ -5498,6 +5622,13 @@ async function planTypes(client, workspaceId, tailordbs, executors, deletedServi
5498
5622
  };
5499
5623
  const executorUsedTypes = /* @__PURE__ */ new Set();
5500
5624
  for (const executor of executors) if (executor.trigger.kind === "recordCreated" || executor.trigger.kind === "recordUpdated" || executor.trigger.kind === "recordDeleted") executorUsedTypes.add(executor.trigger.typeName);
5625
+ for (const tailordb of tailordbs) {
5626
+ const types = filteredTypesByNamespace?.get(tailordb.namespace) ?? tailordb.types;
5627
+ for (const typeName of Object.keys(types)) {
5628
+ const type = types[typeName];
5629
+ if (executorUsedTypes.has(typeName) && type.settings?.publishEvents === false) throw new Error(`Type "${typeName}" has publishEvents set to false, but it is used by an executor with a record trigger. Either remove the publishEvents: false setting or remove the executor trigger for this type.`);
5630
+ }
5631
+ }
5501
5632
  for (const tailordb of tailordbs) {
5502
5633
  const existingTypes = await fetchTypes(tailordb.namespace);
5503
5634
  const existingNameSet = /* @__PURE__ */ new Set();
@@ -5565,7 +5696,8 @@ function generateTailorDBTypeManifest(type, executorUsedTypes, namespaceGqlOpera
5565
5696
  pluralForm,
5566
5697
  publishRecordEvents: false
5567
5698
  };
5568
- if (executorUsedTypes.has(type.name)) defaultSettings.publishRecordEvents = true;
5699
+ if (type.settings?.publishEvents !== void 0) defaultSettings.publishRecordEvents = type.settings.publishEvents;
5700
+ else if (executorUsedTypes.has(type.name)) defaultSettings.publishRecordEvents = true;
5569
5701
  const ops = type.settings?.gqlOperations ?? namespaceGqlOperations;
5570
5702
  if (ops) defaultSettings.disableGqlOperations = {
5571
5703
  create: ops.create === false,
@@ -6214,7 +6346,7 @@ async function apply(options) {
6214
6346
  const packageJson = await readPackageJson();
6215
6347
  const cacheDir = path.resolve(getDistDir(), "cache");
6216
6348
  if (options?.cleanCache) {
6217
- fs$2.rmSync(cacheDir, {
6349
+ fs$1.rmSync(cacheDir, {
6218
6350
  recursive: true,
6219
6351
  force: true
6220
6352
  });
@@ -6704,7 +6836,7 @@ const getCommand$3 = defineCommand({
6704
6836
  ...jsonArgs,
6705
6837
  ...workspaceArgs,
6706
6838
  ...nameArgs$1
6707
- }),
6839
+ }).strict(),
6708
6840
  run: withCommonArgs(async (args) => {
6709
6841
  const executor = await getExecutor({
6710
6842
  name: args.name,
@@ -6907,7 +7039,7 @@ function toWorkflowExecutionInfo(execution) {
6907
7039
  //#endregion
6908
7040
  //#region src/cli/commands/workflow/executions.ts
6909
7041
  function sleep$1(ms) {
6910
- return new Promise((resolve) => setTimeout(resolve, ms));
7042
+ return new Promise((resolve$1) => setTimeout(resolve$1, ms));
6911
7043
  }
6912
7044
  function formatTime$2(date) {
6913
7045
  return date.toLocaleTimeString("en-US", { hour12: false });
@@ -7122,7 +7254,7 @@ const executionsCommand = defineCommand({
7122
7254
  }),
7123
7255
  ...waitArgs,
7124
7256
  logs: arg(z.boolean().default(false), { description: "Display job execution logs (detail mode only)" })
7125
- }),
7257
+ }).strict(),
7126
7258
  run: withCommonArgs(async (args) => {
7127
7259
  if (args.executionId) {
7128
7260
  const interval = parseDuration(args.interval);
@@ -7191,7 +7323,7 @@ const getCommand$2 = defineCommand({
7191
7323
  ...jsonArgs,
7192
7324
  ...workspaceArgs,
7193
7325
  ...nameArgs
7194
- }),
7326
+ }).strict(),
7195
7327
  run: withCommonArgs(async (args) => {
7196
7328
  const workflow = await getWorkflow({
7197
7329
  name: args.name,
@@ -7205,7 +7337,7 @@ const getCommand$2 = defineCommand({
7205
7337
  //#endregion
7206
7338
  //#region src/cli/commands/workflow/start.ts
7207
7339
  function sleep(ms) {
7208
- return new Promise((resolve) => setTimeout(resolve, ms));
7340
+ return new Promise((resolve$1) => setTimeout(resolve$1, ms));
7209
7341
  }
7210
7342
  function formatTime$1(date) {
7211
7343
  return date.toLocaleTimeString("en-US", { hour12: false });
@@ -7377,7 +7509,7 @@ const startCommand = defineCommand({
7377
7509
  description: "Workflow argument (JSON string)"
7378
7510
  }),
7379
7511
  ...waitArgs
7380
- }),
7512
+ }).strict(),
7381
7513
  run: withCommonArgs(async (args) => {
7382
7514
  const { executionId, wait } = await startWorkflowByName({
7383
7515
  name: args.name,
@@ -7711,7 +7843,7 @@ const jobsCommand = defineCommand({
7711
7843
  description: "Display function execution logs after completion (requires --wait)"
7712
7844
  }),
7713
7845
  limit: arg(positiveIntArg.optional(), { description: "Maximum number of jobs to list (default: 50, max: 1000) (list mode only)" })
7714
- }),
7846
+ }).strict(),
7715
7847
  run: withCommonArgs(async (args) => {
7716
7848
  if (args.jobId) {
7717
7849
  if (args.wait) {
@@ -7815,7 +7947,7 @@ const listCommand$6 = defineCommand({
7815
7947
  ...commonArgs,
7816
7948
  ...jsonArgs,
7817
7949
  ...workspaceArgs
7818
- }),
7950
+ }).strict(),
7819
7951
  run: withCommonArgs(async (args) => {
7820
7952
  const executors = await listExecutors({
7821
7953
  workspaceId: args["workspace-id"],
@@ -7960,7 +8092,7 @@ The \`--logs\` option displays logs from the downstream execution when available
7960
8092
  alias: "l",
7961
8093
  description: "Display function execution logs after completion (requires --wait)"
7962
8094
  })
7963
- }),
8095
+ }).strict(),
7964
8096
  run: withCommonArgs(async (args) => {
7965
8097
  const client = await initOperatorClient(await loadAccessToken({
7966
8098
  useProfile: true,
@@ -8090,7 +8222,7 @@ const listWebhookCommand = defineCommand({
8090
8222
  ...commonArgs,
8091
8223
  ...jsonArgs,
8092
8224
  ...workspaceArgs
8093
- }),
8225
+ }).strict(),
8094
8226
  run: withCommonArgs(async (args) => {
8095
8227
  const executors = await listWebhookExecutors({
8096
8228
  workspaceId: args["workspace-id"],
@@ -8518,7 +8650,7 @@ function createDependencyWatcher(options = {}) {
8518
8650
  function createGenerationManager(params) {
8519
8651
  const { application, config, generators = [], pluginManager } = params;
8520
8652
  const baseDir = path.join(getDistDir(), "generated");
8521
- fs$2.mkdirSync(baseDir, { recursive: true });
8653
+ fs$1.mkdirSync(baseDir, { recursive: true });
8522
8654
  const services = {
8523
8655
  tailordb: {},
8524
8656
  resolver: {},
@@ -8527,18 +8659,8 @@ function createGenerationManager(params) {
8527
8659
  let watcher = null;
8528
8660
  const generatorResults = {};
8529
8661
  const generationPlugins = pluginManager?.getPluginsWithGenerationHooks() ?? [];
8530
- function getDeps(gen) {
8531
- return new Set(gen.dependencies);
8532
- }
8533
- function onlyHas(gen, ...required) {
8534
- const deps = getDeps(gen);
8535
- return required.every((r) => deps.has(r)) && deps.size === required.length;
8536
- }
8537
- function hasAll(gen, ...required) {
8538
- return required.every((r) => getDeps(gen).has(r));
8539
- }
8540
- function hasNone(gen, ...excluded) {
8541
- return excluded.every((e) => !getDeps(gen).has(e));
8662
+ function getReadyGenerators(dep) {
8663
+ return generators.filter((g) => g.dependencies.includes(dep));
8542
8664
  }
8543
8665
  function getAuthInput() {
8544
8666
  const authService = application.authService;
@@ -8748,14 +8870,14 @@ function createGenerationManager(params) {
8748
8870
  */
8749
8871
  async function writeGeneratedFiles(sourceId, result) {
8750
8872
  await Promise.all(result.files.map(async (file) => {
8751
- fs$2.mkdirSync(path.dirname(file.path), { recursive: true });
8752
- return new Promise((resolve, reject) => {
8753
- if (file.skipIfExists && fs$2.existsSync(file.path)) {
8873
+ fs$1.mkdirSync(path.dirname(file.path), { recursive: true });
8874
+ return new Promise((resolve$1, reject) => {
8875
+ if (file.skipIfExists && fs$1.existsSync(file.path)) {
8754
8876
  const relativePath = path.relative(process.cwd(), file.path);
8755
8877
  logger.debug(`${sourceId} | skip existing: ${relativePath}`);
8756
- return resolve();
8878
+ return resolve$1();
8757
8879
  }
8758
- fs$2.writeFile(file.path, file.content, (err) => {
8880
+ fs$1.writeFile(file.path, file.content, (err) => {
8759
8881
  if (err) {
8760
8882
  const relativePath = path.relative(process.cwd(), file.path);
8761
8883
  logger.error(`Error writing file ${styles.bold(relativePath)}`);
@@ -8764,15 +8886,15 @@ function createGenerationManager(params) {
8764
8886
  } else {
8765
8887
  const relativePath = path.relative(process.cwd(), file.path);
8766
8888
  logger.log(`${sourceId} | generate: ${styles.success(relativePath)}`);
8767
- if (file.executable) fs$2.chmod(file.path, 493, (chmodErr) => {
8889
+ if (file.executable) fs$1.chmod(file.path, 493, (chmodErr) => {
8768
8890
  if (chmodErr) {
8769
8891
  const relativePath$1 = path.relative(process.cwd(), file.path);
8770
8892
  logger.error(`Error setting executable permission on ${styles.bold(relativePath$1)}`);
8771
8893
  logger.error(String(chmodErr));
8772
8894
  reject(chmodErr);
8773
- } else resolve();
8895
+ } else resolve$1();
8774
8896
  });
8775
- else resolve();
8897
+ else resolve$1();
8776
8898
  }
8777
8899
  });
8778
8900
  });
@@ -8793,13 +8915,15 @@ function createGenerationManager(params) {
8793
8915
  }
8794
8916
  async function runGenerators(gens, watch$1) {
8795
8917
  const results = await Promise.allSettled(gens.map(async (gen) => {
8796
- try {
8797
- await processGenerator(gen);
8798
- } catch (error) {
8799
- logger.error(`Error processing generator ${styles.bold(gen.id)}`);
8800
- logger.error(String(error));
8801
- if (!watch$1) throw error;
8802
- }
8918
+ await withSpan(`generate.generator.${gen.id}`, async () => {
8919
+ try {
8920
+ await processGenerator(gen);
8921
+ } catch (error) {
8922
+ logger.error(`Error processing generator ${styles.bold(gen.id)}`);
8923
+ logger.error(String(error));
8924
+ if (!watch$1) throw error;
8925
+ }
8926
+ });
8803
8927
  }));
8804
8928
  if (!watch$1) {
8805
8929
  const failures = results.filter((r) => r.status === "rejected");
@@ -8845,64 +8969,86 @@ function createGenerationManager(params) {
8845
8969
  logger.newline();
8846
8970
  logger.log(`Generation for application: ${styles.highlight(application.config.name)}`);
8847
8971
  const app = application;
8848
- for (const db$1 of app.tailorDBServices) {
8849
- const namespace = db$1.namespace;
8850
- try {
8851
- await db$1.loadTypes();
8852
- await db$1.processNamespacePlugins();
8853
- services.tailordb[namespace] = {
8854
- types: db$1.types,
8855
- sourceInfo: db$1.typeSourceInfo,
8856
- pluginAttachments: db$1.pluginAttachments
8857
- };
8858
- } catch (error) {
8859
- logger.error(`Error loading types for TailorDB service ${styles.bold(namespace)}`);
8860
- logger.error(String(error));
8861
- if (!watch$1) throw error;
8972
+ await withSpan("generate.loadTailorDBTypes", async (span) => {
8973
+ span.setAttribute("generate.namespace_count", app.tailorDBServices.length);
8974
+ for (const db$1 of app.tailorDBServices) {
8975
+ const namespace = db$1.namespace;
8976
+ await withSpan(`generate.loadTypes.${namespace}`, async () => {
8977
+ try {
8978
+ await db$1.loadTypes();
8979
+ await db$1.processNamespacePlugins();
8980
+ services.tailordb[namespace] = {
8981
+ types: db$1.types,
8982
+ sourceInfo: db$1.typeSourceInfo,
8983
+ pluginAttachments: db$1.pluginAttachments
8984
+ };
8985
+ } catch (error) {
8986
+ logger.error(`Error loading types for TailorDB service ${styles.bold(namespace)}`);
8987
+ logger.error(String(error));
8988
+ if (!watch$1) throw error;
8989
+ }
8990
+ });
8862
8991
  }
8863
- }
8864
- const pluginExecutorFiles = generatePluginFilesIfNeeded(pluginManager, app.tailorDBServices, config.path);
8865
- const executorService = app.executorService ?? (pluginExecutorFiles.length > 0 ? createExecutorService({ config: { files: [] } }) : void 0);
8866
- if (app.authService) await app.authService.resolveNamespaces();
8992
+ });
8993
+ const { pluginExecutorFiles, executorService } = await withSpan("generate.pluginFiles", async () => {
8994
+ const pluginExecutorFiles$1 = generatePluginFilesIfNeeded(pluginManager, app.tailorDBServices, config.path);
8995
+ return {
8996
+ pluginExecutorFiles: pluginExecutorFiles$1,
8997
+ executorService: app.executorService ?? (pluginExecutorFiles$1.length > 0 ? createExecutorService({ config: { files: [] } }) : void 0)
8998
+ };
8999
+ });
9000
+ if (app.authService) await withSpan("generate.resolveAuthNamespaces", async () => app.authService.resolveNamespaces());
8867
9001
  if (app.tailorDBServices.length > 0 || pluginExecutorFiles.length > 0) logger.newline();
8868
- const tailordbOnlyGens = generators.filter((g) => onlyHas(g, "tailordb"));
9002
+ const readyAfterTailorDB = getReadyGenerators("tailordb");
8869
9003
  const hasOnTailorDBReady = generationPlugins.some((p) => p.onTailorDBReady != null);
8870
- if (tailordbOnlyGens.length > 0 || hasOnTailorDBReady) {
8871
- await Promise.all([runGenerators(tailordbOnlyGens, watch$1), runPluginHook("onTailorDBReady", watch$1)]);
9004
+ if (readyAfterTailorDB.length > 0 || hasOnTailorDBReady) {
9005
+ await withSpan("generate.onTailorDBReady", async () => {
9006
+ await Promise.all([runGenerators(readyAfterTailorDB, watch$1), runPluginHook("onTailorDBReady", watch$1)]);
9007
+ });
8872
9008
  logger.newline();
8873
9009
  }
8874
- for (const resolverService of app.resolverServices) {
8875
- const namespace = resolverService.namespace;
8876
- try {
8877
- await resolverService.loadResolvers();
8878
- services.resolver[namespace] = {};
8879
- Object.entries(resolverService.resolvers).forEach(([_, resolver]) => {
8880
- services.resolver[namespace][resolver.name] = resolver;
9010
+ await withSpan("generate.loadResolvers", async () => {
9011
+ for (const resolverService of app.resolverServices) {
9012
+ const namespace = resolverService.namespace;
9013
+ await withSpan(`generate.loadResolvers.${namespace}`, async () => {
9014
+ try {
9015
+ await resolverService.loadResolvers();
9016
+ services.resolver[namespace] = {};
9017
+ Object.entries(resolverService.resolvers).forEach(([_, resolver]) => {
9018
+ services.resolver[namespace][resolver.name] = resolver;
9019
+ });
9020
+ } catch (error) {
9021
+ logger.error(`Error loading resolvers for Resolver service ${styles.bold(namespace)}`);
9022
+ logger.error(String(error));
9023
+ if (!watch$1) throw error;
9024
+ }
8881
9025
  });
8882
- } catch (error) {
8883
- logger.error(`Error loading resolvers for Resolver service ${styles.bold(namespace)}`);
8884
- logger.error(String(error));
8885
- if (!watch$1) throw error;
8886
9026
  }
8887
- }
8888
- const nonExecutorGens = generators.filter((g) => !tailordbOnlyGens.includes(g) && hasNone(g, "executor"));
9027
+ });
9028
+ const readyAfterResolvers = getReadyGenerators("resolver");
8889
9029
  const hasOnResolverReady = generationPlugins.some((p) => p.onResolverReady != null);
8890
- if (nonExecutorGens.length > 0 || hasOnResolverReady) {
8891
- await Promise.all([runGenerators(nonExecutorGens, watch$1), runPluginHook("onResolverReady", watch$1)]);
9030
+ if (readyAfterResolvers.length > 0 || hasOnResolverReady) {
9031
+ await withSpan("generate.onResolversReady", async () => {
9032
+ await Promise.all([runGenerators(readyAfterResolvers, watch$1), runPluginHook("onResolverReady", watch$1)]);
9033
+ });
8892
9034
  logger.newline();
8893
9035
  }
8894
- if (executorService) {
8895
- await executorService.loadExecutors();
8896
- if (pluginExecutorFiles.length > 0) await executorService.loadPluginExecutorFiles([...pluginExecutorFiles]);
8897
- }
8898
- const allExecutors = executorService?.executors ?? {};
8899
- Object.entries(allExecutors).forEach(([key, executor]) => {
8900
- services.executor[key] = executor;
9036
+ await withSpan("generate.loadExecutors", async () => {
9037
+ if (executorService) {
9038
+ await executorService.loadExecutors();
9039
+ if (pluginExecutorFiles.length > 0) await executorService.loadPluginExecutorFiles([...pluginExecutorFiles]);
9040
+ }
9041
+ const allExecutors = executorService?.executors ?? {};
9042
+ Object.entries(allExecutors).forEach(([key, executor]) => {
9043
+ services.executor[key] = executor;
9044
+ });
8901
9045
  });
8902
- const executorGens = generators.filter((g) => hasAll(g, "executor"));
9046
+ const readyAfterExecutors = getReadyGenerators("executor");
8903
9047
  const hasOnExecutorReady = generationPlugins.some((p) => p.onExecutorReady != null);
8904
- if (executorGens.length > 0 || hasOnExecutorReady) {
8905
- await Promise.all([runGenerators(executorGens, watch$1), runPluginHook("onExecutorReady", watch$1)]);
9048
+ if (readyAfterExecutors.length > 0 || hasOnExecutorReady) {
9049
+ await withSpan("generate.onExecutorsReady", async () => {
9050
+ await Promise.all([runGenerators(readyAfterExecutors, watch$1), runPluginHook("onExecutorReady", watch$1)]);
9051
+ });
8906
9052
  logger.newline();
8907
9053
  }
8908
9054
  },
@@ -8931,25 +9077,31 @@ function createGenerationManager(params) {
8931
9077
  * @returns Promise that resolves when generation (and watch, if enabled) completes
8932
9078
  */
8933
9079
  async function generate$1(options) {
8934
- const { config, generators, plugins } = await loadConfig(options?.configPath);
8935
- const watch$1 = options?.watch ?? false;
8936
- await generateUserTypes({
8937
- config,
8938
- configPath: config.path
8939
- });
8940
- let pluginManager;
8941
- if (plugins.length > 0) pluginManager = new PluginManager(plugins);
8942
- const manager = createGenerationManager({
8943
- application: defineApplication({
9080
+ return withSpan("generate", async (rootSpan) => {
9081
+ const { config, generators, plugins } = await withSpan("generate.loadConfig", async () => loadConfig(options?.configPath));
9082
+ const watch$1 = options?.watch ?? false;
9083
+ rootSpan.setAttribute("generate.watch", watch$1);
9084
+ rootSpan.setAttribute("generate.generators.count", generators.length);
9085
+ await withSpan("generate.generateUserTypes", async () => generateUserTypes({
9086
+ config,
9087
+ configPath: config.path
9088
+ }));
9089
+ let pluginManager;
9090
+ if (plugins.length > 0) pluginManager = new PluginManager(plugins);
9091
+ const application = defineApplication({
8944
9092
  config,
8945
9093
  pluginManager
8946
- }),
8947
- config,
8948
- generators,
8949
- pluginManager
9094
+ });
9095
+ rootSpan.setAttribute("app.name", application.config.name);
9096
+ const manager = createGenerationManager({
9097
+ application,
9098
+ config,
9099
+ generators,
9100
+ pluginManager
9101
+ });
9102
+ await manager.generate(watch$1);
9103
+ if (watch$1) await manager.watch();
8950
9104
  });
8951
- await manager.generate(watch$1);
8952
- if (watch$1) await manager.watch();
8953
9105
  }
8954
9106
 
8955
9107
  //#endregion
@@ -9006,7 +9158,7 @@ const listCommand$5 = defineCommand({
9006
9158
  ...commonArgs,
9007
9159
  ...jsonArgs,
9008
9160
  ...deploymentArgs
9009
- }),
9161
+ }).strict(),
9010
9162
  run: withCommonArgs(async (args) => {
9011
9163
  const machineUsers = await listMachineUsers({
9012
9164
  workspaceId: args["workspace-id"],
@@ -9068,7 +9220,7 @@ const tokenCommand = defineCommand({
9068
9220
  positional: true,
9069
9221
  description: "Machine user name"
9070
9222
  })
9071
- }),
9223
+ }).strict(),
9072
9224
  run: withCommonArgs(async (args) => {
9073
9225
  const token = await getMachineUserToken({
9074
9226
  name: args.name,
@@ -9171,7 +9323,7 @@ const getCommand$1 = defineCommand({
9171
9323
  positional: true,
9172
9324
  description: "OAuth2 client name"
9173
9325
  })
9174
- }),
9326
+ }).strict(),
9175
9327
  run: withCommonArgs(async (args) => {
9176
9328
  const credentials = await getOAuth2Client({
9177
9329
  name: args.name,
@@ -9222,7 +9374,7 @@ const listCommand$4 = defineCommand({
9222
9374
  ...commonArgs,
9223
9375
  ...jsonArgs,
9224
9376
  ...deploymentArgs
9225
- }),
9377
+ }).strict(),
9226
9378
  run: withCommonArgs(async (args) => {
9227
9379
  const oauth2Clients = await listOAuth2Clients({
9228
9380
  workspaceId: args["workspace-id"],
@@ -9235,7 +9387,7 @@ const listCommand$4 = defineCommand({
9235
9387
 
9236
9388
  //#endregion
9237
9389
  //#region src/cli/commands/remove.ts
9238
- async function loadOptions$9(options) {
9390
+ async function loadOptions$10(options) {
9239
9391
  const client = await initOperatorClient(await loadAccessToken({
9240
9392
  useProfile: true,
9241
9393
  profile: options?.profile
@@ -9291,7 +9443,7 @@ async function execRemove(client, workspaceId, application, config, confirm) {
9291
9443
  * @returns Promise that resolves when removal completes
9292
9444
  */
9293
9445
  async function remove(options) {
9294
- const { client, workspaceId, application, config } = await loadOptions$9(options);
9446
+ const { client, workspaceId, application, config } = await loadOptions$10(options);
9295
9447
  await execRemove(client, workspaceId, application, config);
9296
9448
  }
9297
9449
  const removeCommand$1 = defineCommand({
@@ -9301,9 +9453,9 @@ const removeCommand$1 = defineCommand({
9301
9453
  ...commonArgs,
9302
9454
  ...deploymentArgs,
9303
9455
  ...confirmationArgs
9304
- }),
9456
+ }).strict(),
9305
9457
  run: withCommonArgs(async (args) => {
9306
- const { client, workspaceId, application, config } = await loadOptions$9({
9458
+ const { client, workspaceId, application, config } = await loadOptions$10({
9307
9459
  workspaceId: args["workspace-id"],
9308
9460
  profile: args.profile,
9309
9461
  configPath: args.config
@@ -9375,7 +9527,7 @@ const showCommand = defineCommand({
9375
9527
  ...commonArgs,
9376
9528
  ...jsonArgs,
9377
9529
  ...deploymentArgs
9378
- }),
9530
+ }).strict(),
9379
9531
  run: withCommonArgs(async (args) => {
9380
9532
  const appInfo$1 = await show({
9381
9533
  workspaceId: args["workspace-id"],
@@ -9636,7 +9788,7 @@ function generateFieldType(config, isOptionalToRequired, enumValueChange) {
9636
9788
  async function writeDbTypesFile(snapshot, migrationsDir, migrationNumber, diff) {
9637
9789
  const content = generateDbTypesFromSnapshot(snapshot, diff);
9638
9790
  const filePath = getMigrationFilePath(migrationsDir, migrationNumber, "db");
9639
- await fs$1.writeFile(filePath, content);
9791
+ await fs.writeFile(filePath, content);
9640
9792
  return filePath;
9641
9793
  }
9642
9794
 
@@ -9658,7 +9810,7 @@ async function writeDbTypesFile(snapshot, migrationsDir, migrationNumber, diff)
9658
9810
  */
9659
9811
  async function fileExists(filePath) {
9660
9812
  try {
9661
- await fs$1.access(filePath);
9813
+ await fs.access(filePath);
9662
9814
  return true;
9663
9815
  } catch {
9664
9816
  return false;
@@ -9681,10 +9833,10 @@ async function ensureFileNotExists(filePath) {
9681
9833
  */
9682
9834
  async function generateSchemaFile(snapshot, migrationsDir, migrationNumber) {
9683
9835
  const migrationDir = getMigrationDirPath(migrationsDir, migrationNumber);
9684
- await fs$1.mkdir(migrationDir, { recursive: true });
9836
+ await fs.mkdir(migrationDir, { recursive: true });
9685
9837
  const filePath = getMigrationFilePath(migrationsDir, migrationNumber, "schema");
9686
9838
  await ensureFileNotExists(filePath);
9687
- await fs$1.writeFile(filePath, JSON.stringify(snapshot, null, 2));
9839
+ await fs.writeFile(filePath, JSON.stringify(snapshot, null, 2));
9688
9840
  return {
9689
9841
  filePath,
9690
9842
  migrationNumber
@@ -9701,7 +9853,7 @@ async function generateSchemaFile(snapshot, migrationsDir, migrationNumber) {
9701
9853
  */
9702
9854
  async function generateDiffFiles(diff, migrationsDir, migrationNumber, previousSnapshot, description) {
9703
9855
  const migrationDir = getMigrationDirPath(migrationsDir, migrationNumber);
9704
- await fs$1.mkdir(migrationDir, { recursive: true });
9856
+ await fs.mkdir(migrationDir, { recursive: true });
9705
9857
  const diffFilePath = getMigrationFilePath(migrationsDir, migrationNumber, "diff");
9706
9858
  const migrateFilePath = getMigrationFilePath(migrationsDir, migrationNumber, "migrate");
9707
9859
  const dbTypesFilePath = getMigrationFilePath(migrationsDir, migrationNumber, "db");
@@ -9714,14 +9866,14 @@ async function generateDiffFiles(diff, migrationsDir, migrationNumber, previousS
9714
9866
  ...diff,
9715
9867
  description
9716
9868
  };
9717
- await fs$1.writeFile(diffFilePath, JSON.stringify(diff, null, 2));
9869
+ await fs.writeFile(diffFilePath, JSON.stringify(diff, null, 2));
9718
9870
  const result = {
9719
9871
  diffFilePath,
9720
9872
  migrationNumber
9721
9873
  };
9722
9874
  if (diff.requiresMigrationScript) {
9723
9875
  const scriptContent = generateMigrationScript(diff);
9724
- await fs$1.writeFile(migrateFilePath, scriptContent);
9876
+ await fs.writeFile(migrateFilePath, scriptContent);
9725
9877
  result.migrateFilePath = migrateFilePath;
9726
9878
  await writeDbTypesFile(previousSnapshot, migrationsDir, migrationNumber, diff);
9727
9879
  result.dbTypesFilePath = dbTypesFilePath;
@@ -9856,7 +10008,7 @@ function generateChangeScript(change) {
9856
10008
  * @returns {Promise<void>}
9857
10009
  */
9858
10010
  async function handleInitOption(namespaces, skipConfirmation) {
9859
- const existingDirs = namespaces.filter(({ migrationsDir }) => fs$2.existsSync(migrationsDir));
10011
+ const existingDirs = namespaces.filter(({ migrationsDir }) => fs$1.existsSync(migrationsDir));
9860
10012
  if (existingDirs.length === 0) {
9861
10013
  logger.info("No existing migration directories found.");
9862
10014
  return;
@@ -9876,7 +10028,7 @@ async function handleInitOption(namespaces, skipConfirmation) {
9876
10028
  logger.newline();
9877
10029
  }
9878
10030
  for (const { namespace, migrationsDir } of existingDirs) try {
9879
- await fs$1.rm(migrationsDir, {
10031
+ await fs.rm(migrationsDir, {
9880
10032
  recursive: true,
9881
10033
  force: true
9882
10034
  });
@@ -9906,7 +10058,7 @@ async function generate(options) {
9906
10058
  if (options.init) await handleInitOption(namespacesWithMigrations, options.yes);
9907
10059
  let pluginManager;
9908
10060
  if (plugins.length > 0) pluginManager = new PluginManager(plugins);
9909
- const { defineApplication: defineApplication$1 } = await import("./application-CZdieD3K.mjs");
10061
+ const { defineApplication: defineApplication$1 } = await import("./application-DdSu3baZ.mjs");
9910
10062
  const application = defineApplication$1({
9911
10063
  config,
9912
10064
  pluginManager
@@ -10017,7 +10169,7 @@ async function openInEditor(filePath) {
10017
10169
  const editor = process.env.EDITOR;
10018
10170
  if (!editor) return;
10019
10171
  try {
10020
- await fs$1.access(filePath);
10172
+ await fs.access(filePath);
10021
10173
  } catch {
10022
10174
  return;
10023
10175
  }
@@ -10028,9 +10180,9 @@ async function openInEditor(filePath) {
10028
10180
  stdio: "inherit",
10029
10181
  detached: false
10030
10182
  });
10031
- await new Promise((resolve) => {
10032
- child.on("close", () => resolve());
10033
- child.on("error", () => resolve());
10183
+ await new Promise((resolve$1) => {
10184
+ child.on("close", () => resolve$1());
10185
+ child.on("error", () => resolve$1());
10034
10186
  });
10035
10187
  }
10036
10188
  /**
@@ -10042,16 +10194,13 @@ const generateCommand = defineCommand({
10042
10194
  args: z.object({
10043
10195
  ...commonArgs,
10044
10196
  ...confirmationArgs,
10045
- config: arg(z.string().default("tailor.config.ts"), {
10046
- alias: "c",
10047
- description: "Path to SDK config file"
10048
- }),
10197
+ ...configArg,
10049
10198
  name: arg(z.string().optional(), {
10050
10199
  alias: "n",
10051
10200
  description: "Optional description for the migration"
10052
10201
  }),
10053
10202
  init: arg(z.boolean().default(false), { description: "Delete existing migrations and start fresh" })
10054
- }),
10203
+ }).strict(),
10055
10204
  run: withCommonArgs(async (args) => {
10056
10205
  await generate({
10057
10206
  configPath: args.config,
@@ -10062,6 +10211,63 @@ const generateCommand = defineCommand({
10062
10211
  })
10063
10212
  });
10064
10213
 
10214
+ //#endregion
10215
+ //#region src/cli/shared/config.ts
10216
+ /**
10217
+ * Extracts all configured namespace names from loaded application config.
10218
+ * Currently namespaces are derived from the `db` section.
10219
+ * @param config - Loaded application configuration.
10220
+ * @returns Namespace names in insertion order.
10221
+ */
10222
+ function extractAllNamespaces(config) {
10223
+ const namespaces = /* @__PURE__ */ new Set();
10224
+ if (config.db) for (const [namespaceName] of Object.entries(config.db)) namespaces.add(namespaceName);
10225
+ return Array.from(namespaces);
10226
+ }
10227
+
10228
+ //#endregion
10229
+ //#region src/cli/shared/tailordb-namespace.ts
10230
+ /**
10231
+ * Resolve TailorDB type names to namespace names.
10232
+ * @param args - Resolution inputs
10233
+ * @returns Type to namespace map for found types
10234
+ */
10235
+ async function resolveTypeNamespaces(args) {
10236
+ const requestedTypesByLowercase = /* @__PURE__ */ new Map();
10237
+ for (const typeName of args.typeNames) {
10238
+ const key = typeName.toLowerCase();
10239
+ const existing = requestedTypesByLowercase.get(key);
10240
+ if (existing) {
10241
+ existing.push(typeName);
10242
+ continue;
10243
+ }
10244
+ requestedTypesByLowercase.set(key, [typeName]);
10245
+ }
10246
+ const unresolvedTypes = new Set(args.typeNames);
10247
+ const typeNamespaceMap = /* @__PURE__ */ new Map();
10248
+ for (const namespace of args.namespaces) {
10249
+ if (unresolvedTypes.size === 0) break;
10250
+ try {
10251
+ const { tailordbTypes } = await args.client.listTailorDBTypes({
10252
+ workspaceId: args.workspaceId,
10253
+ namespaceName: namespace
10254
+ });
10255
+ for (const type of tailordbTypes) {
10256
+ const matchedRequestedTypes = requestedTypesByLowercase.get(type.name.toLowerCase());
10257
+ if (!matchedRequestedTypes) continue;
10258
+ for (const requestedTypeName of matchedRequestedTypes) {
10259
+ if (typeNamespaceMap.has(requestedTypeName)) continue;
10260
+ typeNamespaceMap.set(requestedTypeName, namespace);
10261
+ unresolvedTypes.delete(requestedTypeName);
10262
+ }
10263
+ }
10264
+ } catch {
10265
+ continue;
10266
+ }
10267
+ }
10268
+ return typeNamespaceMap;
10269
+ }
10270
+
10065
10271
  //#endregion
10066
10272
  //#region src/cli/commands/tailordb/truncate.ts
10067
10273
  async function truncateSingleType(options, client) {
@@ -10079,25 +10285,6 @@ async function truncateNamespace(workspaceId, namespaceName, client) {
10079
10285
  });
10080
10286
  logger.success(`Truncated all types in namespace "${namespaceName}"`);
10081
10287
  }
10082
- async function getAllNamespaces(configPath) {
10083
- const { config } = await loadConfig(configPath);
10084
- const namespaces = /* @__PURE__ */ new Set();
10085
- if (config.db) for (const [namespaceName] of Object.entries(config.db)) namespaces.add(namespaceName);
10086
- return Array.from(namespaces);
10087
- }
10088
- async function getTypeNamespace(workspaceId, typeName, client, configPath) {
10089
- const namespaces = await getAllNamespaces(configPath);
10090
- for (const namespace of namespaces) try {
10091
- const { tailordbTypes } = await client.listTailorDBTypes({
10092
- workspaceId,
10093
- namespaceName: namespace
10094
- });
10095
- if (tailordbTypes.some((type) => type.name === typeName)) return namespace;
10096
- } catch {
10097
- continue;
10098
- }
10099
- return null;
10100
- }
10101
10288
  /**
10102
10289
  * Truncate TailorDB data based on the given options.
10103
10290
  * @param options - Truncate options (all, namespace, or types)
@@ -10128,7 +10315,8 @@ async function $truncate(options) {
10128
10315
  ].filter(Boolean).length;
10129
10316
  if (optionCount === 0) throw new Error("Please specify one of: --all, --namespace <name>, or type names");
10130
10317
  if (optionCount > 1) throw new Error("Options --all, --namespace, and type names are mutually exclusive. Please specify only one.");
10131
- const namespaces = await getAllNamespaces(options?.configPath);
10318
+ const { config } = await loadConfig(options?.configPath);
10319
+ const namespaces = extractAllNamespaces(config);
10132
10320
  if (hasAll) {
10133
10321
  if (namespaces.length === 0) {
10134
10322
  logger.warn("No namespaces found in config file.");
@@ -10165,13 +10353,13 @@ async function $truncate(options) {
10165
10353
  }
10166
10354
  if (hasTypes && options?.types) {
10167
10355
  const typeNames = options.types;
10168
- const typeNamespaceMap = /* @__PURE__ */ new Map();
10169
- const notFoundTypes = [];
10170
- for (const typeName of typeNames) {
10171
- const namespace = await getTypeNamespace(workspaceId, typeName, client, options.configPath);
10172
- if (namespace) typeNamespaceMap.set(typeName, namespace);
10173
- else notFoundTypes.push(typeName);
10174
- }
10356
+ const typeNamespaceMap = await resolveTypeNamespaces({
10357
+ workspaceId,
10358
+ namespaces,
10359
+ typeNames,
10360
+ client
10361
+ });
10362
+ const notFoundTypes = typeNames.filter((typeName) => !typeNamespaceMap.has(typeName));
10175
10363
  if (notFoundTypes.length > 0) throw new Error(`The following types were not found in any namespace: ${notFoundTypes.join(", ")}`);
10176
10364
  if (!options.yes) {
10177
10365
  const typeList = typeNames.join(", ");
@@ -10213,7 +10401,7 @@ const truncateCommand = defineCommand({
10213
10401
  alias: "n",
10214
10402
  description: "Truncate all tables in specified namespace"
10215
10403
  })
10216
- }),
10404
+ }).strict(),
10217
10405
  run: withCommonArgs(async (args) => {
10218
10406
  const types = args.types && args.types.length > 0 ? args.types : void 0;
10219
10407
  await $truncate({
@@ -10260,7 +10448,7 @@ const listCommand$3 = defineCommand({
10260
10448
  ...commonArgs,
10261
10449
  ...jsonArgs,
10262
10450
  ...workspaceArgs
10263
- }),
10451
+ }).strict(),
10264
10452
  run: withCommonArgs(async (args) => {
10265
10453
  const workflows = await listWorkflows({
10266
10454
  workspaceId: args["workspace-id"],
@@ -10325,7 +10513,7 @@ const resumeCommand = defineCommand({
10325
10513
  description: "Failed execution ID"
10326
10514
  }),
10327
10515
  ...waitArgs
10328
- }),
10516
+ }).strict(),
10329
10517
  run: withCommonArgs(async (args) => {
10330
10518
  const { executionId, wait } = await resumeWorkflow({
10331
10519
  executionId: args.executionId,
@@ -10393,7 +10581,7 @@ const healthOptionsSchema = z.object({
10393
10581
  profile: z.string().optional(),
10394
10582
  name: z.string().min(1, { message: "name is required" })
10395
10583
  });
10396
- async function loadOptions$8(options) {
10584
+ async function loadOptions$9(options) {
10397
10585
  const result = healthOptionsSchema.safeParse(options);
10398
10586
  if (!result.success) throw new Error(result.error.issues[0].message);
10399
10587
  return {
@@ -10411,7 +10599,7 @@ async function loadOptions$8(options) {
10411
10599
  * @returns Application health information
10412
10600
  */
10413
10601
  async function getAppHealth(options) {
10414
- const { client, workspaceId, name } = await loadOptions$8(options);
10602
+ const { client, workspaceId, name } = await loadOptions$9(options);
10415
10603
  return appHealthInfo(name, await client.getApplicationSchemaHealth({
10416
10604
  workspaceId,
10417
10605
  applicationName: name
@@ -10428,7 +10616,7 @@ const healthCommand = defineCommand({
10428
10616
  description: "Application name",
10429
10617
  alias: "n"
10430
10618
  })
10431
- }),
10619
+ }).strict(),
10432
10620
  run: withCommonArgs(async (args) => {
10433
10621
  const health = await getAppHealth({
10434
10622
  workspaceId: args["workspace-id"],
@@ -10451,7 +10639,7 @@ const listAppsOptionsSchema = z.object({
10451
10639
  profile: z.string().optional(),
10452
10640
  limit: z.coerce.number().int().positive().optional()
10453
10641
  });
10454
- async function loadOptions$7(options) {
10642
+ async function loadOptions$8(options) {
10455
10643
  const result = listAppsOptionsSchema.safeParse(options);
10456
10644
  if (!result.success) throw new Error(result.error.issues[0].message);
10457
10645
  return {
@@ -10469,7 +10657,7 @@ async function loadOptions$7(options) {
10469
10657
  * @returns List of applications
10470
10658
  */
10471
10659
  async function listApps(options) {
10472
- const { client, workspaceId, limit } = await loadOptions$7(options);
10660
+ const { client, workspaceId, limit } = await loadOptions$8(options);
10473
10661
  const hasLimit = limit !== void 0;
10474
10662
  const results = [];
10475
10663
  let pageToken = "";
@@ -10501,7 +10689,7 @@ const listCommand$2 = defineCommand({
10501
10689
  alias: "l",
10502
10690
  description: "Maximum number of applications to list"
10503
10691
  })
10504
- }),
10692
+ }).strict(),
10505
10693
  run: withCommonArgs(async (args) => {
10506
10694
  const apps = await listApps({
10507
10695
  workspaceId: args["workspace-id"],
@@ -10606,7 +10794,7 @@ const createCommand = defineCommand({
10606
10794
  description: "Profile name to create"
10607
10795
  }),
10608
10796
  "profile-user": arg(z.string().optional(), { description: "User email for the profile (defaults to current user)" })
10609
- }),
10797
+ }).strict(),
10610
10798
  run: withCommonArgs(async (args) => {
10611
10799
  const workspace = await createWorkspace({
10612
10800
  name: args.name,
@@ -10654,7 +10842,7 @@ const createCommand = defineCommand({
10654
10842
  //#endregion
10655
10843
  //#region src/cli/commands/workspace/delete.ts
10656
10844
  const deleteWorkspaceOptionsSchema = z.object({ workspaceId: z.uuid({ message: "workspace-id must be a valid UUID" }) });
10657
- async function loadOptions$6(options) {
10845
+ async function loadOptions$7(options) {
10658
10846
  const result = deleteWorkspaceOptionsSchema.safeParse(options);
10659
10847
  if (!result.success) throw new Error(result.error.issues[0].message);
10660
10848
  return {
@@ -10668,7 +10856,7 @@ async function loadOptions$6(options) {
10668
10856
  * @returns Promise that resolves when deletion completes
10669
10857
  */
10670
10858
  async function deleteWorkspace(options) {
10671
- const { client, workspaceId } = await loadOptions$6(options);
10859
+ const { client, workspaceId } = await loadOptions$7(options);
10672
10860
  await client.deleteWorkspace({ workspaceId });
10673
10861
  }
10674
10862
  const deleteCommand = defineCommand({
@@ -10681,9 +10869,9 @@ const deleteCommand = defineCommand({
10681
10869
  description: "Workspace ID"
10682
10870
  }),
10683
10871
  ...confirmationArgs
10684
- }),
10872
+ }).strict(),
10685
10873
  run: withCommonArgs(async (args) => {
10686
- const { client, workspaceId } = await loadOptions$6({ workspaceId: args["workspace-id"] });
10874
+ const { client, workspaceId } = await loadOptions$7({ workspaceId: args["workspace-id"] });
10687
10875
  let workspace;
10688
10876
  try {
10689
10877
  workspace = await client.getWorkspace({ workspaceId });
@@ -10714,7 +10902,7 @@ const getWorkspaceOptionsSchema = z.object({
10714
10902
  workspaceId: z.uuid({ message: "workspace-id must be a valid UUID" }).optional(),
10715
10903
  profile: z.string().optional()
10716
10904
  });
10717
- async function loadOptions$5(options) {
10905
+ async function loadOptions$6(options) {
10718
10906
  const result = getWorkspaceOptionsSchema.safeParse(options);
10719
10907
  if (!result.success) throw new Error(result.error.issues[0].message);
10720
10908
  return {
@@ -10731,7 +10919,7 @@ async function loadOptions$5(options) {
10731
10919
  * @returns Workspace details
10732
10920
  */
10733
10921
  async function getWorkspace(options) {
10734
- const { client, workspaceId } = await loadOptions$5(options);
10922
+ const { client, workspaceId } = await loadOptions$6(options);
10735
10923
  const response = await client.getWorkspace({ workspaceId });
10736
10924
  if (!response.workspace) throw new Error(`Workspace "${workspaceId}" not found.`);
10737
10925
  return workspaceDetails(response.workspace);
@@ -10743,7 +10931,7 @@ const getCommand = defineCommand({
10743
10931
  ...commonArgs,
10744
10932
  ...jsonArgs,
10745
10933
  ...workspaceArgs
10746
- }),
10934
+ }).strict(),
10747
10935
  run: withCommonArgs(async (args) => {
10748
10936
  const workspace = await getWorkspace({
10749
10937
  workspaceId: args["workspace-id"],
@@ -10797,7 +10985,7 @@ const listCommand$1 = defineCommand({
10797
10985
  alias: "l",
10798
10986
  description: "Maximum number of workspaces to list"
10799
10987
  })
10800
- }),
10988
+ }).strict(),
10801
10989
  run: withCommonArgs(async (args) => {
10802
10990
  const workspaces = await listWorkspaces({ limit: args.limit });
10803
10991
  logger.out(workspaces, { display: { updatedAt: null } });
@@ -10807,7 +10995,7 @@ const listCommand$1 = defineCommand({
10807
10995
  //#endregion
10808
10996
  //#region src/cli/commands/workspace/restore.ts
10809
10997
  const restoreWorkspaceOptionsSchema = z.object({ workspaceId: z.uuid({ message: "workspace-id must be a valid UUID" }) });
10810
- async function loadOptions$4(options) {
10998
+ async function loadOptions$5(options) {
10811
10999
  const result = restoreWorkspaceOptionsSchema.safeParse(options);
10812
11000
  if (!result.success) throw new Error(result.error.issues[0].message);
10813
11001
  return {
@@ -10821,7 +11009,7 @@ async function loadOptions$4(options) {
10821
11009
  * @returns Promise that resolves when restoration completes
10822
11010
  */
10823
11011
  async function restoreWorkspace(options) {
10824
- const { client, workspaceId } = await loadOptions$4(options);
11012
+ const { client, workspaceId } = await loadOptions$5(options);
10825
11013
  await client.restoreWorkspace({ workspaceId });
10826
11014
  }
10827
11015
  const restoreCommand = defineCommand({
@@ -10834,9 +11022,9 @@ const restoreCommand = defineCommand({
10834
11022
  description: "Workspace ID"
10835
11023
  }),
10836
11024
  ...confirmationArgs
10837
- }),
11025
+ }).strict(),
10838
11026
  run: withCommonArgs(async (args) => {
10839
- const { client, workspaceId } = await loadOptions$4({ workspaceId: args["workspace-id"] });
11027
+ const { client, workspaceId } = await loadOptions$5({ workspaceId: args["workspace-id"] });
10840
11028
  if (!args.yes) {
10841
11029
  if (await logger.prompt(`Are you sure you want to restore workspace "${workspaceId}"? (yes/no):`, { type: "text" }) !== "yes") {
10842
11030
  logger.info("Workspace restoration cancelled.");
@@ -10887,7 +11075,7 @@ const inviteUserOptionsSchema = z.object({
10887
11075
  email: z.email({ message: "email must be a valid email address" }),
10888
11076
  role: z.enum(validRoles, { message: `role must be one of: ${validRoles.join(", ")}` })
10889
11077
  });
10890
- async function loadOptions$3(options) {
11078
+ async function loadOptions$4(options) {
10891
11079
  const result = inviteUserOptionsSchema.safeParse(options);
10892
11080
  if (!result.success) throw new Error(result.error.issues[0].message);
10893
11081
  return {
@@ -10906,7 +11094,7 @@ async function loadOptions$3(options) {
10906
11094
  * @returns Promise that resolves when invitation is sent
10907
11095
  */
10908
11096
  async function inviteUser(options) {
10909
- const { client, workspaceId, email, role } = await loadOptions$3(options);
11097
+ const { client, workspaceId, email, role } = await loadOptions$4(options);
10910
11098
  await client.inviteWorkspacePlatformUser({
10911
11099
  workspaceId,
10912
11100
  email,
@@ -10924,7 +11112,7 @@ const inviteCommand = defineCommand({
10924
11112
  description: `Role to assign (${validRoles.join(", ")})`,
10925
11113
  alias: "r"
10926
11114
  })
10927
- }),
11115
+ }).strict(),
10928
11116
  run: withCommonArgs(async (args) => {
10929
11117
  await inviteUser({
10930
11118
  workspaceId: args["workspace-id"],
@@ -10943,7 +11131,7 @@ const listUsersOptionsSchema = z.object({
10943
11131
  profile: z.string().optional(),
10944
11132
  limit: z.coerce.number().int().positive().optional()
10945
11133
  });
10946
- async function loadOptions$2(options) {
11134
+ async function loadOptions$3(options) {
10947
11135
  const result = listUsersOptionsSchema.safeParse(options);
10948
11136
  if (!result.success) throw new Error(result.error.issues[0].message);
10949
11137
  return {
@@ -10961,7 +11149,7 @@ async function loadOptions$2(options) {
10961
11149
  * @returns List of workspace users
10962
11150
  */
10963
11151
  async function listUsers(options) {
10964
- const { client, workspaceId, limit } = await loadOptions$2(options);
11152
+ const { client, workspaceId, limit } = await loadOptions$3(options);
10965
11153
  const hasLimit = limit !== void 0;
10966
11154
  const results = [];
10967
11155
  let pageToken = "";
@@ -10993,7 +11181,7 @@ const listCommand = defineCommand({
10993
11181
  alias: "l",
10994
11182
  description: "Maximum number of users to list"
10995
11183
  })
10996
- }),
11184
+ }).strict(),
10997
11185
  run: withCommonArgs(async (args) => {
10998
11186
  const users = await listUsers({
10999
11187
  workspaceId: args["workspace-id"],
@@ -11011,7 +11199,7 @@ const removeUserOptionsSchema = z.object({
11011
11199
  profile: z.string().optional(),
11012
11200
  email: z.string().email({ message: "email must be a valid email address" })
11013
11201
  });
11014
- async function loadOptions$1(options) {
11202
+ async function loadOptions$2(options) {
11015
11203
  const result = removeUserOptionsSchema.safeParse(options);
11016
11204
  if (!result.success) throw new Error(result.error.issues[0].message);
11017
11205
  return {
@@ -11029,7 +11217,7 @@ async function loadOptions$1(options) {
11029
11217
  * @returns Promise that resolves when removal completes
11030
11218
  */
11031
11219
  async function removeUser(options) {
11032
- const { client, workspaceId, email } = await loadOptions$1(options);
11220
+ const { client, workspaceId, email } = await loadOptions$2(options);
11033
11221
  await client.removeWorkspacePlatformUser({
11034
11222
  workspaceId,
11035
11223
  email
@@ -11043,7 +11231,7 @@ const removeCommand = defineCommand({
11043
11231
  ...workspaceArgs,
11044
11232
  email: arg(z.email(), { description: "Email address of the user to remove" }),
11045
11233
  ...confirmationArgs
11046
- }),
11234
+ }).strict(),
11047
11235
  run: withCommonArgs(async (args) => {
11048
11236
  if (!args.yes) {
11049
11237
  if (await logger.prompt(`Are you sure you want to remove user "${args.email}" from the workspace? (yes/no):`, { type: "text" }) !== "yes") {
@@ -11068,7 +11256,7 @@ const updateUserOptionsSchema = z.object({
11068
11256
  email: z.string().email({ message: "email must be a valid email address" }),
11069
11257
  role: z.enum(validRoles, { message: `role must be one of: ${validRoles.join(", ")}` })
11070
11258
  });
11071
- async function loadOptions(options) {
11259
+ async function loadOptions$1(options) {
11072
11260
  const result = updateUserOptionsSchema.safeParse(options);
11073
11261
  if (!result.success) throw new Error(result.error.issues[0].message);
11074
11262
  return {
@@ -11087,7 +11275,7 @@ async function loadOptions(options) {
11087
11275
  * @returns Promise that resolves when update completes
11088
11276
  */
11089
11277
  async function updateUser(options) {
11090
- const { client, workspaceId, email, role } = await loadOptions(options);
11278
+ const { client, workspaceId, email, role } = await loadOptions$1(options);
11091
11279
  await client.updateWorkspacePlatformUser({
11092
11280
  workspaceId,
11093
11281
  email,
@@ -11105,7 +11293,7 @@ const updateCommand = defineCommand({
11105
11293
  description: `New role to assign (${validRoles.join(", ")})`,
11106
11294
  alias: "r"
11107
11295
  })
11108
- }),
11296
+ }).strict(),
11109
11297
  run: withCommonArgs(async (args) => {
11110
11298
  await updateUser({
11111
11299
  workspaceId: args["workspace-id"],
@@ -11118,5 +11306,592 @@ const updateCommand = defineCommand({
11118
11306
  });
11119
11307
 
11120
11308
  //#endregion
11121
- export { listExecutorJobs as $, generateCommand as A, getMigrationFiles as At, getMachineUserToken as B, generateUserTypes as Bt, resumeCommand as C, compareLocalTypesWithSnapshot as Ct, truncate as D, getLatestMigrationNumber as Dt, listWorkflows as E, formatMigrationNumber as Et, removeCommand$1 as F, formatDiffSummary as Ft, listWebhookExecutors as G, deploymentArgs as Gt, listCommand$5 as H, apiCommand as Ht, listCommand$4 as I, formatMigrationDiff as It, triggerExecutor as J, workspaceArgs as Jt, webhookCommand as K, jsonArgs as Kt, listOAuth2Clients as L, hasChanges as Lt, show as M, isValidMigrationNumber as Mt, showCommand as N, loadDiff as Nt, truncateCommand as O, getMigrationDirPath as Ot, remove as P, reconstructSnapshotFromMigrations as Pt, jobsCommand as Q, getCommand$1 as R, getNamespacesWithMigrations as Rt, healthCommand as S, SCHEMA_FILE_NAME as St, listCommand$3 as T, createSnapshotFromLocalTypes as Tt, listMachineUsers as U, commonArgs as Ut, tokenCommand as V, apiCall as Vt, generate$1 as W, confirmationArgs as Wt, listExecutors as X, listCommand$6 as Y, getExecutorJob as Z, createCommand as _, bundleMigrationScript as _t, listCommand as a, executionsCommand as at, listCommand$2 as b, INITIAL_SCHEMA_NUMBER as bt, inviteUser as c, functionExecutionStatusToString as ct, listCommand$1 as d, getExecutor as dt, watchExecutorJob as et, listWorkspaces as f, apply as ft, deleteWorkspace as g, parseMigrationLabelNumber as gt, deleteCommand as h, MIGRATION_LABEL_KEY as ht, removeUser as i, getWorkflow as it, logBetaWarning as j, getNextMigrationNumber as jt, generate as k, getMigrationFilePath as kt, restoreCommand as l, formatKeyValueTable as lt, getWorkspace as m, waitForExecution$1 as mt, updateUser as n, startWorkflow as nt, listUsers as o, getWorkflowExecution as ot, getCommand as p, executeScript as pt, triggerCommand as q, withCommonArgs as qt, removeCommand as r, getCommand$2 as rt, inviteCommand as s, listWorkflowExecutions as st, 
updateCommand as t, startCommand as tt, restoreWorkspace as u, getCommand$3 as ut, createWorkspace as v, DB_TYPES_FILE_NAME as vt, resumeWorkflow as w, compareSnapshots as wt, getAppHealth as x, MIGRATE_FILE_NAME as xt, listApps as y, DIFF_FILE_NAME as yt, getOAuth2Client as z, trnPrefix as zt };
11122
- //# sourceMappingURL=update-DkpWgrzL.mjs.map
11309
+ //#region src/cli/bundler/query/query-bundler.ts
11310
+ function createSqlEntry() {
11311
+ return ml`
11312
+ import { Kysely, sql } from "@tailor-platform/sdk/kysely";
11313
+ import { TailordbDialect } from "@tailor-platform/function-kysely-tailordb";
11314
+
11315
+ type QueryInput = {
11316
+ namespace: string;
11317
+ queries: string[];
11318
+ };
11319
+
11320
+ function getDB(namespace: string) {
11321
+ const client = new tailordb.Client({ namespace });
11322
+ return new Kysely<Record<string, Record<string, unknown>>>({
11323
+ dialect: new TailordbDialect(client),
11324
+ });
11325
+ }
11326
+
11327
+ export async function main(input: QueryInput) {
11328
+ const db = getDB(input.namespace);
11329
+ const results = [];
11330
+ for (const query of input.queries) {
11331
+ const result = await sql.raw(query).execute(db);
11332
+ const rows = result.rows ?? [];
11333
+ results.push({ rows, rowCount: rows.length });
11334
+ }
11335
+ if (results.length === 1) {
11336
+ return results[0];
11337
+ }
11338
+ return results;
11339
+ }
11340
+ `;
11341
+ }
11342
+ function createGqlEntry() {
11343
+ return ml`
11344
+ type QueryInput = {
11345
+ endpoint: string;
11346
+ accessToken: string;
11347
+ query: string;
11348
+ };
11349
+
11350
+ export async function main(input: QueryInput) {
11351
+ const response = await fetch(input.endpoint, {
11352
+ method: "POST",
11353
+ headers: {
11354
+ "Content-Type": "application/json",
11355
+ Authorization: \`Bearer \${input.accessToken}\`,
11356
+ },
11357
+ body: JSON.stringify({
11358
+ query: input.query,
11359
+ }),
11360
+ });
11361
+ if (!response.ok) {
11362
+ let message = \`HTTP \${response.status}\`;
11363
+ try {
11364
+ const errorJson = await response.json();
11365
+ if (errorJson && typeof errorJson === "object" && "message" in errorJson) {
11366
+ message = String(errorJson.message);
11367
+ }
11368
+ } catch {
11369
+ // Keep default HTTP status message when response body is not JSON.
11370
+ }
11371
+ throw new Error(\`GraphQL request failed: \${message}\`);
11372
+ }
11373
+
11374
+ const json = await response.json();
11375
+ return json;
11376
+ }
11377
+ `;
11378
+ }
11379
+ /**
11380
+ * Bundle a query executor script for TestExecScript.
11381
+ * @param engine - Query engine type
11382
+ * @returns Bundled code
11383
+ */
11384
+ async function bundleQueryScript(engine) {
11385
+ const outputDir = path.resolve(getDistDir(), "query");
11386
+ fs$1.mkdirSync(outputDir, { recursive: true });
11387
+ const entryPath = path.join(outputDir, `query_${engine}.entry.ts`);
11388
+ const outputPath = path.join(outputDir, `query_${engine}.js`);
11389
+ const entryContent = engine === "sql" ? createSqlEntry() : createGqlEntry();
11390
+ fs$1.writeFileSync(entryPath, entryContent);
11391
+ let tsconfig;
11392
+ try {
11393
+ tsconfig = await resolveTSConfig();
11394
+ } catch {
11395
+ tsconfig = void 0;
11396
+ }
11397
+ await rolldown.build(rolldown.defineConfig({
11398
+ input: entryPath,
11399
+ output: {
11400
+ file: outputPath,
11401
+ format: "esm",
11402
+ sourcemap: false,
11403
+ minify: false,
11404
+ inlineDynamicImports: true,
11405
+ globals: { tailordb: "tailordb" }
11406
+ },
11407
+ external: engine === "sql" ? ["tailordb"] : [],
11408
+ resolve: { conditionNames: ["node", "import"] },
11409
+ tsconfig,
11410
+ treeshake: {
11411
+ moduleSideEffects: false,
11412
+ annotations: true,
11413
+ unknownGlobalSideEffects: false
11414
+ },
11415
+ logLevel: "silent"
11416
+ }));
11417
+ return fs$1.readFileSync(outputPath, "utf-8");
11418
+ }
11419
+
11420
+ //#endregion
11421
+ //#region src/cli/query/errors.ts
11422
+ function toErrorMessage(error) {
11423
+ if (error instanceof Error) return error.message;
11424
+ return String(error);
11425
+ }
11426
+ /**
11427
+ * Maps errors from query execution to user-friendly CLI errors with suggestions when possible.
11428
+ * @param args - The error and context information for mapping
11429
+ * @returns A CLIError with a user-friendly message
11430
+ */
11431
+ function mapQueryExecutionError(args) {
11432
+ const message = toErrorMessage(args.error);
11433
+ if (message.includes("machine user does not exist")) return createCLIError({
11434
+ code: "not_found",
11435
+ message: `Machine user '${args.machineUser ?? "unknown"}' was not found.`,
11436
+ suggestion: "Run `tailor-sdk machineuser list` and use an existing name."
11437
+ });
11438
+ if (args.engine === "sql" && message.includes("sqlaccess error: failed to fetch schema: query returned an unexpected number of rows")) return createCLIError({
11439
+ code: "invalid_namespace",
11440
+ message: `Failed to load TailorDB schema for namespace '${args.namespace}'.`,
11441
+ suggestion: "Ensure the query references TailorDB types from a single namespace and re-apply if needed."
11442
+ });
11443
+ if (args.engine === "sql" && message.includes("sqlaccess error: failed to parse:")) {
11444
+ const parserReason = message.split("sqlaccess error: failed to parse:").at(1)?.split("\n").at(0)?.trim();
11445
+ return createCLIError({
11446
+ code: "invalid_sql",
11447
+ message: "SQL parse error.",
11448
+ suggestion: parserReason ?? "The SQL query contains unsupported syntax."
11449
+ });
11450
+ }
11451
+ return args.error instanceof Error ? args.error : new Error(message);
11452
+ }
11453
+
11454
+ //#endregion
11455
+ //#region src/cli/query/sql-type-extractor.ts
11456
+ /**
11457
+ * Extract TailorDB type names from SQL query.
11458
+ * @param query - SQL query
11459
+ * @returns Type names referenced by query
11460
+ */
11461
+ function extractTypeNamesFromSql(query$1) {
11462
+ let statements;
11463
+ try {
11464
+ statements = parse(query$1);
11465
+ } catch (error) {
11466
+ const message = error instanceof Error ? error.message : String(error);
11467
+ throw new Error(`SQL parse error: ${message}\nIf your table name is a reserved keyword (e.g. User), wrap it in double quotes: SELECT * FROM "User"`);
11468
+ }
11469
+ const typeNames = /* @__PURE__ */ new Set();
11470
+ const visitor = astVisitor((mapper) => ({ tableRef: (tableRef) => {
11471
+ typeNames.add(tableRef.name);
11472
+ mapper.super().tableRef(tableRef);
11473
+ return tableRef;
11474
+ } }));
11475
+ for (const statement of statements) visitor.statement(statement);
11476
+ return [...typeNames];
11477
+ }
11478
+ function collectAliasMap(fromClauses) {
11479
+ const aliasMap = /* @__PURE__ */ new Map();
11480
+ for (const from of fromClauses) if (from.type === "table") {
11481
+ const tableName = from.name.name;
11482
+ const alias = from.name.alias ?? tableName;
11483
+ aliasMap.set(alias, tableName);
11484
+ }
11485
+ return aliasMap;
11486
+ }
11487
+ /**
11488
+ * Extract the column template from a SQL query's SELECT clause.
11489
+ * Returns an ordered list of column slots representing explicit columns
11490
+ * and wildcard expansions with their resolved type names.
11491
+ *
11492
+ * Only inspects the top-level SELECT statement, not subqueries.
11493
+ * TailorDB's sqlaccess does not currently support subqueries in FROM clauses,
11494
+ * but we intentionally avoid recursing into nested SELECTs to prevent
11495
+ * false positives if the parser accepts such queries.
11496
+ * @param query - SQL query
11497
+ * @returns Column slots if wildcards are present, null otherwise
11498
+ */
11499
+ function extractColumnTemplate(query$1) {
11500
+ try {
11501
+ const statements = parse(query$1);
11502
+ for (const statement of statements) {
11503
+ if (statement.type !== "select" || !statement.columns) continue;
11504
+ const aliasMap = collectAliasMap(statement.from ?? []);
11505
+ const slots = [];
11506
+ let hasWildcard = false;
11507
+ for (const column of statement.columns) if (column.expr.type === "ref" && column.expr.name === "*") {
11508
+ hasWildcard = true;
11509
+ if (column.expr.table) {
11510
+ const typeName = aliasMap.get(column.expr.table.name);
11511
+ slots.push({
11512
+ type: "wildcard",
11513
+ typeNames: typeName ? [typeName] : []
11514
+ });
11515
+ } else slots.push({
11516
+ type: "wildcard",
11517
+ typeNames: [...new Set(aliasMap.values())]
11518
+ });
11519
+ } else {
11520
+ const name = column.alias?.name ?? (column.expr.type === "ref" ? column.expr.name : null);
11521
+ if (name) slots.push({
11522
+ type: "explicit",
11523
+ name
11524
+ });
11525
+ }
11526
+ return hasWildcard ? slots : null;
11527
+ }
11528
+ return null;
11529
+ } catch {
11530
+ return null;
11531
+ }
11532
+ }
11533
+
11534
+ //#endregion
11535
+ //#region src/cli/query/type-field-order.ts
11536
/**
* Load field definition order for all TailorDB types in a namespace.
* @param config - Loaded application configuration
* @param namespace - TailorDB namespace name
* @returns Map of type name to field names in definition order
*/
async function loadTypeFieldOrder(config, namespace) {
	const fieldOrder = /* @__PURE__ */ new Map();
	const dbConfig = config.db?.[namespace];
	// Only file-based namespace configs can be scanned for type definitions.
	if (!dbConfig || !("files" in dbConfig) || dbConfig.files.length === 0) return fieldOrder;
	const typeFiles = loadFilesWithIgnores(dbConfig);
	// Import all type files in parallel; collect every exported value that
	// parses as a TailorDB type definition.
	await Promise.all(typeFiles.map(async (typeFile) => {
		try {
			const module = await import(pathToFileURL(typeFile).href);
			for (const exportedValue of Object.values(module)) {
				const result = TailorDBTypeSchema.safeParse(exportedValue);
				if (!result.success) continue;
				// Object key order of `fields` preserves the definition order.
				fieldOrder.set(result.data.name, Object.keys(result.data.fields));
			}
		} catch {} // deliberate best-effort: files that fail to import are skipped
	}));
	return fieldOrder;
}
11559
+
11560
+ //#endregion
11561
+ //#region src/cli/query/index.ts
11562
// Supported query engines for the `query` command.
const queryEngineSchema = z.enum(["sql", "gql"]);
// Options accepted by query()/querySql()/queryGql(); validated in loadOptions().
const queryOptionsSchema = z.object({
	workspaceId: z.string().optional(),
	profile: z.string().optional(),
	configPath: z.string().optional(),
	engine: queryEngineSchema,
	query: z.string(),
	machineUser: z.string()
});
11571
/**
 * Infer which TailorDB namespace a SQL query targets by resolving the type
 * names it references against the configured namespaces.
 * @throws Error when no namespace can be determined unambiguously
 */
async function getNamespaceFromSqlQuery(workspaceId, query$1, client, namespaces) {
	// No candidates at all is a configuration error.
	if (namespaces.length === 0) throw new Error("No namespaces found in configuration.");
	// A single namespace needs no inference.
	if (namespaces.length === 1) return namespaces[0];
	const referencedTypes = extractTypeNamesFromSql(query$1);
	if (referencedTypes.length === 0) throw new Error(`Could not infer namespace from query. Detected namespaces: ${namespaces.join(", ")}.`);
	const typeToNamespace = await resolveTypeNamespaces({
		workspaceId,
		namespaces,
		typeNames: referencedTypes,
		client
	});
	const unresolved = referencedTypes.filter((typeName) => !typeToNamespace.has(typeName));
	if (unresolved.length > 0) throw new Error(`Could not find namespace for types in query: ${unresolved.join(", ")}.`);
	const distinctNamespaces = Array.from(new Set(typeToNamespace.values()));
	if (distinctNamespaces.length === 1) return distinctNamespaces[0];
	throw new Error(`Query references types from multiple namespaces: ${distinctNamespaces.join(", ")}.`);
}
11588
/**
 * Validate raw CLI options and assemble the full query execution context:
 * operator client, workspace id, loaded config, application, and machine user.
 *
 * Fix: the original mixed raw `options.*` and validated `result.data.*`
 * accesses (e.g. `options.engine`, `options.configPath` vs `result.data.query`).
 * All reads now go through the validated payload for consistency.
 *
 * @param options - Raw options (validated against queryOptionsSchema)
 * @returns Execution context; includes `namespace` only for the sql engine
 * @throws Error when validation fails, the application has no auth
 *         configuration, or the machine user cannot be found
 */
async function loadOptions(options) {
	const parsed = queryOptionsSchema.safeParse(options);
	if (!parsed.success) throw new Error(parsed.error.issues[0].message);
	const { workspaceId: rawWorkspaceId, profile, configPath, engine, query: queryText, machineUser } = parsed.data;
	const client = await initOperatorClient(await loadAccessToken({
		useProfile: true,
		profile
	}));
	const workspaceId = loadWorkspaceId({
		workspaceId: rawWorkspaceId,
		profile
	});
	const { config } = await loadConfig(configPath);
	const namespaces = extractAllNamespaces(config);
	const { application } = await client.getApplication({
		workspaceId,
		applicationName: config.name
	});
	if (!application?.authNamespace) throw new Error(`Application ${config.name} does not have an auth configuration.`);
	const { machineUser: machineUserResource } = await client.getAuthMachineUser({
		workspaceId,
		authNamespace: application.authNamespace,
		name: machineUser
	});
	if (!machineUserResource) throw new Error(`Machine user ${machineUser} not found.`);
	if (engine === "gql") return {
		engine,
		client,
		workspaceId,
		config,
		application,
		machineUserResource
	};
	// The sql engine additionally needs the target TailorDB namespace.
	const namespace = await getNamespaceFromSqlQuery(workspaceId, queryText, client, namespaces);
	return {
		engine,
		client,
		workspaceId,
		config,
		application,
		machineUserResource,
		namespace
	};
}
11631
/**
 * Execute a (possibly multi-statement) SQL query through the bundled
 * server-side script runner.
 * @throws Error when the script execution reports failure
 */
async function sqlQuery(client, invoker, args) {
	const statements = splitSqlStatements(args.query);
	// The script receives the namespace and pre-split statements as its argument.
	const scriptArg = JSON.stringify({
		namespace: args.namespace,
		queries: statements
	});
	const executed = await executeScript({
		client,
		workspaceId: args.workspaceId,
		name: `query-sql-${args.namespace}.js`,
		code: args.bundledCode,
		arg: scriptArg,
		invoker
	});
	if (!executed.success) throw new Error(executed.error);
	return {
		engine: "sql",
		namespace: args.namespace,
		query: args.query,
		result: parseExecutionResult(executed.result)
	};
}
11652
/**
 * Execute a GraphQL query through the bundled server-side script runner,
 * authenticating with a machine-user token fetched from the application.
 * @throws Error when the script execution reports failure
 */
async function gqlQuery(client, invoker, application, machineUser, args) {
	const tokenResponse = await fetchMachineUserToken(application.url, machineUser.clientId, machineUser.clientSecret);
	// The script receives the GraphQL endpoint, bearer token and query text.
	const scriptArg = JSON.stringify({
		endpoint: `${application.url}/query`,
		accessToken: tokenResponse.access_token,
		query: args.query
	});
	const executed = await executeScript({
		client,
		workspaceId: args.workspaceId,
		name: `query-gql.js`,
		code: args.bundledCode,
		arg: scriptArg,
		invoker
	});
	if (!executed.success) throw new Error(executed.error);
	return {
		engine: "gql",
		query: args.query,
		result: parseExecutionResult(executed.result)
	};
}
11673
/**
 * Best-effort decode of a script execution result.
 * Returns null for an empty/absent result, the parsed value when the string
 * is valid JSON, and the raw string otherwise.
 */
function parseExecutionResult(result) {
	if (!result) return null;
	let decoded;
	try {
		decoded = JSON.parse(result);
	} catch {
		decoded = result;
	}
	return decoded;
}
11681
/**
 * Dispatch query execution.
 * @param options - Query command options
 * @returns Dispatch result
 */
async function query(options) {
	// Resolve client, workspace, config, application and machine user up front.
	// loadOptions() throws outside the try, so its errors are NOT wrapped by
	// mapQueryExecutionError below.
	const { client, workspaceId, config, application, machineUserResource, engine, namespace } = await loadOptions(options);
	try {
		// The query runs server-side as a bundled script, invoked as the machine user.
		const bundledCode = await bundleQueryScript(engine);
		const invoker = create(AuthInvokerSchema, {
			namespace: application.authNamespace,
			machineUserName: machineUserResource.name
		});
		switch (engine) {
			// SQL results are post-processed so columns follow field definition order.
			case "sql": return reorderSqlColumns(await sqlQuery(client, invoker, {
				workspaceId,
				namespace,
				bundledCode,
				query: options.query
			}), config, namespace, options.query);
			case "gql": return await gqlQuery(client, invoker, application, machineUserResource, {
				workspaceId,
				bundledCode,
				query: options.query
			});
			default: throw new Error(`Unsupported query engine: ${engine}`);
		}
	} catch (error) {
		// Translate low-level execution failures into a user-facing error.
		throw mapQueryExecutionError({
			error,
			engine,
			namespace,
			machineUser: options.machineUser
		});
	}
}
11717
/**
 * Execute SQL query directly.
 * @param options - Shared query options
 * @returns SQL query result
 * @throws Error when the dispatcher returns a non-sql result
 */
async function querySql(options) {
	const dispatched = await query({
		...options,
		engine: "sql"
	});
	// Narrow the dispatch result to the sql variant for callers.
	if (dispatched.engine !== "sql") throw new Error(`Expected sql engine result but got: ${dispatched.engine}`);
	return dispatched;
}
11730
/**
 * Execute GraphQL query directly.
 * @param options - Shared query options
 * @returns GraphQL query result
 * @throws Error when the dispatcher returns a non-gql result
 */
async function queryGql(options) {
	const dispatched = await query({
		...options,
		engine: "gql"
	});
	// Narrow the dispatch result to the gql variant for callers.
	if (dispatched.engine !== "gql") throw new Error(`Expected gql engine result but got: ${dispatched.engine}`);
	return dispatched;
}
11743
/**
 * Reorder the columns of a SQL result to match the query's column template:
 * explicit columns in the order written, wildcards expanded into each type's
 * field definition order. Best-effort — any failure returns the result as-is.
 * NOTE(review): only a single execution result is reordered; multi-statement
 * (array) results pass through unchanged — confirm this is intended.
 * @param result - SQL dispatch result
 * @param config - Loaded application configuration
 * @param namespace - TailorDB namespace the query ran against
 * @param sqlQuery$1 - Original SQL text, used to derive the column template
 */
async function reorderSqlColumns(result, config, namespace, sqlQuery$1) {
	// Nothing to reorder for non-SQL payloads or empty result sets.
	if (!isSQLExecutionResult(result.result) || result.result.rows.length === 0) return result;
	const template = extractColumnTemplate(sqlQuery$1);
	if (!template) return result;
	try {
		const expectedOrder = buildExpectedColumnOrder(template, await loadTypeFieldOrder(config, namespace));
		if (expectedOrder.length === 0) return result;
		const orderedRows = result.result.rows.map((row) => reorderRowByTemplate(row, expectedOrder));
		// Return a shallow copy so the caller's original result is not mutated.
		return {
			...result,
			result: {
				...result.result,
				rows: orderedRows
			}
		};
	} catch {
		// Column reordering is cosmetic; never fail the query over it.
		return result;
	}
}
11762
// System fields that precede user-defined fields in every wildcard expansion.
const SYSTEM_FIELD_ORDER = ["id"];
/**
 * Expand a column template into a flat list of expected column names.
 * Explicit slots contribute their own name; wildcard slots expand to the
 * system fields followed by each referenced type's fields in definition order.
 * Types missing from `fieldOrder` contribute only the system fields.
 */
function buildExpectedColumnOrder(template, fieldOrder) {
	return template.flatMap((slot) => {
		if (slot.type === "explicit") return [slot.name];
		const expanded = [];
		for (const typeName of slot.typeNames) {
			expanded.push(...SYSTEM_FIELD_ORDER, ...fieldOrder.get(typeName) ?? []);
		}
		return expanded;
	});
}
11772
/**
 * Reorder a result row's keys to follow `expectedOrder`, matching
 * case-insensitively while preserving the row's original key casing.
 * Keys not named in the template are appended afterwards in their
 * original order; each row key is consumed at most once.
 */
function reorderRowByTemplate(row, expectedOrder) {
	const remaining = new Set(Object.keys(row));
	const byLowerCase = new Map();
	for (const rowKey of remaining) byLowerCase.set(rowKey.toLowerCase(), rowKey);
	const ordered = {};
	for (const expected of expectedOrder) {
		const lower = expected.toLowerCase();
		const original = byLowerCase.get(lower);
		if (original == null || !remaining.has(original)) continue;
		ordered[original] = row[original];
		// Consume the key so duplicates in expectedOrder don't re-match it.
		remaining.delete(original);
		byLowerCase.delete(lower);
	}
	for (const leftover of remaining) ordered[leftover] = row[leftover];
	return ordered;
}
11788
// CLI definition for `query`: validates arguments, then dispatches to the
// SQL or GraphQL execution path and prints the result.
const queryCommand = defineCommand({
	name: "query",
	description: "Run SQL/GraphQL query.",
	args: z.object({
		...commonArgs,
		...jsonArgs,
		...deploymentArgs,
		engine: arg(queryEngineSchema, { description: "Query engine (sql or gql)" }),
		query: arg(z.string(), {
			alias: "q",
			description: "Query string to execute directly"
		}),
		machineuser: arg(z.string(), {
			alias: "m",
			description: "Machine user name for query execution"
		})
	}).strict(),
	run: withCommonArgs(async (args) => {
		// Normalize CLI flag names into the option shape query() expects.
		const sharedOptions = {
			workspaceId: args["workspace-id"],
			profile: args.profile,
			configPath: args.config,
			query: args.query,
			machineUser: args.machineuser
		};
		if (args.engine === "sql") {
			printSqlResult(await querySql(sharedOptions), { json: args.json });
			return;
		}
		printGqlResult(await queryGql(sharedOptions), { json: args.json });
	})
});
11820
/**
 * Type guard: value looks like a single SQL execution result,
 * i.e. `{ rows: unknown[], rowCount: number }`.
 */
function isSQLExecutionResult(value) {
	if (typeof value !== "object" || value === null) return false;
	const { rows, rowCount } = value;
	return Array.isArray(rows) && typeof rowCount === "number";
}
11825
/**
 * Print one SQL statement's execution result, either as a structured JSON
 * payload (options.json) or as human-readable output.
 */
function printSingleSqlResult(execResult, options = {}) {
	const { rows, rowCount } = execResult;
	const isEmpty = rows.length === 0;
	if (options.json) {
		// JSON mode always reports an empty result as { results: [], rowCount: 0 }.
		logger.out(isEmpty ? {
			results: [],
			rowCount: 0
		} : {
			results: rows,
			rowCount
		});
		return;
	}
	if (isEmpty) {
		logger.info("No rows returned.");
		return;
	}
	logger.out(rows, { showNull: true });
	logger.out(`rows: ${rowCount}`);
}
11847
/**
 * Split raw SQL text into individual statements using the SQL parser.
 * When the text cannot be parsed, fall back to treating the trimmed input
 * as a single statement (or none, if it is blank).
 */
function splitSqlStatements(query$1) {
	try {
		const parsed = parse(query$1);
		if (parsed.length === 0) return [];
		return parsed.map((statement) => toSql.statement(statement));
	} catch {
		const fallback = query$1.trim();
		if (fallback.length === 0) return [];
		return [fallback];
	}
}
11857
/**
 * Type guard: non-empty array whose every element is a SQL execution result.
 */
function isSQLExecutionResultArray(value) {
	if (!Array.isArray(value) || value.length === 0) return false;
	return value.every((entry) => isSQLExecutionResult(entry));
}
11860
/**
 * Print the result of a SQL query dispatch. Handles three payload shapes:
 * an array of per-statement results, a single result, or an opaque value.
 */
function printSqlResult(result, options = {}) {
	if (isSQLExecutionResultArray(result.result)) {
		if (options.json) {
			logger.out(result.result.map((r) => ({
				results: r.rows,
				rowCount: r.rowCount
			})));
			return;
		}
		// Re-split the original query so each result can be labeled with its
		// source statement (falls back to "Statement N" on count mismatch).
		const queries = splitSqlStatements(result.query);
		for (let i = 0; i < result.result.length; i++) {
			if (i > 0) logger.log("");
			logger.info(queries[i] ?? `Statement ${i + 1}`);
			printSingleSqlResult(result.result[i], options);
		}
		return;
	}
	if (isSQLExecutionResult(result.result)) {
		printSingleSqlResult(result.result, options);
		return;
	}
	// Unknown shape: dump the raw payload for inspection.
	logger.out({
		engine: result.engine,
		query: result.query,
		result: result.result
	});
}
11887
/**
 * Print a GraphQL query result: the raw object in JSON mode, otherwise
 * pretty-printed JSON text.
 */
function printGqlResult(result, options = {}) {
	if (options.json) {
		logger.out({ result: result.result });
	} else {
		logger.out(JSON.stringify(result.result, null, 2));
	}
}
11894
+
11895
+ //#endregion
11896
+ export { getExecutorJob as $, truncateCommand as A, getMigrationDirPath as At, getCommand$1 as B, getNamespacesWithMigrations as Bt, getAppHealth as C, MIGRATE_FILE_NAME as Ct, listCommand$3 as D, createSnapshotFromLocalTypes as Dt, resumeWorkflow as E, compareSnapshots as Et, showCommand as F, loadDiff as Ft, listMachineUsers as G, commonArgs as Gt, getMachineUserToken as H, generateUserTypes as Ht, remove as I, reconstructSnapshotFromMigrations as It, webhookCommand as J, jsonArgs as Jt, generate$1 as K, confirmationArgs as Kt, removeCommand$1 as L, formatDiffSummary as Lt, generateCommand as M, getMigrationFiles as Mt, logBetaWarning as N, getNextMigrationNumber as Nt, listWorkflows as O, formatMigrationNumber as Ot, show as P, isValidMigrationNumber as Pt, listExecutors as Q, listCommand$4 as R, formatMigrationDiff as Rt, listCommand$2 as S, INITIAL_SCHEMA_NUMBER as St, resumeCommand as T, compareLocalTypesWithSnapshot as Tt, tokenCommand as U, apiCall as Ut, getOAuth2Client as V, trnPrefix as Vt, listCommand$5 as W, apiCommand as Wt, triggerExecutor as X, workspaceArgs as Xt, triggerCommand as Y, withCommonArgs as Yt, listCommand$6 as Z, deleteCommand as _, MIGRATION_LABEL_KEY as _t, removeCommand as a, getCommand$2 as at, createWorkspace as b, DB_TYPES_FILE_NAME as bt, listUsers as c, getWorkflowExecution as ct, restoreCommand as d, formatKeyValueTable as dt, jobsCommand as et, restoreWorkspace as f, getCommand$3 as ft, getWorkspace as g, waitForExecution$1 as gt, getCommand as h, executeScript as ht, updateUser as i, startWorkflow as it, generate as j, getMigrationFilePath as jt, truncate as k, getLatestMigrationNumber as kt, inviteCommand as l, listWorkflowExecutions as lt, listWorkspaces as m, apply as mt, queryCommand as n, watchExecutorJob as nt, removeUser as o, getWorkflow as ot, listCommand$1 as p, getExecutor as pt, listWebhookExecutors as q, deploymentArgs as qt, updateCommand as r, startCommand as rt, listCommand as s, executionsCommand as st, 
query as t, listExecutorJobs as tt, inviteUser as u, functionExecutionStatusToString as ut, deleteWorkspace as v, parseMigrationLabelNumber as vt, healthCommand as w, SCHEMA_FILE_NAME as wt, listApps as x, DIFF_FILE_NAME as xt, createCommand as y, bundleMigrationScript as yt, listOAuth2Clients as z, hasChanges as zt };
11897
+ //# sourceMappingURL=query-Bz2oDGhw.mjs.map