@tailor-platform/sdk 1.17.0 → 1.18.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. package/CHANGELOG.md +24 -0
  2. package/dist/application-Csj7Ow5Q.mjs +8 -0
  3. package/dist/{application-DYfVZVPT.mjs → application-gWUyKuzv.mjs} +211 -1653
  4. package/dist/{application-DYfVZVPT.mjs.map → application-gWUyKuzv.mjs.map} +1 -1
  5. package/dist/brand-BZJCv6UY.mjs +28 -0
  6. package/dist/brand-BZJCv6UY.mjs.map +1 -0
  7. package/dist/cli/index.mjs +38 -20
  8. package/dist/cli/index.mjs.map +1 -1
  9. package/dist/cli/lib.d.mts +20 -33
  10. package/dist/cli/lib.mjs +10 -5
  11. package/dist/cli/lib.mjs.map +1 -1
  12. package/dist/configure/index.d.mts +4 -4
  13. package/dist/configure/index.mjs +10 -19
  14. package/dist/configure/index.mjs.map +1 -1
  15. package/dist/enum-constants-Cwd4qdpa.mjs +115 -0
  16. package/dist/enum-constants-Cwd4qdpa.mjs.map +1 -0
  17. package/dist/file-utils-cqcpFk87.mjs +139 -0
  18. package/dist/file-utils-cqcpFk87.mjs.map +1 -0
  19. package/dist/index-BKXch-td.d.mts +18 -0
  20. package/dist/index-C3Ib7pFc.d.mts +18 -0
  21. package/dist/{index-qQXpN674.d.mts → index-DP8EB9FK.d.mts} +13 -5
  22. package/dist/index-SqWgrTnF.d.mts +20 -0
  23. package/dist/index-sSDpuVQY.d.mts +18 -0
  24. package/dist/{jiti-BrELlEYT.mjs → jiti-DHlauMCo.mjs} +2 -2
  25. package/dist/{jiti-BrELlEYT.mjs.map → jiti-DHlauMCo.mjs.map} +1 -1
  26. package/dist/{job-CULA2Pvf.mjs → job-2Q82qQ6N.mjs} +27 -5
  27. package/dist/job-2Q82qQ6N.mjs.map +1 -0
  28. package/dist/kysely-type-DtUUoAi3.mjs +259 -0
  29. package/dist/kysely-type-DtUUoAi3.mjs.map +1 -0
  30. package/dist/plugin/builtin/enum-constants/index.d.mts +4 -0
  31. package/dist/plugin/builtin/enum-constants/index.mjs +3 -0
  32. package/dist/plugin/builtin/file-utils/index.d.mts +4 -0
  33. package/dist/plugin/builtin/file-utils/index.mjs +3 -0
  34. package/dist/plugin/builtin/kysely-type/index.d.mts +4 -0
  35. package/dist/plugin/builtin/kysely-type/index.mjs +3 -0
  36. package/dist/plugin/builtin/seed/index.d.mts +4 -0
  37. package/dist/plugin/builtin/seed/index.mjs +3 -0
  38. package/dist/plugin/index.d.mts +3 -3
  39. package/dist/plugin/index.mjs +11 -11
  40. package/dist/plugin/index.mjs.map +1 -1
  41. package/dist/{schema-R5TxC5Pn.mjs → schema-WDvc7Zel.mjs} +4 -3
  42. package/dist/schema-WDvc7Zel.mjs.map +1 -0
  43. package/dist/seed-Dm7lrGZ3.mjs +1050 -0
  44. package/dist/seed-Dm7lrGZ3.mjs.map +1 -0
  45. package/dist/{src-DMROgdcL.mjs → src-i4uqS1G4.mjs} +2 -2
  46. package/dist/{src-DMROgdcL.mjs.map → src-i4uqS1G4.mjs.map} +1 -1
  47. package/dist/types-Bhl_wAM2.d.mts +151 -0
  48. package/dist/{types-b-ig8nW_.mjs → types-ClK_HJ0G.mjs} +1 -1
  49. package/dist/{types-b-ig8nW_.mjs.map → types-ClK_HJ0G.mjs.map} +1 -1
  50. package/dist/{types-DzvazVmg.d.mts → types-DdvTxFiD.d.mts} +1380 -1044
  51. package/dist/{update-DQKCUNmr.mjs → update-BoNKMti-.mjs} +285 -112
  52. package/dist/update-BoNKMti-.mjs.map +1 -0
  53. package/dist/utils/test/index.d.mts +4 -4
  54. package/dist/utils/test/index.mjs +3 -2
  55. package/dist/utils/test/index.mjs.map +1 -1
  56. package/docs/cli/application.md +106 -14
  57. package/docs/cli/auth.md +92 -12
  58. package/docs/cli/completion.md +18 -2
  59. package/docs/cli/executor.md +122 -14
  60. package/docs/cli/function.md +32 -4
  61. package/docs/cli/secret.md +134 -18
  62. package/docs/cli/staticwebsite.md +60 -8
  63. package/docs/cli/tailordb.md +148 -20
  64. package/docs/cli/user.md +154 -22
  65. package/docs/cli/workflow.md +100 -12
  66. package/docs/cli/workspace.md +274 -38
  67. package/docs/generator/custom.md +2 -2
  68. package/docs/plugin/custom.md +270 -163
  69. package/docs/plugin/index.md +48 -2
  70. package/package.json +22 -2
  71. package/dist/application-D5ZEr4zk.mjs +0 -4
  72. package/dist/job-CULA2Pvf.mjs.map +0 -1
  73. package/dist/schema-R5TxC5Pn.mjs.map +0 -1
  74. package/dist/types-BeNtD-fA.d.mts +0 -369
  75. package/dist/update-DQKCUNmr.mjs.map +0 -1
  76. /package/dist/{chunk-GMkBE123.mjs → chunk-CqAI0b6X.mjs} +0 -0
@@ -1,5 +1,5 @@
1
- import { t as db } from "./schema-R5TxC5Pn.mjs";
2
- import { $ as UserProfileProviderConfig_UserProfileProviderType, A as TailorDBGQLPermission_Action, B as ExecutorJobStatus, C as platformBaseUrl, D as WorkspacePlatformUserRole, E as readPackageJson, F as TailorDBType_PermitAction, G as AuthOAuth2Client_ClientType, H as ExecutorTriggerType, I as PipelineResolver_OperationType, J as AuthSCIMAttribute_Type, K as AuthOAuth2Client_GrantType, L as IdPLang, M as TailorDBGQLPermission_Permit, N as TailorDBType_Permission_Operator, O as WorkflowExecution_Status, P as TailorDBType_Permission_Permit, Q as TenantProviderConfig_TenantProviderType, R as FunctionExecution_Status, S as initOperatorClient, T as userAgent, U as AuthIDPConfig_AuthType, V as ExecutorTargetType, W as AuthInvokerSchema, X as AuthSCIMConfig_AuthorizationType, Y as AuthSCIMAttribute_Uniqueness, _ as writePlatformConfig, a as hasDependency, at as ApplicationSchemaUpdateAttemptStatus, c as OAuth2ClientSchema, ct as styles, et as GetApplicationSchemaHealthResponse_ApplicationSchemaHealthStatus, f as loadAccessToken, g as readPlatformConfig, h as loadWorkspaceId, i as loadConfig, it as PageDirection, j as TailorDBGQLPermission_Operator, k as WorkflowJobExecution_Status, l as stringifyFunction, lt as symbols, m as loadOrganizationId, n as generatePluginFilesIfNeeded, nt as Condition_Operator, o as getDistDir, ot as Subgraph_ServiceType, p as loadFolderId, q as AuthSCIMAttribute_Mutability, r as loadApplication, rt as FilterSchema, s as createExecutorService, st as logger, t as defineApplication, tt as ConditionSchema, u as tailorUserMap, v as fetchAll, w as resolveStaticWebsiteUrls, y as fetchMachineUserToken } from "./application-DYfVZVPT.mjs";
1
+ import { t as db } from "./schema-WDvc7Zel.mjs";
2
+ import { $ as UserProfileProviderConfig_UserProfileProviderType, A as TailorDBGQLPermission_Action, B as ExecutorJobStatus, C as platformBaseUrl, D as WorkspacePlatformUserRole, E as readPackageJson, F as TailorDBType_PermitAction, G as AuthOAuth2Client_ClientType, H as ExecutorTriggerType, I as PipelineResolver_OperationType, J as AuthSCIMAttribute_Type, K as AuthOAuth2Client_GrantType, L as IdPLang, M as TailorDBGQLPermission_Permit, N as TailorDBType_Permission_Operator, O as WorkflowExecution_Status, P as TailorDBType_Permission_Permit, Q as TenantProviderConfig_TenantProviderType, R as FunctionExecution_Status, S as initOperatorClient, T as userAgent, U as AuthIDPConfig_AuthType, V as ExecutorTargetType, W as AuthInvokerSchema, X as AuthSCIMConfig_AuthorizationType, Y as AuthSCIMAttribute_Uniqueness, _ as writePlatformConfig, a as buildExecutorArgsExpr, at as ApplicationSchemaUpdateAttemptStatus, c as createExecutorService, ct as styles, et as GetApplicationSchemaHealthResponse_ApplicationSchemaHealthStatus, f as loadAccessToken, g as readPlatformConfig, h as loadWorkspaceId, i as loadConfig, it as PageDirection, j as TailorDBGQLPermission_Operator, k as WorkflowJobExecution_Status, l as OAuth2ClientSchema, lt as symbols, m as loadOrganizationId, n as generatePluginFilesIfNeeded, nt as Condition_Operator, o as buildResolverOperationHookExpr, ot as Subgraph_ServiceType, p as loadFolderId, q as AuthSCIMAttribute_Mutability, r as loadApplication, rt as FilterSchema, s as getDistDir, st as logger, t as defineApplication, tt as ConditionSchema, u as stringifyFunction, v as fetchAll, w as resolveStaticWebsiteUrls, y as fetchMachineUserToken } from "./application-gWUyKuzv.mjs";
3
3
  import { createRequire } from "node:module";
4
4
  import { arg, defineCommand, runCommand } from "politty";
5
5
  import { z } from "zod";
@@ -373,6 +373,35 @@ function resolvePackageDirectory(startDir) {
373
373
  }
374
374
  }
375
375
 
376
+ //#endregion
377
+ //#region src/parser/plugin-config/generation-types.ts
378
+ /**
379
+ * Derives generation-time dependency set from hook presence on a plugin.
380
+ * @param plugin - Plugin to check for generation hooks
381
+ * @param plugin.onTailorDBReady - TailorDB phase-complete hook
382
+ * @param plugin.onResolverReady - Resolver phase-complete hook
383
+ * @param plugin.onExecutorReady - Executor phase-complete hook
384
+ * @returns Set of dependency kinds based on which hooks are implemented
385
+ */
386
+ function getPluginGenerationDependencies(plugin) {
387
+ const deps = /* @__PURE__ */ new Set();
388
+ if (plugin.onTailorDBReady) deps.add("tailordb");
389
+ if (plugin.onResolverReady) deps.add("resolver");
390
+ if (plugin.onExecutorReady) deps.add("executor");
391
+ return deps;
392
+ }
393
+ /**
394
+ * Checks if a plugin has any generation-time hooks.
395
+ * @param plugin - Plugin to check
396
+ * @param plugin.onTailorDBReady - TailorDB phase-complete hook
397
+ * @param plugin.onResolverReady - Resolver phase-complete hook
398
+ * @param plugin.onExecutorReady - Executor phase-complete hook
399
+ * @returns True if the plugin has at least one generation hook
400
+ */
401
+ function hasGenerationHooks(plugin) {
402
+ return !!(plugin.onTailorDBReady || plugin.onResolverReady || plugin.onExecutorReady);
403
+ }
404
+
376
405
  //#endregion
377
406
  //#region src/plugin/manager.ts
378
407
  /**
@@ -409,13 +438,13 @@ var PluginManager = class {
409
438
  success: false,
410
439
  error: `Plugin "${plugin.id}" requires typeConfig, but none was provided for type "${context.type.name}".`
411
440
  };
412
- if (!plugin.processType) return {
441
+ if (!plugin.onTypeLoaded) return {
413
442
  success: false,
414
- error: `Plugin "${plugin.id}" does not support type-attached processing (missing processType method). Use processNamespace via definePlugins() instead.`
443
+ error: `Plugin "${plugin.id}" does not support type-attached processing (missing onTypeLoaded method). Use onNamespaceLoaded via definePlugins() instead.`
415
444
  };
416
445
  let output;
417
446
  try {
418
- output = await plugin.processType({
447
+ output = await plugin.onTypeLoaded({
419
448
  type: context.type,
420
449
  typeConfig: context.typeConfig,
421
450
  pluginConfig: plugin.pluginConfig,
@@ -453,14 +482,14 @@ var PluginManager = class {
453
482
  }
454
483
  /**
455
484
  * Process namespace plugins that don't require a source type.
456
- * This method is called once per namespace for plugins with processNamespace method.
485
+ * This method is called once per namespace for plugins with onNamespaceLoaded method.
457
486
  * @param namespace - The target namespace for generated types
458
487
  * @returns Array of results with plugin outputs and configs
459
488
  */
460
489
  async processNamespacePlugins(namespace) {
461
490
  const results = [];
462
491
  for (const [pluginId, plugin] of this.plugins) {
463
- if (!plugin.processNamespace) continue;
492
+ if (!plugin.onNamespaceLoaded) continue;
464
493
  const config = plugin.pluginConfig;
465
494
  const context = {
466
495
  pluginConfig: config,
@@ -468,7 +497,7 @@ var PluginManager = class {
468
497
  };
469
498
  let output;
470
499
  try {
471
- output = await plugin.processNamespace(context);
500
+ output = await plugin.onNamespaceLoaded(context);
472
501
  } catch (error) {
473
502
  const message = error instanceof Error ? error.message : String(error);
474
503
  results.push({
@@ -520,11 +549,11 @@ var PluginManager = class {
520
549
  return results;
521
550
  }
522
551
  /**
523
- * Get plugins that have processNamespace method
552
+ * Get plugins that have onNamespaceLoaded method
524
553
  * @returns Array of plugin IDs that support namespace processing
525
554
  */
526
555
  getNamespacePluginIds() {
527
- return Array.from(this.plugins.entries()).filter(([, plugin]) => plugin.processNamespace !== void 0).map(([id]) => id);
556
+ return Array.from(this.plugins.entries()).filter(([, plugin]) => plugin.onNamespaceLoaded !== void 0).map(([id]) => id);
528
557
  }
529
558
  /**
530
559
  * Get the count of registered plugins
@@ -582,6 +611,23 @@ var PluginManager = class {
582
611
  return this.generatedExecutors.filter((info) => info.namespace === namespace);
583
612
  }
584
613
  /**
614
+ * Get plugins that have any generation-time hooks.
615
+ * @returns Array of plugins with generation hooks
616
+ */
617
+ getPluginsWithGenerationHooks() {
618
+ return Array.from(this.plugins.values()).filter((plugin) => hasGenerationHooks(plugin));
619
+ }
620
+ /**
621
+ * Get the generation-time dependencies for a specific plugin.
622
+ * @param pluginId - The plugin ID to look up
623
+ * @returns Set of dependency kinds, or empty set if plugin not found
624
+ */
625
+ getPluginGenerationDependencies(pluginId) {
626
+ const plugin = this.plugins.get(pluginId);
627
+ if (!plugin) return /* @__PURE__ */ new Set();
628
+ return getPluginGenerationDependencies(plugin);
629
+ }
630
+ /**
585
631
  * Generate plugin files (types and executors) and store the executor file paths.
586
632
  * @param params - Parameters for file generation
587
633
  * @returns Generated executor file paths
@@ -738,11 +784,12 @@ function trn$6(workspaceId, name) {
738
784
  async function planApplication(context) {
739
785
  const { client, workspaceId, application, forRemoval } = context;
740
786
  const changeSet = createChangeSet("Applications");
741
- const existingApplications = await fetchAll(async (pageToken) => {
787
+ const existingApplications = await fetchAll(async (pageToken, maxPageSize) => {
742
788
  try {
743
789
  const { applications, nextPageToken } = await client.listApplications({
744
790
  workspaceId,
745
- pageToken
791
+ pageToken,
792
+ pageSize: maxPageSize
746
793
  });
747
794
  return [applications, nextPageToken];
748
795
  } catch (error) {
@@ -769,12 +816,13 @@ async function planApplication(context) {
769
816
  if (idProvider) authIdpConfigName = idProvider.name;
770
817
  } else if (application.config.auth) {
771
818
  authNamespace = application.config.auth.name;
772
- const idpConfigs = await fetchAll(async (pageToken) => {
819
+ const idpConfigs = await fetchAll(async (pageToken, maxPageSize) => {
773
820
  try {
774
821
  const { idpConfigs: idpConfigs$1, nextPageToken } = await client.listAuthIDPConfigs({
775
822
  workspaceId,
776
823
  namespaceName: authNamespace,
777
- pageToken
824
+ pageToken,
825
+ pageSize: maxPageSize
778
826
  });
779
827
  return [idpConfigs$1, nextPageToken];
780
828
  } catch (error) {
@@ -953,11 +1001,12 @@ async function planServices$3(client, workspaceId, appName, idps) {
953
1001
  const conflicts = [];
954
1002
  const unmanaged = [];
955
1003
  const resourceOwners = /* @__PURE__ */ new Set();
956
- const withoutLabel = await fetchAll(async (pageToken) => {
1004
+ const withoutLabel = await fetchAll(async (pageToken, maxPageSize) => {
957
1005
  try {
958
1006
  const { idpServices, nextPageToken } = await client.listIdPServices({
959
1007
  workspaceId,
960
- pageToken
1008
+ pageToken,
1009
+ pageSize: maxPageSize
961
1010
  });
962
1011
  return [idpServices, nextPageToken];
963
1012
  } catch (error) {
@@ -1049,12 +1098,13 @@ async function planServices$3(client, workspaceId, appName, idps) {
1049
1098
  async function planClients(client, workspaceId, idps, deletedServices) {
1050
1099
  const changeSet = createChangeSet("IdP clients");
1051
1100
  const fetchClients = (namespaceName) => {
1052
- return fetchAll(async (pageToken) => {
1101
+ return fetchAll(async (pageToken, maxPageSize) => {
1053
1102
  try {
1054
1103
  const { clients, nextPageToken } = await client.listIdPClients({
1055
1104
  workspaceId,
1056
1105
  namespaceName,
1057
- pageToken
1106
+ pageToken,
1107
+ pageSize: maxPageSize
1058
1108
  });
1059
1109
  return [clients, nextPageToken];
1060
1110
  } catch (error) {
@@ -1223,11 +1273,12 @@ async function planServices$2(client, workspaceId, appName, auths) {
1223
1273
  const conflicts = [];
1224
1274
  const unmanaged = [];
1225
1275
  const resourceOwners = /* @__PURE__ */ new Set();
1226
- const withoutLabel = await fetchAll(async (pageToken) => {
1276
+ const withoutLabel = await fetchAll(async (pageToken, maxPageSize) => {
1227
1277
  try {
1228
1278
  const { authServices, nextPageToken } = await client.listAuthServices({
1229
1279
  workspaceId,
1230
- pageToken
1280
+ pageToken,
1281
+ pageSize: maxPageSize
1231
1282
  });
1232
1283
  return [authServices, nextPageToken];
1233
1284
  } catch (error) {
@@ -1299,12 +1350,13 @@ async function planServices$2(client, workspaceId, appName, auths) {
1299
1350
  async function planIdPConfigs(client, workspaceId, auths, deletedServices) {
1300
1351
  const changeSet = createChangeSet("Auth idpConfigs");
1301
1352
  const fetchIdPConfigs = (namespaceName) => {
1302
- return fetchAll(async (pageToken) => {
1353
+ return fetchAll(async (pageToken, maxPageSize) => {
1303
1354
  try {
1304
1355
  const { idpConfigs, nextPageToken } = await client.listAuthIDPConfigs({
1305
1356
  workspaceId,
1306
1357
  namespaceName,
1307
- pageToken
1358
+ pageToken,
1359
+ pageSize: maxPageSize
1308
1360
  });
1309
1361
  return [idpConfigs, nextPageToken];
1310
1362
  } catch (error) {
@@ -1602,12 +1654,13 @@ function protoTenantConfig(tenantConfig) {
1602
1654
  async function planMachineUsers(client, workspaceId, auths, deletedServices) {
1603
1655
  const changeSet = createChangeSet("Auth machineUsers");
1604
1656
  const fetchMachineUsers = (authNamespace) => {
1605
- return fetchAll(async (pageToken) => {
1657
+ return fetchAll(async (pageToken, maxPageSize) => {
1606
1658
  try {
1607
1659
  const { machineUsers, nextPageToken } = await client.listAuthMachineUsers({
1608
1660
  workspaceId,
1609
1661
  authNamespace,
1610
- pageToken
1662
+ pageToken,
1663
+ pageSize: maxPageSize
1611
1664
  });
1612
1665
  return [machineUsers, nextPageToken];
1613
1666
  } catch (error) {
@@ -1680,12 +1733,13 @@ function protoMachineUserAttributeMap(attributeMap) {
1680
1733
  async function planOAuth2Clients(client, workspaceId, auths, deletedServices) {
1681
1734
  const changeSet = createChangeSet("Auth oauth2Clients");
1682
1735
  const fetchOAuth2Clients = (namespaceName) => {
1683
- return fetchAll(async (pageToken) => {
1736
+ return fetchAll(async (pageToken, maxPageSize) => {
1684
1737
  try {
1685
1738
  const { oauth2Clients, nextPageToken } = await client.listAuthOAuth2Clients({
1686
1739
  workspaceId,
1687
1740
  namespaceName,
1688
- pageToken
1741
+ pageToken,
1742
+ pageSize: maxPageSize
1689
1743
  });
1690
1744
  return [oauth2Clients, nextPageToken];
1691
1745
  } catch (error) {
@@ -2135,7 +2189,7 @@ function workflowJobFunctionName(jobName) {
2135
2189
  function collectFunctionEntries(application, workflowJobs) {
2136
2190
  const entries = [];
2137
2191
  const distDir = getDistDir();
2138
- for (const app of application.applications) for (const pipeline of app.resolverServices) for (const resolver of Object.values(pipeline.getResolvers())) {
2192
+ for (const app of application.applications) for (const pipeline of app.resolverServices) for (const resolver of Object.values(pipeline.resolvers)) {
2139
2193
  const scriptPath = path.join(distDir, "resolvers", `${resolver.name}.js`);
2140
2194
  try {
2141
2195
  const content = fs$2.readFileSync(scriptPath, "utf-8");
@@ -2150,7 +2204,7 @@ function collectFunctionEntries(application, workflowJobs) {
2150
2204
  }
2151
2205
  }
2152
2206
  if (application.executorService) {
2153
- const executors = application.executorService.getExecutors();
2207
+ const executors = application.executorService.executors;
2154
2208
  for (const executor of Object.values(executors)) if (executor.operation.kind === "function" || executor.operation.kind === "jobFunction") {
2155
2209
  const scriptPath = path.join(distDir, "executors", `${executor.name}.js`);
2156
2210
  try {
@@ -2195,11 +2249,12 @@ async function planFunctionRegistry(client, workspaceId, appName, entries) {
2195
2249
  const conflicts = [];
2196
2250
  const unmanaged = [];
2197
2251
  const resourceOwners = /* @__PURE__ */ new Set();
2198
- const existingFunctions = await fetchAll(async (pageToken) => {
2252
+ const existingFunctions = await fetchAll(async (pageToken, maxPageSize) => {
2199
2253
  try {
2200
2254
  const response = await client.listFunctionRegistries({
2201
2255
  workspaceId,
2202
- pageToken
2256
+ pageToken,
2257
+ pageSize: maxPageSize
2203
2258
  });
2204
2259
  return [response.functions.map((f) => ({
2205
2260
  name: f.name,
@@ -2362,11 +2417,12 @@ async function planExecutor(context) {
2362
2417
  const conflicts = [];
2363
2418
  const unmanaged = [];
2364
2419
  const resourceOwners = /* @__PURE__ */ new Set();
2365
- const withoutLabel = await fetchAll(async (pageToken) => {
2420
+ const withoutLabel = await fetchAll(async (pageToken, maxPageSize) => {
2366
2421
  try {
2367
2422
  const { executors: executors$1, nextPageToken } = await client.listExecutorExecutors({
2368
2423
  workspaceId,
2369
- pageToken
2424
+ pageToken,
2425
+ pageSize: maxPageSize
2370
2426
  });
2371
2427
  return [executors$1, nextPageToken];
2372
2428
  } catch (error) {
@@ -2433,33 +2489,11 @@ async function planExecutor(context) {
2433
2489
  resourceOwners
2434
2490
  };
2435
2491
  }
2436
- const actorTransformExpr = `actor: args.actor ? (({ attributeMap, attributes: attrList, ...rest }) => ({ ...rest, attributes: attributeMap, attributeList: attrList }))(args.actor) : null`;
2437
- /**
2438
- * Build args expression for resolverExecuted trigger.
2439
- * Transforms server's succeeded/failed fields to success/result/error fields.
2440
- * @param additionalFields - Additional fields to include in the args expression
2441
- * @returns JavaScript expression for resolverExecuted trigger args
2442
- */
2443
- function buildResolverExecutedArgsExpr(additionalFields) {
2444
- const baseFields = `...args, appNamespace: args.namespaceName, ${actorTransformExpr}, success: !!args.succeeded, result: args.succeeded?.result.resolver, error: args.failed?.error`;
2445
- return additionalFields ? `({ ${baseFields}, ${additionalFields} })` : `({ ${baseFields} })`;
2446
- }
2447
- /**
2448
- * Build args expression for incomingWebhook trigger.
2449
- * Transforms server's raw_body field to rawBody field.
2450
- * @param additionalFields - Additional fields to include in the args expression
2451
- * @returns JavaScript expression for incomingWebhook trigger args
2452
- */
2453
- function buildIncomingWebhookArgsExpr(additionalFields) {
2454
- const baseFields = `...args, appNamespace: args.namespaceName, rawBody: args.raw_body`;
2455
- return additionalFields ? `({ ${baseFields}, ${additionalFields} })` : `({ ${baseFields} })`;
2456
- }
2457
2492
  function protoExecutor(appName, executor, env) {
2458
2493
  const trigger = executor.trigger;
2459
2494
  let triggerType;
2460
2495
  let triggerConfig;
2461
- const envField = `env: ${JSON.stringify(env)}`;
2462
- const baseArgsExpr = `({ ...args, appNamespace: args.namespaceName, ${actorTransformExpr}, ${envField} })`;
2496
+ const argsExpr = buildExecutorArgsExpr(trigger.kind, env);
2463
2497
  const eventType = {
2464
2498
  recordCreated: "tailordb.type_record.created",
2465
2499
  recordUpdated: "tailordb.type_record.updated",
@@ -2491,7 +2525,7 @@ function protoExecutor(appName, executor, env) {
2491
2525
  case: "event",
2492
2526
  value: {
2493
2527
  eventType: eventType[trigger.kind],
2494
- condition: { expr: [`args.typeName === "${trigger.typeName}"`, ...trigger.condition ? [`(${stringifyFunction(trigger.condition)})(${baseArgsExpr})`] : []].join(" && ") }
2528
+ condition: { expr: [`args.typeName === "${trigger.typeName}"`, ...trigger.condition ? [`(${stringifyFunction(trigger.condition)})(${argsExpr})`] : []].join(" && ") }
2495
2529
  }
2496
2530
  } };
2497
2531
  break;
@@ -2501,7 +2535,7 @@ function protoExecutor(appName, executor, env) {
2501
2535
  case: "event",
2502
2536
  value: {
2503
2537
  eventType: eventType[trigger.kind],
2504
- condition: { expr: [`args.resolverName === "${trigger.resolverName}"`, ...trigger.condition ? [`(${stringifyFunction(trigger.condition)})(${buildResolverExecutedArgsExpr(envField)})`] : []].join(" && ") }
2538
+ condition: { expr: [`args.resolverName === "${trigger.resolverName}"`, ...trigger.condition ? [`(${stringifyFunction(trigger.condition)})(${argsExpr})`] : []].join(" && ") }
2505
2539
  }
2506
2540
  } };
2507
2541
  break;
@@ -2529,7 +2563,6 @@ function protoExecutor(appName, executor, env) {
2529
2563
  const target = executor.operation;
2530
2564
  let targetType;
2531
2565
  let targetConfig;
2532
- const argsExpr = trigger.kind === "resolverExecuted" ? buildResolverExecutedArgsExpr(envField) : trigger.kind === "incomingWebhook" ? buildIncomingWebhookArgsExpr(envField) : baseArgsExpr;
2533
2566
  switch (target.kind) {
2534
2567
  case "webhook":
2535
2568
  targetType = ExecutorTargetType.WEBHOOK;
@@ -2701,11 +2734,12 @@ async function planServices$1(client, workspaceId, appName, pipelines) {
2701
2734
  const conflicts = [];
2702
2735
  const unmanaged = [];
2703
2736
  const resourceOwners = /* @__PURE__ */ new Set();
2704
- const withoutLabel = await fetchAll(async (pageToken) => {
2737
+ const withoutLabel = await fetchAll(async (pageToken, maxPageSize) => {
2705
2738
  try {
2706
2739
  const { pipelineServices, nextPageToken } = await client.listPipelineServices({
2707
2740
  workspaceId,
2708
- pageToken
2741
+ pageToken,
2742
+ pageSize: maxPageSize
2709
2743
  });
2710
2744
  return [pipelineServices, nextPageToken];
2711
2745
  } catch (error) {
@@ -2774,12 +2808,13 @@ async function planServices$1(client, workspaceId, appName, pipelines) {
2774
2808
  async function planResolvers(client, workspaceId, pipelines, executors, deletedServices, env) {
2775
2809
  const changeSet = createChangeSet("Pipeline resolvers");
2776
2810
  const fetchResolvers = (namespaceName) => {
2777
- return fetchAll(async (pageToken) => {
2811
+ return fetchAll(async (pageToken, maxPageSize) => {
2778
2812
  try {
2779
2813
  const { pipelineResolvers, nextPageToken } = await client.listPipelineResolvers({
2780
2814
  workspaceId,
2781
2815
  namespaceName,
2782
- pageToken
2816
+ pageToken,
2817
+ pageSize: maxPageSize
2783
2818
  });
2784
2819
  return [pipelineResolvers, nextPageToken];
2785
2820
  } catch (error) {
@@ -2796,7 +2831,7 @@ async function planResolvers(client, workspaceId, pipelines, executors, deletedS
2796
2831
  existingResolvers.forEach((resolver) => {
2797
2832
  existingNameSet.add(resolver.name);
2798
2833
  });
2799
- for (const resolver of Object.values(pipeline.getResolvers())) if (existingNameSet.has(resolver.name)) {
2834
+ for (const resolver of Object.values(pipeline.resolvers)) if (existingNameSet.has(resolver.name)) {
2800
2835
  changeSet.updates.push({
2801
2836
  name: resolver.name,
2802
2837
  request: {
@@ -2844,7 +2879,7 @@ function processResolver(namespace, resolver, executorUsedResolvers, env) {
2844
2879
  description: `${resolver.name} function body`,
2845
2880
  operationType: PipelineResolver_OperationType.FUNCTION,
2846
2881
  operationSourceRef: resolverFunctionName(namespace, resolver.name),
2847
- operationHook: { expr: `({ ...context.pipeline, input: context.args, user: ${tailorUserMap}, env: ${JSON.stringify(env)} });` },
2882
+ operationHook: { expr: buildResolverOperationHookExpr(env) },
2848
2883
  postScript: `args.body`
2849
2884
  }];
2850
2885
  const typeBaseName = inflection.camelize(resolver.name);
@@ -2931,11 +2966,12 @@ async function planStaticWebsite(context) {
2931
2966
  const conflicts = [];
2932
2967
  const unmanaged = [];
2933
2968
  const resourceOwners = /* @__PURE__ */ new Set();
2934
- const withoutLabel = await fetchAll(async (pageToken) => {
2969
+ const withoutLabel = await fetchAll(async (pageToken, maxPageSize) => {
2935
2970
  try {
2936
2971
  const { staticwebsites, nextPageToken } = await client.listStaticWebsites({
2937
2972
  workspaceId,
2938
- pageToken
2973
+ pageToken,
2974
+ pageSize: maxPageSize
2939
2975
  });
2940
2976
  return [staticwebsites, nextPageToken];
2941
2977
  } catch (error) {
@@ -4652,12 +4688,13 @@ function groupMigrationsByNamespace(migrations) {
4652
4688
  * @returns {Promise<ProtoTailorDBType[]>} Remote TailorDB types
4653
4689
  */
4654
4690
  async function fetchRemoteTypes(client, workspaceId, namespace) {
4655
- return fetchAll(async (pageToken) => {
4691
+ return fetchAll(async (pageToken, maxPageSize) => {
4656
4692
  try {
4657
4693
  const { tailordbTypes, nextPageToken } = await client.listTailorDBTypes({
4658
4694
  workspaceId,
4659
4695
  namespaceName: namespace,
4660
- pageToken
4696
+ pageToken,
4697
+ pageSize: maxPageSize
4661
4698
  });
4662
4699
  return [tailordbTypes, nextPageToken];
4663
4700
  } catch (error) {
@@ -4824,7 +4861,7 @@ async function applyTailorDB(client, result, phase = "create-update") {
4824
4861
  let pendingMigrations = [];
4825
4862
  const typesByNamespace = /* @__PURE__ */ new Map();
4826
4863
  for (const tailordb of migrationContext.application.tailorDBServices) {
4827
- const types = tailordb.getTypes();
4864
+ const types = tailordb.types;
4828
4865
  if (types) typesByNamespace.set(tailordb.namespace, types);
4829
4866
  }
4830
4867
  pendingMigrations = await validateAndDetectMigrations(client, migrationContext.workspaceId, typesByNamespace, migrationContext.config, migrationContext.noSchemaCheck);
@@ -5153,11 +5190,12 @@ async function planServices(client, workspaceId, appName, tailordbs) {
5153
5190
  const conflicts = [];
5154
5191
  const unmanaged = [];
5155
5192
  const resourceOwners = /* @__PURE__ */ new Set();
5156
- const withoutLabel = await fetchAll(async (pageToken) => {
5193
+ const withoutLabel = await fetchAll(async (pageToken, maxPageSize) => {
5157
5194
  try {
5158
5195
  const { tailordbServices, nextPageToken } = await client.listTailorDBServices({
5159
5196
  workspaceId,
5160
- pageToken
5197
+ pageToken,
5198
+ pageSize: maxPageSize
5161
5199
  });
5162
5200
  return [tailordbServices, nextPageToken];
5163
5201
  } catch (error) {
@@ -5224,12 +5262,13 @@ async function planServices(client, workspaceId, appName, tailordbs) {
5224
5262
  async function planTypes(client, workspaceId, tailordbs, executors, deletedServices, filteredTypesByNamespace) {
5225
5263
  const changeSet = createChangeSet("TailorDB types");
5226
5264
  const fetchTypes = (namespaceName) => {
5227
- return fetchAll(async (pageToken) => {
5265
+ return fetchAll(async (pageToken, maxPageSize) => {
5228
5266
  try {
5229
5267
  const { tailordbTypes, nextPageToken } = await client.listTailorDBTypes({
5230
5268
  workspaceId,
5231
5269
  namespaceName,
5232
- pageToken
5270
+ pageToken,
5271
+ pageSize: maxPageSize
5233
5272
  });
5234
5273
  return [tailordbTypes, nextPageToken];
5235
5274
  } catch (error) {
@@ -5244,7 +5283,7 @@ async function planTypes(client, workspaceId, tailordbs, executors, deletedServi
5244
5283
  const existingTypes = await fetchTypes(tailordb.namespace);
5245
5284
  const existingNameSet = /* @__PURE__ */ new Set();
5246
5285
  existingTypes.forEach((type) => existingNameSet.add(type.name));
5247
- const types = filteredTypesByNamespace?.get(tailordb.namespace) ?? tailordb.getTypes();
5286
+ const types = filteredTypesByNamespace?.get(tailordb.namespace) ?? tailordb.types;
5248
5287
  for (const typeName of Object.keys(types)) {
5249
5288
  const tailordbType = generateTailorDBTypeManifest(types[typeName], executorUsedTypes, tailordb.config.gqlOperations);
5250
5289
  if (existingNameSet.has(typeName)) {
@@ -5524,12 +5563,13 @@ function protoOperand(operand) {
5524
5563
  async function planGqlPermissions(client, workspaceId, tailordbs, deletedServices) {
5525
5564
  const changeSet = createChangeSet("TailorDB gqlPermissions");
5526
5565
  const fetchGqlPermissions = (namespaceName) => {
5527
- return fetchAll(async (pageToken) => {
5566
+ return fetchAll(async (pageToken, maxPageSize) => {
5528
5567
  try {
5529
5568
  const { permissions, nextPageToken } = await client.listTailorDBGQLPermissions({
5530
5569
  workspaceId,
5531
5570
  namespaceName,
5532
- pageToken
5571
+ pageToken,
5572
+ pageSize: maxPageSize
5533
5573
  });
5534
5574
  return [permissions, nextPageToken];
5535
5575
  } catch (error) {
@@ -5544,7 +5584,7 @@ async function planGqlPermissions(client, workspaceId, tailordbs, deletedService
5544
5584
  existingGqlPermissions.forEach((gqlPermission) => {
5545
5585
  existingNameSet.add(gqlPermission.typeName);
5546
5586
  });
5547
- const types = tailordb.getTypes();
5587
+ const types = tailordb.types;
5548
5588
  for (const typeName of Object.keys(types)) {
5549
5589
  const gqlPermission = types[typeName].permissions.gql;
5550
5590
  if (!gqlPermission) continue;
@@ -5809,10 +5849,11 @@ async function registerJobFunctions(client, changeSet, appName) {
5809
5849
  const { workspaceId } = firstWorkflow;
5810
5850
  const allUsedJobNames = /* @__PURE__ */ new Set();
5811
5851
  for (const item of [...changeSet.creates, ...changeSet.updates]) for (const jobName of item.usedJobNames) allUsedJobNames.add(jobName);
5812
- const existingJobFunctions = await fetchAll(async (pageToken) => {
5852
+ const existingJobFunctions = await fetchAll(async (pageToken, maxPageSize) => {
5813
5853
  const response = await client.listWorkflowJobFunctions({
5814
5854
  workspaceId,
5815
- pageToken
5855
+ pageToken,
5856
+ pageSize: maxPageSize
5816
5857
  });
5817
5858
  return [response.jobFunctions.map((j) => j.name), response.nextPageToken];
5818
5859
  });
@@ -5864,10 +5905,11 @@ async function planWorkflow(client, workspaceId, appName, workflows, mainJobDeps
5864
5905
  const conflicts = [];
5865
5906
  const unmanaged = [];
5866
5907
  const resourceOwners = /* @__PURE__ */ new Set();
5867
- const withoutLabel = await fetchAll(async (pageToken) => {
5908
+ const withoutLabel = await fetchAll(async (pageToken, maxPageSize) => {
5868
5909
  const response = await client.listWorkflows({
5869
5910
  workspaceId,
5870
- pageToken
5911
+ pageToken,
5912
+ pageSize: maxPageSize
5871
5913
  });
5872
5914
  return [response.workflows.map((w) => ({
5873
5915
  id: w.id,
@@ -5950,7 +5992,7 @@ async function apply(options) {
5950
5992
  config,
5951
5993
  configPath: config.path
5952
5994
  });
5953
- const { application, workflowResult, workflowBuildResult } = await loadApplication({
5995
+ const { application, workflowBuildResult } = await loadApplication({
5954
5996
  config,
5955
5997
  pluginManager
5956
5998
  });
@@ -5963,7 +6005,8 @@ async function apply(options) {
5963
6005
  workspaceId: options?.workspaceId,
5964
6006
  profile: options?.profile
5965
6007
  });
5966
- const functionEntries = collectFunctionEntries(application, workflowResult?.jobs ?? []);
6008
+ const workflowService = application.workflowService;
6009
+ const functionEntries = collectFunctionEntries(application, workflowService?.jobs ?? []);
5967
6010
  const ctx = {
5968
6011
  client,
5969
6012
  workspaceId,
@@ -5980,7 +6023,7 @@ async function apply(options) {
5980
6023
  const pipeline = await planPipeline(ctx);
5981
6024
  const app = await planApplication(ctx);
5982
6025
  const executor = await planExecutor(ctx);
5983
- const workflow = await planWorkflow(client, workspaceId, application.name, workflowResult?.workflows ?? {}, workflowBuildResult?.mainJobDeps ?? {});
6026
+ const workflow = await planWorkflow(client, workspaceId, application.name, workflowService?.workflows ?? {}, workflowBuildResult?.mainJobDeps ?? {});
5984
6027
  const allConflicts = [
5985
6028
  ...functionRegistry.conflicts,
5986
6029
  ...tailorDB.conflicts,
@@ -6653,10 +6696,11 @@ async function listWorkflowExecutions(options) {
6653
6696
  }) }));
6654
6697
  }
6655
6698
  const filter = filters.length > 0 ? create(FilterSchema, { and: filters }) : void 0;
6656
- return (await fetchAll(async (pageToken) => {
6699
+ return (await fetchAll(async (pageToken, maxPageSize) => {
6657
6700
  const { executions, nextPageToken } = await client.listWorkflowExecutions({
6658
6701
  workspaceId,
6659
6702
  pageToken,
6703
+ pageSize: maxPageSize,
6660
6704
  pageDirection: PageDirection.DESC,
6661
6705
  filter
6662
6706
  });
@@ -7153,11 +7197,12 @@ async function getExecutorJob(options) {
7153
7197
  if (!job) throw new Error(`Job '${options.jobId}' not found.`);
7154
7198
  const jobInfo = toExecutorJobInfo(job);
7155
7199
  if (options.attempts) {
7156
- const attempts = await fetchAll(async (pageToken) => {
7200
+ const attempts = await fetchAll(async (pageToken, maxPageSize) => {
7157
7201
  const { attempts: attempts$1, nextPageToken } = await client.listExecutorJobAttempts({
7158
7202
  workspaceId,
7159
7203
  jobId: options.jobId,
7160
7204
  pageToken,
7205
+ pageSize: maxPageSize,
7161
7206
  pageDirection: PageDirection.DESC
7162
7207
  });
7163
7208
  return [attempts$1, nextPageToken];
@@ -7209,11 +7254,12 @@ async function watchExecutorJob(options) {
7209
7254
  const coloredStatus = colorizeExecutorJobStatus(jobInfo.status);
7210
7255
  if (job.status === ExecutorJobStatus.SUCCESS) spinner.succeed(`Executor job completed: ${coloredStatus}`);
7211
7256
  else spinner.fail(`Executor job completed: ${coloredStatus}`);
7212
- const attemptInfos = (await fetchAll(async (pageToken) => {
7257
+ const attemptInfos = (await fetchAll(async (pageToken, maxPageSize) => {
7213
7258
  const { attempts, nextPageToken } = await client.listExecutorJobAttempts({
7214
7259
  workspaceId,
7215
7260
  jobId: options.jobId,
7216
7261
  pageToken,
7262
+ pageSize: maxPageSize,
7217
7263
  pageDirection: PageDirection.DESC
7218
7264
  });
7219
7265
  return [attempts, nextPageToken];
@@ -7485,10 +7531,11 @@ async function listExecutors(options) {
7485
7531
  workspaceId: options?.workspaceId,
7486
7532
  profile: options?.profile
7487
7533
  });
7488
- return (await fetchAll(async (pageToken) => {
7534
+ return (await fetchAll(async (pageToken, maxPageSize) => {
7489
7535
  const { executors, nextPageToken } = await client.listExecutorExecutors({
7490
7536
  workspaceId,
7491
- pageToken
7537
+ pageToken,
7538
+ pageSize: maxPageSize
7492
7539
  });
7493
7540
  return [executors, nextPageToken];
7494
7541
  })).map((e) => toExecutorListInfo(e));
@@ -7755,10 +7802,11 @@ async function listWebhookExecutors(options) {
7755
7802
  workspaceId: options?.workspaceId,
7756
7803
  profile: options?.profile
7757
7804
  });
7758
- return (await fetchAll(async (pageToken) => {
7805
+ return (await fetchAll(async (pageToken, maxPageSize) => {
7759
7806
  const { executors, nextPageToken } = await client.listExecutorExecutors({
7760
7807
  workspaceId,
7761
- pageToken
7808
+ pageToken,
7809
+ pageSize: maxPageSize
7762
7810
  });
7763
7811
  return [executors, nextPageToken];
7764
7812
  })).filter((e) => e.triggerType === ExecutorTriggerType.INCOMING_WEBHOOK).map((e) => ({
@@ -7797,6 +7845,19 @@ const webhookCommand = defineCommand({
7797
7845
  }
7798
7846
  });
7799
7847
 
7848
+ //#endregion
7849
+ //#region src/cli/generator/types.ts
7850
+ /**
7851
+ * Type guard to check if a generator has a specific dependency.
7852
+ * @template D
7853
+ * @param generator - Code generator instance
7854
+ * @param dependency - Dependency kind to check
7855
+ * @returns True if the generator has the dependency
7856
+ */
7857
+ function hasDependency(generator, dependency) {
7858
+ return generator.dependencies.includes(dependency);
7859
+ }
7860
+
7800
7861
  //#endregion
7801
7862
  //#region src/cli/generator/watch/index.ts
7802
7863
  /**
@@ -8197,6 +8258,7 @@ function createGenerationManager(params) {
8197
8258
  };
8198
8259
  let watcher = null;
8199
8260
  const generatorResults = {};
8261
+ const generationPlugins = pluginManager?.getPluginsWithGenerationHooks() ?? [];
8200
8262
  function getDeps(gen) {
8201
8263
  return new Set(gen.dependencies);
8202
8264
  }
@@ -8317,12 +8379,112 @@ function createGenerationManager(params) {
8317
8379
  baseDir: path.join(baseDir, gen.id),
8318
8380
  configPath: config.path
8319
8381
  });
8382
+ await writeGeneratedFiles(gen.id, result);
8383
+ }
8384
+ /**
8385
+ * Build TailorDB namespace data array from loaded services.
8386
+ * @returns Array of TailorDB namespace data
8387
+ */
8388
+ function buildTailorDBData() {
8389
+ return Object.entries(services.tailordb).map(([namespace, info]) => ({
8390
+ namespace,
8391
+ types: info.types,
8392
+ sourceInfo: new Map(Object.entries(info.sourceInfo)),
8393
+ pluginAttachments: info.pluginAttachments
8394
+ }));
8395
+ }
8396
+ /**
8397
+ * Build resolver namespace data array from loaded services.
8398
+ * @returns Array of resolver namespace data
8399
+ */
8400
+ function buildResolverData() {
8401
+ return Object.entries(services.resolver).map(([namespace, resolvers]) => ({
8402
+ namespace,
8403
+ resolvers
8404
+ }));
8405
+ }
8406
+ /**
8407
+ * Run a plugin's phase-complete hook and write any generated files.
8408
+ * @param plugin - Plugin to run the hook on
8409
+ * @param hookName - Name of the hook to call
8410
+ * @returns Promise that resolves when hook completes
8411
+ */
8412
+ async function runPluginPhaseHook(plugin, hookName) {
8413
+ if (!plugin[hookName]) return;
8414
+ const pluginBaseDir = path.join(baseDir, plugin.id);
8415
+ const auth = getAuthInput();
8416
+ const tailordb = buildTailorDBData();
8417
+ let result;
8418
+ switch (hookName) {
8419
+ case "onTailorDBReady":
8420
+ result = await plugin.onTailorDBReady({
8421
+ tailordb,
8422
+ auth,
8423
+ baseDir: pluginBaseDir,
8424
+ configPath: config.path,
8425
+ pluginConfig: plugin.pluginConfig
8426
+ });
8427
+ break;
8428
+ case "onResolverReady":
8429
+ result = await plugin.onResolverReady({
8430
+ tailordb,
8431
+ resolvers: buildResolverData(),
8432
+ auth,
8433
+ baseDir: pluginBaseDir,
8434
+ configPath: config.path,
8435
+ pluginConfig: plugin.pluginConfig
8436
+ });
8437
+ break;
8438
+ case "onExecutorReady":
8439
+ result = await plugin.onExecutorReady({
8440
+ tailordb,
8441
+ resolvers: buildResolverData(),
8442
+ executors: { ...services.executor },
8443
+ auth,
8444
+ baseDir: pluginBaseDir,
8445
+ configPath: config.path,
8446
+ pluginConfig: plugin.pluginConfig
8447
+ });
8448
+ break;
8449
+ }
8450
+ await writeGeneratedFiles(plugin.id, result);
8451
+ }
8452
+ /**
8453
+ * Run a specific generation-time hook for all plugins that implement it.
8454
+ * Each hook runs at its natural pipeline phase, ensuring outputs from earlier
8455
+ * phases are available when later phases load resolvers/executors.
8456
+ * @param hookName - Name of the hook to call
8457
+ * @param watch - Whether running in watch mode (suppresses throws)
8458
+ */
8459
+ async function runPluginHook(hookName, watch$1) {
8460
+ const plugins = generationPlugins.filter((p) => p[hookName] != null);
8461
+ if (plugins.length === 0) return;
8462
+ const results = await Promise.allSettled(plugins.map(async (plugin) => {
8463
+ try {
8464
+ await runPluginPhaseHook(plugin, hookName);
8465
+ } catch (error) {
8466
+ logger.error(`Error processing plugin ${styles.bold(plugin.id)} (${hookName})`);
8467
+ logger.error(String(error));
8468
+ if (!watch$1) throw error;
8469
+ }
8470
+ }));
8471
+ if (!watch$1) {
8472
+ const failures = results.filter((r) => r.status === "rejected");
8473
+ if (failures.length > 0) throw new AggregateError(failures.map((f) => f.reason));
8474
+ }
8475
+ }
8476
+ /**
8477
+ * Write generated files to disk.
8478
+ * @param sourceId - Generator or plugin ID for logging
8479
+ * @param result - Generator result containing files to write
8480
+ */
8481
+ async function writeGeneratedFiles(sourceId, result) {
8320
8482
  await Promise.all(result.files.map(async (file) => {
8321
8483
  fs$2.mkdirSync(path.dirname(file.path), { recursive: true });
8322
8484
  return new Promise((resolve, reject) => {
8323
8485
  if (file.skipIfExists && fs$2.existsSync(file.path)) {
8324
8486
  const relativePath = path.relative(process.cwd(), file.path);
8325
- logger.debug(`${gen.id} | skip existing: ${relativePath}`);
8487
+ logger.debug(`${sourceId} | skip existing: ${relativePath}`);
8326
8488
  return resolve();
8327
8489
  }
8328
8490
  fs$2.writeFile(file.path, file.content, (err) => {
@@ -8333,7 +8495,7 @@ function createGenerationManager(params) {
8333
8495
  reject(err);
8334
8496
  } else {
8335
8497
  const relativePath = path.relative(process.cwd(), file.path);
8336
- logger.log(`${gen.id} | generate: ${styles.success(relativePath)}`);
8498
+ logger.log(`${sourceId} | generate: ${styles.success(relativePath)}`);
8337
8499
  if (file.executable) fs$2.chmod(file.path, 493, (chmodErr) => {
8338
8500
  if (chmodErr) {
8339
8501
  const relativePath$1 = path.relative(process.cwd(), file.path);
@@ -8362,7 +8524,7 @@ function createGenerationManager(params) {
8362
8524
  await aggregate(gen);
8363
8525
  }
8364
8526
  async function runGenerators(gens, watch$1) {
8365
- await Promise.allSettled(gens.map(async (gen) => {
8527
+ const results = await Promise.allSettled(gens.map(async (gen) => {
8366
8528
  try {
8367
8529
  await processGenerator(gen);
8368
8530
  } catch (error) {
@@ -8371,6 +8533,10 @@ function createGenerationManager(params) {
8371
8533
  if (!watch$1) throw error;
8372
8534
  }
8373
8535
  }));
8536
+ if (!watch$1) {
8537
+ const failures = results.filter((r) => r.status === "rejected");
8538
+ if (failures.length > 0) throw new AggregateError(failures.map((f) => f.reason));
8539
+ }
8374
8540
  }
8375
8541
  async function restartWatchProcess() {
8376
8542
  logger.newline();
@@ -8417,9 +8583,9 @@ function createGenerationManager(params) {
8417
8583
  await db$1.loadTypes();
8418
8584
  await db$1.processNamespacePlugins();
8419
8585
  services.tailordb[namespace] = {
8420
- types: db$1.getTypes(),
8421
- sourceInfo: db$1.getTypeSourceInfo(),
8422
- pluginAttachments: db$1.getPluginAttachments()
8586
+ types: db$1.types,
8587
+ sourceInfo: db$1.typeSourceInfo,
8588
+ pluginAttachments: db$1.pluginAttachments
8423
8589
  };
8424
8590
  } catch (error) {
8425
8591
  logger.error(`Error loading types for TailorDB service ${styles.bold(namespace)}`);
@@ -8432,8 +8598,9 @@ function createGenerationManager(params) {
8432
8598
  if (app.authService) await app.authService.resolveNamespaces();
8433
8599
  if (app.tailorDBServices.length > 0 || pluginExecutorFiles.length > 0) logger.newline();
8434
8600
  const tailordbOnlyGens = generators.filter((g) => onlyHas(g, "tailordb"));
8435
- if (tailordbOnlyGens.length > 0) {
8436
- await runGenerators(tailordbOnlyGens, watch$1);
8601
+ const hasOnTailorDBReady = generationPlugins.some((p) => p.onTailorDBReady != null);
8602
+ if (tailordbOnlyGens.length > 0 || hasOnTailorDBReady) {
8603
+ await Promise.all([runGenerators(tailordbOnlyGens, watch$1), runPluginHook("onTailorDBReady", watch$1)]);
8437
8604
  logger.newline();
8438
8605
  }
8439
8606
  for (const resolverService of app.resolverServices) {
@@ -8441,7 +8608,7 @@ function createGenerationManager(params) {
8441
8608
  try {
8442
8609
  await resolverService.loadResolvers();
8443
8610
  services.resolver[namespace] = {};
8444
- Object.entries(resolverService.getResolvers()).forEach(([_, resolver]) => {
8611
+ Object.entries(resolverService.resolvers).forEach(([_, resolver]) => {
8445
8612
  services.resolver[namespace][resolver.name] = resolver;
8446
8613
  });
8447
8614
  } catch (error) {
@@ -8451,21 +8618,23 @@ function createGenerationManager(params) {
8451
8618
  }
8452
8619
  }
8453
8620
  const nonExecutorGens = generators.filter((g) => !tailordbOnlyGens.includes(g) && hasNone(g, "executor"));
8454
- if (nonExecutorGens.length > 0) {
8455
- await runGenerators(nonExecutorGens, watch$1);
8621
+ const hasOnResolverReady = generationPlugins.some((p) => p.onResolverReady != null);
8622
+ if (nonExecutorGens.length > 0 || hasOnResolverReady) {
8623
+ await Promise.all([runGenerators(nonExecutorGens, watch$1), runPluginHook("onResolverReady", watch$1)]);
8456
8624
  logger.newline();
8457
8625
  }
8458
8626
  if (executorService) {
8459
8627
  await executorService.loadExecutors();
8460
8628
  if (pluginExecutorFiles.length > 0) await executorService.loadPluginExecutorFiles([...pluginExecutorFiles]);
8461
8629
  }
8462
- const allExecutors = executorService?.getExecutors() ?? {};
8630
+ const allExecutors = executorService?.executors ?? {};
8463
8631
  Object.entries(allExecutors).forEach(([key, executor]) => {
8464
8632
  services.executor[key] = executor;
8465
8633
  });
8466
8634
  const executorGens = generators.filter((g) => hasAll(g, "executor"));
8467
- if (executorGens.length > 0) {
8468
- await runGenerators(executorGens, watch$1);
8635
+ const hasOnExecutorReady = generationPlugins.some((p) => p.onExecutorReady != null);
8636
+ if (executorGens.length > 0 || hasOnExecutorReady) {
8637
+ await Promise.all([runGenerators(executorGens, watch$1), runPluginHook("onExecutorReady", watch$1)]);
8469
8638
  logger.newline();
8470
8639
  }
8471
8640
  },
@@ -8573,10 +8742,11 @@ async function listMachineUsers(options) {
8573
8742
  applicationName: config.name
8574
8743
  });
8575
8744
  if (!application?.authNamespace) throw new Error(`Application ${config.name} does not have an auth configuration.`);
8576
- return (await fetchAll(async (pageToken) => {
8745
+ return (await fetchAll(async (pageToken, maxPageSize) => {
8577
8746
  const { machineUsers, nextPageToken } = await client.listAuthMachineUsers({
8578
8747
  workspaceId,
8579
8748
  pageToken,
8749
+ pageSize: maxPageSize,
8580
8750
  authNamespace: application.authNamespace
8581
8751
  });
8582
8752
  return [machineUsers, nextPageToken];
@@ -8788,10 +8958,11 @@ async function listOAuth2Clients(options) {
8788
8958
  applicationName: config.name
8789
8959
  });
8790
8960
  if (!application?.authNamespace) throw new Error(`Application ${config.name} does not have an auth configuration.`);
8791
- return (await fetchAll(async (pageToken) => {
8961
+ return (await fetchAll(async (pageToken, maxPageSize) => {
8792
8962
  const { oauth2Clients, nextPageToken } = await client.listAuthOAuth2Clients({
8793
8963
  workspaceId,
8794
8964
  pageToken,
8965
+ pageSize: maxPageSize,
8795
8966
  namespaceName: application.authNamespace
8796
8967
  });
8797
8968
  return [oauth2Clients, nextPageToken];
@@ -9487,7 +9658,7 @@ async function generate(options) {
9487
9658
  if (options.init) await handleInitOption(namespacesWithMigrations, options.yes);
9488
9659
  let pluginManager;
9489
9660
  if (plugins.length > 0) pluginManager = new PluginManager(plugins);
9490
- const { defineApplication: defineApplication$1 } = await import("./application-D5ZEr4zk.mjs");
9661
+ const { defineApplication: defineApplication$1 } = await import("./application-Csj7Ow5Q.mjs");
9491
9662
  const application = defineApplication$1({
9492
9663
  config,
9493
9664
  pluginManager
@@ -9502,7 +9673,8 @@ async function generate(options) {
9502
9673
  }
9503
9674
  await tailordbService.loadTypes();
9504
9675
  await tailordbService.processNamespacePlugins();
9505
- const currentSnapshot = createSnapshotFromLocalTypes(tailordbService.getTypes(), namespace);
9676
+ const localTypesObj = tailordbService.types;
9677
+ const currentSnapshot = createSnapshotFromLocalTypes(localTypesObj, namespace);
9506
9678
  let previousSnapshot = null;
9507
9679
  try {
9508
9680
  previousSnapshot = reconstructSnapshotFromMigrations(migrationsDir);
@@ -9824,10 +9996,11 @@ async function listWorkflows(options) {
9824
9996
  workspaceId: options?.workspaceId,
9825
9997
  profile: options?.profile
9826
9998
  });
9827
- return (await fetchAll(async (pageToken) => {
9999
+ return (await fetchAll(async (pageToken, maxPageSize) => {
9828
10000
  const { workflows, nextPageToken } = await client.listWorkflows({
9829
10001
  workspaceId,
9830
- pageToken
10002
+ pageToken,
10003
+ pageSize: maxPageSize
9831
10004
  });
9832
10005
  return [workflows, nextPageToken];
9833
10006
  })).map(toWorkflowListInfo);
@@ -10698,4 +10871,4 @@ const updateCommand = defineCommand({
10698
10871
 
10699
10872
  //#endregion
10700
10873
  export { jobsCommand as $, generateCommand as A, getMigrationDirPath as At, getMachineUserToken as B, getNamespacesWithMigrations as Bt, resumeCommand as C, MIGRATE_FILE_NAME as Ct, truncate as D, createSnapshotFromLocalTypes as Dt, listWorkflows as E, compareSnapshots as Et, removeCommand$1 as F, loadDiff as Ft, generateCommand$1 as G, commonArgs as Gt, listCommand$5 as H, generateUserTypes as Ht, listCommand$4 as I, reconstructSnapshotFromMigrations as It, triggerCommand as J, jsonArgs as Jt, listWebhookExecutors as K, confirmationArgs as Kt, listOAuth2Clients as L, formatDiffSummary as Lt, show as M, getMigrationFiles as Mt, showCommand as N, getNextMigrationNumber as Nt, truncateCommand as O, formatMigrationNumber as Ot, remove as P, isValidMigrationNumber as Pt, getExecutorJob as Q, getCommand$1 as R, formatMigrationDiff as Rt, healthCommand as S, INITIAL_SCHEMA_NUMBER as St, listCommand$3 as T, compareLocalTypesWithSnapshot as Tt, listMachineUsers as U, apiCall as Ut, tokenCommand as V, trnPrefix as Vt, generate$1 as W, apiCommand as Wt, listCommand$6 as X, workspaceArgs as Xt, triggerExecutor as Y, withCommonArgs as Yt, listExecutors as Z, createCommand as _, MIGRATION_LABEL_KEY as _t, listCommand as a, getWorkflow as at, listCommand$2 as b, DB_TYPES_FILE_NAME as bt, inviteUser as c, listWorkflowExecutions as ct, listCommand$1 as d, getCommand$3 as dt, listExecutorJobs as et, listWorkspaces as f, getExecutor as ft, deleteWorkspace as g, waitForExecution$1 as gt, deleteCommand as h, executeScript as ht, removeUser as i, getCommand$2 as it, logBetaWarning as j, getMigrationFilePath as jt, generate as k, getLatestMigrationNumber as kt, restoreCommand as l, functionExecutionStatusToString as lt, getWorkspace as m, applyCommand as mt, updateUser as n, startCommand as nt, listUsers as o, executionsCommand as ot, getCommand as p, apply as pt, webhookCommand as q, deploymentArgs as qt, removeCommand as r, startWorkflow as rt, inviteCommand as s, 
getWorkflowExecution as st, updateCommand as t, watchExecutorJob as tt, restoreWorkspace as u, formatKeyValueTable as ut, createWorkspace as v, parseMigrationLabelNumber as vt, resumeWorkflow as w, SCHEMA_FILE_NAME as wt, getAppHealth as x, DIFF_FILE_NAME as xt, listApps as y, bundleMigrationScript as yt, getOAuth2Client as z, hasChanges as zt };
10701
- //# sourceMappingURL=update-DQKCUNmr.mjs.map
10874
+ //# sourceMappingURL=update-BoNKMti-.mjs.map