@tailor-platform/sdk 1.20.0 → 1.21.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/CHANGELOG.md +12 -0
  2. package/dist/{application-Bli2ieqY.mjs → application-CEv5c7TU.mjs} +7 -5
  3. package/dist/{application-Bli2ieqY.mjs.map → application-CEv5c7TU.mjs.map} +1 -1
  4. package/dist/{application-CZdieD3K.mjs → application-DiCzM9b0.mjs} +2 -2
  5. package/dist/cli/index.mjs +3 -3
  6. package/dist/cli/lib.d.mts +7 -6
  7. package/dist/cli/lib.mjs +3 -3
  8. package/dist/configure/index.d.mts +3 -3
  9. package/dist/configure/index.mjs +5 -0
  10. package/dist/configure/index.mjs.map +1 -1
  11. package/dist/{index-B86CIKCW.d.mts → index-BWVAwea4.d.mts} +2 -2
  12. package/dist/{index-DcY0e3S5.d.mts → index-CnHd6BNg.d.mts} +8 -3
  13. package/dist/{index-i6QUsr5p.d.mts → index-Dn61THJK.d.mts} +2 -2
  14. package/dist/{index-CGjiOz_W.d.mts → index-DxlmLUag.d.mts} +2 -2
  15. package/dist/{index-CIXOwe6g.d.mts → index-oZXVKyfX.d.mts} +2 -2
  16. package/dist/plugin/builtin/enum-constants/index.d.mts +2 -2
  17. package/dist/plugin/builtin/file-utils/index.d.mts +2 -2
  18. package/dist/plugin/builtin/kysely-type/index.d.mts +2 -2
  19. package/dist/plugin/builtin/seed/index.d.mts +2 -2
  20. package/dist/plugin/builtin/seed/index.mjs +1 -1
  21. package/dist/plugin/index.d.mts +1 -1
  22. package/dist/{seed-CeUEANfQ.mjs → seed-D-rYCN5F.mjs} +2 -2
  23. package/dist/{seed-CeUEANfQ.mjs.map → seed-D-rYCN5F.mjs.map} +1 -1
  24. package/dist/{types-C14GuyPI.d.mts → types-C0o90Cmb.d.mts} +2 -2
  25. package/dist/types-ClK_HJ0G.mjs.map +1 -1
  26. package/dist/{types-CNw4p8V7.d.mts → types-QKq1usl7.d.mts} +19 -8
  27. package/dist/{update-DkpWgrzL.mjs → update-9MTRN1UA.mjs} +121 -87
  28. package/dist/update-9MTRN1UA.mjs.map +1 -0
  29. package/dist/utils/test/index.d.mts +3 -3
  30. package/docs/services/resolver.md +61 -0
  31. package/docs/services/tailordb.md +55 -0
  32. package/package.json +1 -1
  33. package/dist/update-DkpWgrzL.mjs.map +0 -1
@@ -1,5 +1,5 @@
1
1
  import { t as db } from "./schema-D5Cpd8fQ.mjs";
2
- import { $ as TenantProviderConfig_TenantProviderType, A as WorkflowJobExecution_Status, D as userAgent, E as resolveStaticWebsiteUrls, F as TailorDBType_Permission_Permit, G as AuthInvokerSchema, H as ExecutorTargetType, I as TailorDBType_PermitAction, J as AuthSCIMAttribute_Mutability, K as AuthOAuth2Client_ClientType, L as PipelineResolver_OperationType, M as TailorDBGQLPermission_Operator, N as TailorDBGQLPermission_Permit, O as WorkspacePlatformUserRole, P as TailorDBType_Permission_Operator, R as IdPLang, T as platformBaseUrl, U as ExecutorTriggerType, V as ExecutorJobStatus, W as AuthIDPConfig_AuthType, X as AuthSCIMAttribute_Uniqueness, Y as AuthSCIMAttribute_Type, Z as AuthSCIMConfig_AuthorizationType, _ as loadWorkspaceId, a as buildExecutorArgsExpr, at as PageDirection, b as fetchAll, c as OAuth2ClientSchema, ct as logger, d as getDistDir, et as UserProfileProviderConfig_UserProfileProviderType, f as hashFile, g as loadOrganizationId, h as loadFolderId, i as loadConfig, it as FilterSchema, j as TailorDBGQLPermission_Action, k as WorkflowExecution_Status, l as stringifyFunction, lt as styles, m as loadAccessToken, n as generatePluginFilesIfNeeded, nt as ConditionSchema, o as buildResolverOperationHookExpr, ot as ApplicationSchemaUpdateAttemptStatus, q as AuthOAuth2Client_GrantType, r as loadApplication, rt as Condition_Operator, s as createExecutorService, st as Subgraph_ServiceType, t as defineApplication, tt as GetApplicationSchemaHealthResponse_ApplicationSchemaHealthStatus, u as createBundleCache, ut as symbols, v as readPlatformConfig, w as initOperatorClient, x as fetchMachineUserToken, y as writePlatformConfig, z as FunctionExecution_Status } from "./application-Bli2ieqY.mjs";
2
+ import { $ as TenantProviderConfig_TenantProviderType, A as WorkflowJobExecution_Status, D as userAgent, E as resolveStaticWebsiteUrls, F as TailorDBType_Permission_Permit, G as AuthInvokerSchema, H as ExecutorTargetType, I as TailorDBType_PermitAction, J as AuthSCIMAttribute_Mutability, K as AuthOAuth2Client_ClientType, L as PipelineResolver_OperationType, M as TailorDBGQLPermission_Operator, N as TailorDBGQLPermission_Permit, O as WorkspacePlatformUserRole, P as TailorDBType_Permission_Operator, R as IdPLang, T as platformBaseUrl, U as ExecutorTriggerType, V as ExecutorJobStatus, W as AuthIDPConfig_AuthType, X as AuthSCIMAttribute_Uniqueness, Y as AuthSCIMAttribute_Type, Z as AuthSCIMConfig_AuthorizationType, _ as loadWorkspaceId, a as buildExecutorArgsExpr, at as PageDirection, b as fetchAll, c as OAuth2ClientSchema, ct as logger, d as getDistDir, et as UserProfileProviderConfig_UserProfileProviderType, f as hashFile, g as loadOrganizationId, h as loadFolderId, i as loadConfig, it as FilterSchema, j as TailorDBGQLPermission_Action, k as WorkflowExecution_Status, l as stringifyFunction, lt as styles, m as loadAccessToken, n as generatePluginFilesIfNeeded, nt as ConditionSchema, o as buildResolverOperationHookExpr, ot as ApplicationSchemaUpdateAttemptStatus, q as AuthOAuth2Client_GrantType, r as loadApplication, rt as Condition_Operator, s as createExecutorService, st as Subgraph_ServiceType, t as defineApplication, tt as GetApplicationSchemaHealthResponse_ApplicationSchemaHealthStatus, u as createBundleCache, ut as symbols, v as readPlatformConfig, w as initOperatorClient, x as fetchMachineUserToken, y as writePlatformConfig, z as FunctionExecution_Status } from "./application-CEv5c7TU.mjs";
3
3
  import { t as readPackageJson } from "./package-json-3H5gfhA4.mjs";
4
4
  import { r as withSpan } from "./telemetry-DuBhnd0X.mjs";
5
5
  import { createRequire } from "node:module";
@@ -3040,6 +3040,7 @@ async function planResolvers(client, workspaceId, pipelines, executors, deletedS
3040
3040
  };
3041
3041
  const executorUsedResolvers = /* @__PURE__ */ new Set();
3042
3042
  for (const executor of executors) if (executor.trigger.kind === "resolverExecuted") executorUsedResolvers.add(executor.trigger.resolverName);
3043
+ for (const pipeline of pipelines) for (const resolver of Object.values(pipeline.resolvers)) if (executorUsedResolvers.has(resolver.name) && resolver.publishEvents === false) throw new Error(`Resolver "${resolver.name}" has publishEvents set to false, but it is used by an executor with a resolverExecuted trigger. Either remove the publishEvents: false setting or remove the executor trigger for this resolver.`);
3043
3044
  for (const pipeline of pipelines) {
3044
3045
  const existingResolvers = await fetchResolvers(pipeline.namespace);
3045
3046
  const existingNameSet = /* @__PURE__ */ new Set();
@@ -3102,15 +3103,19 @@ function processResolver(namespace, resolver, executorUsedResolvers, env) {
3102
3103
  const response = protoFields({ "": resolver.output }, `${typeBaseName}Output`, false)[0];
3103
3104
  const resolverDescription = resolver.description || `${resolver.name} resolver`;
3104
3105
  const outputDescription = resolver.output.metadata.description;
3106
+ const combinedDescription = outputDescription ? `${resolverDescription}\n\nReturns:\n${outputDescription}` : resolverDescription;
3107
+ let publishExecutionEvents = false;
3108
+ if (resolver.publishEvents !== void 0) publishExecutionEvents = resolver.publishEvents;
3109
+ else if (executorUsedResolvers.has(resolver.name)) publishExecutionEvents = true;
3105
3110
  return {
3106
3111
  authorization: "true==true",
3107
- description: outputDescription ? `${resolverDescription}\n\nReturns:\n${outputDescription}` : resolverDescription,
3112
+ description: combinedDescription,
3108
3113
  inputs,
3109
3114
  name: resolver.name,
3110
3115
  operationType: resolver.operation,
3111
3116
  response,
3112
3117
  pipelines,
3113
- publishExecutionEvents: executorUsedResolvers.has(resolver.name)
3118
+ publishExecutionEvents
3114
3119
  };
3115
3120
  }
3116
3121
  function protoFields(fields, baseName, isInput) {
@@ -3639,6 +3644,7 @@ function createSnapshotType(type) {
3639
3644
  ...ops.read !== void 0 && { read: ops.read }
3640
3645
  };
3641
3646
  }
3647
+ if (type.settings.publishEvents !== void 0) snapshotType.settings.publishEvents = type.settings.publishEvents;
3642
3648
  }
3643
3649
  if (type.indexes && Object.keys(type.indexes).length > 0) {
3644
3650
  snapshotType.indexes = {};
@@ -5498,6 +5504,13 @@ async function planTypes(client, workspaceId, tailordbs, executors, deletedServi
5498
5504
  };
5499
5505
  const executorUsedTypes = /* @__PURE__ */ new Set();
5500
5506
  for (const executor of executors) if (executor.trigger.kind === "recordCreated" || executor.trigger.kind === "recordUpdated" || executor.trigger.kind === "recordDeleted") executorUsedTypes.add(executor.trigger.typeName);
5507
+ for (const tailordb of tailordbs) {
5508
+ const types = filteredTypesByNamespace?.get(tailordb.namespace) ?? tailordb.types;
5509
+ for (const typeName of Object.keys(types)) {
5510
+ const type = types[typeName];
5511
+ if (executorUsedTypes.has(typeName) && type.settings?.publishEvents === false) throw new Error(`Type "${typeName}" has publishEvents set to false, but it is used by an executor with a record trigger. Either remove the publishEvents: false setting or remove the executor trigger for this type.`);
5512
+ }
5513
+ }
5501
5514
  for (const tailordb of tailordbs) {
5502
5515
  const existingTypes = await fetchTypes(tailordb.namespace);
5503
5516
  const existingNameSet = /* @__PURE__ */ new Set();
@@ -5565,7 +5578,8 @@ function generateTailorDBTypeManifest(type, executorUsedTypes, namespaceGqlOpera
5565
5578
  pluralForm,
5566
5579
  publishRecordEvents: false
5567
5580
  };
5568
- if (executorUsedTypes.has(type.name)) defaultSettings.publishRecordEvents = true;
5581
+ if (type.settings?.publishEvents !== void 0) defaultSettings.publishRecordEvents = type.settings.publishEvents;
5582
+ else if (executorUsedTypes.has(type.name)) defaultSettings.publishRecordEvents = true;
5569
5583
  const ops = type.settings?.gqlOperations ?? namespaceGqlOperations;
5570
5584
  if (ops) defaultSettings.disableGqlOperations = {
5571
5585
  create: ops.create === false,
@@ -8527,18 +8541,8 @@ function createGenerationManager(params) {
8527
8541
  let watcher = null;
8528
8542
  const generatorResults = {};
8529
8543
  const generationPlugins = pluginManager?.getPluginsWithGenerationHooks() ?? [];
8530
- function getDeps(gen) {
8531
- return new Set(gen.dependencies);
8532
- }
8533
- function onlyHas(gen, ...required) {
8534
- const deps = getDeps(gen);
8535
- return required.every((r) => deps.has(r)) && deps.size === required.length;
8536
- }
8537
- function hasAll(gen, ...required) {
8538
- return required.every((r) => getDeps(gen).has(r));
8539
- }
8540
- function hasNone(gen, ...excluded) {
8541
- return excluded.every((e) => !getDeps(gen).has(e));
8544
+ function getReadyGenerators(dep) {
8545
+ return generators.filter((g) => g.dependencies.includes(dep));
8542
8546
  }
8543
8547
  function getAuthInput() {
8544
8548
  const authService = application.authService;
@@ -8793,13 +8797,15 @@ function createGenerationManager(params) {
8793
8797
  }
8794
8798
  async function runGenerators(gens, watch$1) {
8795
8799
  const results = await Promise.allSettled(gens.map(async (gen) => {
8796
- try {
8797
- await processGenerator(gen);
8798
- } catch (error) {
8799
- logger.error(`Error processing generator ${styles.bold(gen.id)}`);
8800
- logger.error(String(error));
8801
- if (!watch$1) throw error;
8802
- }
8800
+ await withSpan(`generate.generator.${gen.id}`, async () => {
8801
+ try {
8802
+ await processGenerator(gen);
8803
+ } catch (error) {
8804
+ logger.error(`Error processing generator ${styles.bold(gen.id)}`);
8805
+ logger.error(String(error));
8806
+ if (!watch$1) throw error;
8807
+ }
8808
+ });
8803
8809
  }));
8804
8810
  if (!watch$1) {
8805
8811
  const failures = results.filter((r) => r.status === "rejected");
@@ -8845,64 +8851,86 @@ function createGenerationManager(params) {
8845
8851
  logger.newline();
8846
8852
  logger.log(`Generation for application: ${styles.highlight(application.config.name)}`);
8847
8853
  const app = application;
8848
- for (const db$1 of app.tailorDBServices) {
8849
- const namespace = db$1.namespace;
8850
- try {
8851
- await db$1.loadTypes();
8852
- await db$1.processNamespacePlugins();
8853
- services.tailordb[namespace] = {
8854
- types: db$1.types,
8855
- sourceInfo: db$1.typeSourceInfo,
8856
- pluginAttachments: db$1.pluginAttachments
8857
- };
8858
- } catch (error) {
8859
- logger.error(`Error loading types for TailorDB service ${styles.bold(namespace)}`);
8860
- logger.error(String(error));
8861
- if (!watch$1) throw error;
8854
+ await withSpan("generate.loadTailorDBTypes", async (span) => {
8855
+ span.setAttribute("generate.namespace_count", app.tailorDBServices.length);
8856
+ for (const db$1 of app.tailorDBServices) {
8857
+ const namespace = db$1.namespace;
8858
+ await withSpan(`generate.loadTypes.${namespace}`, async () => {
8859
+ try {
8860
+ await db$1.loadTypes();
8861
+ await db$1.processNamespacePlugins();
8862
+ services.tailordb[namespace] = {
8863
+ types: db$1.types,
8864
+ sourceInfo: db$1.typeSourceInfo,
8865
+ pluginAttachments: db$1.pluginAttachments
8866
+ };
8867
+ } catch (error) {
8868
+ logger.error(`Error loading types for TailorDB service ${styles.bold(namespace)}`);
8869
+ logger.error(String(error));
8870
+ if (!watch$1) throw error;
8871
+ }
8872
+ });
8862
8873
  }
8863
- }
8864
- const pluginExecutorFiles = generatePluginFilesIfNeeded(pluginManager, app.tailorDBServices, config.path);
8865
- const executorService = app.executorService ?? (pluginExecutorFiles.length > 0 ? createExecutorService({ config: { files: [] } }) : void 0);
8866
- if (app.authService) await app.authService.resolveNamespaces();
8874
+ });
8875
+ const { pluginExecutorFiles, executorService } = await withSpan("generate.pluginFiles", async () => {
8876
+ const pluginExecutorFiles$1 = generatePluginFilesIfNeeded(pluginManager, app.tailorDBServices, config.path);
8877
+ return {
8878
+ pluginExecutorFiles: pluginExecutorFiles$1,
8879
+ executorService: app.executorService ?? (pluginExecutorFiles$1.length > 0 ? createExecutorService({ config: { files: [] } }) : void 0)
8880
+ };
8881
+ });
8882
+ if (app.authService) await withSpan("generate.resolveAuthNamespaces", async () => app.authService.resolveNamespaces());
8867
8883
  if (app.tailorDBServices.length > 0 || pluginExecutorFiles.length > 0) logger.newline();
8868
- const tailordbOnlyGens = generators.filter((g) => onlyHas(g, "tailordb"));
8884
+ const readyAfterTailorDB = getReadyGenerators("tailordb");
8869
8885
  const hasOnTailorDBReady = generationPlugins.some((p) => p.onTailorDBReady != null);
8870
- if (tailordbOnlyGens.length > 0 || hasOnTailorDBReady) {
8871
- await Promise.all([runGenerators(tailordbOnlyGens, watch$1), runPluginHook("onTailorDBReady", watch$1)]);
8886
+ if (readyAfterTailorDB.length > 0 || hasOnTailorDBReady) {
8887
+ await withSpan("generate.onTailorDBReady", async () => {
8888
+ await Promise.all([runGenerators(readyAfterTailorDB, watch$1), runPluginHook("onTailorDBReady", watch$1)]);
8889
+ });
8872
8890
  logger.newline();
8873
8891
  }
8874
- for (const resolverService of app.resolverServices) {
8875
- const namespace = resolverService.namespace;
8876
- try {
8877
- await resolverService.loadResolvers();
8878
- services.resolver[namespace] = {};
8879
- Object.entries(resolverService.resolvers).forEach(([_, resolver]) => {
8880
- services.resolver[namespace][resolver.name] = resolver;
8892
+ await withSpan("generate.loadResolvers", async () => {
8893
+ for (const resolverService of app.resolverServices) {
8894
+ const namespace = resolverService.namespace;
8895
+ await withSpan(`generate.loadResolvers.${namespace}`, async () => {
8896
+ try {
8897
+ await resolverService.loadResolvers();
8898
+ services.resolver[namespace] = {};
8899
+ Object.entries(resolverService.resolvers).forEach(([_, resolver]) => {
8900
+ services.resolver[namespace][resolver.name] = resolver;
8901
+ });
8902
+ } catch (error) {
8903
+ logger.error(`Error loading resolvers for Resolver service ${styles.bold(namespace)}`);
8904
+ logger.error(String(error));
8905
+ if (!watch$1) throw error;
8906
+ }
8881
8907
  });
8882
- } catch (error) {
8883
- logger.error(`Error loading resolvers for Resolver service ${styles.bold(namespace)}`);
8884
- logger.error(String(error));
8885
- if (!watch$1) throw error;
8886
8908
  }
8887
- }
8888
- const nonExecutorGens = generators.filter((g) => !tailordbOnlyGens.includes(g) && hasNone(g, "executor"));
8909
+ });
8910
+ const readyAfterResolvers = getReadyGenerators("resolver");
8889
8911
  const hasOnResolverReady = generationPlugins.some((p) => p.onResolverReady != null);
8890
- if (nonExecutorGens.length > 0 || hasOnResolverReady) {
8891
- await Promise.all([runGenerators(nonExecutorGens, watch$1), runPluginHook("onResolverReady", watch$1)]);
8912
+ if (readyAfterResolvers.length > 0 || hasOnResolverReady) {
8913
+ await withSpan("generate.onResolversReady", async () => {
8914
+ await Promise.all([runGenerators(readyAfterResolvers, watch$1), runPluginHook("onResolverReady", watch$1)]);
8915
+ });
8892
8916
  logger.newline();
8893
8917
  }
8894
- if (executorService) {
8895
- await executorService.loadExecutors();
8896
- if (pluginExecutorFiles.length > 0) await executorService.loadPluginExecutorFiles([...pluginExecutorFiles]);
8897
- }
8898
- const allExecutors = executorService?.executors ?? {};
8899
- Object.entries(allExecutors).forEach(([key, executor]) => {
8900
- services.executor[key] = executor;
8918
+ await withSpan("generate.loadExecutors", async () => {
8919
+ if (executorService) {
8920
+ await executorService.loadExecutors();
8921
+ if (pluginExecutorFiles.length > 0) await executorService.loadPluginExecutorFiles([...pluginExecutorFiles]);
8922
+ }
8923
+ const allExecutors = executorService?.executors ?? {};
8924
+ Object.entries(allExecutors).forEach(([key, executor]) => {
8925
+ services.executor[key] = executor;
8926
+ });
8901
8927
  });
8902
- const executorGens = generators.filter((g) => hasAll(g, "executor"));
8928
+ const readyAfterExecutors = getReadyGenerators("executor");
8903
8929
  const hasOnExecutorReady = generationPlugins.some((p) => p.onExecutorReady != null);
8904
- if (executorGens.length > 0 || hasOnExecutorReady) {
8905
- await Promise.all([runGenerators(executorGens, watch$1), runPluginHook("onExecutorReady", watch$1)]);
8930
+ if (readyAfterExecutors.length > 0 || hasOnExecutorReady) {
8931
+ await withSpan("generate.onExecutorsReady", async () => {
8932
+ await Promise.all([runGenerators(readyAfterExecutors, watch$1), runPluginHook("onExecutorReady", watch$1)]);
8933
+ });
8906
8934
  logger.newline();
8907
8935
  }
8908
8936
  },
@@ -8931,25 +8959,31 @@ function createGenerationManager(params) {
8931
8959
  * @returns Promise that resolves when generation (and watch, if enabled) completes
8932
8960
  */
8933
8961
  async function generate$1(options) {
8934
- const { config, generators, plugins } = await loadConfig(options?.configPath);
8935
- const watch$1 = options?.watch ?? false;
8936
- await generateUserTypes({
8937
- config,
8938
- configPath: config.path
8939
- });
8940
- let pluginManager;
8941
- if (plugins.length > 0) pluginManager = new PluginManager(plugins);
8942
- const manager = createGenerationManager({
8943
- application: defineApplication({
8962
+ return withSpan("generate", async (rootSpan) => {
8963
+ const { config, generators, plugins } = await withSpan("generate.loadConfig", async () => loadConfig(options?.configPath));
8964
+ const watch$1 = options?.watch ?? false;
8965
+ rootSpan.setAttribute("generate.watch", watch$1);
8966
+ rootSpan.setAttribute("generate.generators.count", generators.length);
8967
+ await withSpan("generate.generateUserTypes", async () => generateUserTypes({
8968
+ config,
8969
+ configPath: config.path
8970
+ }));
8971
+ let pluginManager;
8972
+ if (plugins.length > 0) pluginManager = new PluginManager(plugins);
8973
+ const application = defineApplication({
8944
8974
  config,
8945
8975
  pluginManager
8946
- }),
8947
- config,
8948
- generators,
8949
- pluginManager
8976
+ });
8977
+ rootSpan.setAttribute("app.name", application.config.name);
8978
+ const manager = createGenerationManager({
8979
+ application,
8980
+ config,
8981
+ generators,
8982
+ pluginManager
8983
+ });
8984
+ await manager.generate(watch$1);
8985
+ if (watch$1) await manager.watch();
8950
8986
  });
8951
- await manager.generate(watch$1);
8952
- if (watch$1) await manager.watch();
8953
8987
  }
8954
8988
 
8955
8989
  //#endregion
@@ -9906,7 +9940,7 @@ async function generate(options) {
9906
9940
  if (options.init) await handleInitOption(namespacesWithMigrations, options.yes);
9907
9941
  let pluginManager;
9908
9942
  if (plugins.length > 0) pluginManager = new PluginManager(plugins);
9909
- const { defineApplication: defineApplication$1 } = await import("./application-CZdieD3K.mjs");
9943
+ const { defineApplication: defineApplication$1 } = await import("./application-DiCzM9b0.mjs");
9910
9944
  const application = defineApplication$1({
9911
9945
  config,
9912
9946
  pluginManager
@@ -11119,4 +11153,4 @@ const updateCommand = defineCommand({
11119
11153
 
11120
11154
  //#endregion
11121
11155
  export { listExecutorJobs as $, generateCommand as A, getMigrationFiles as At, getMachineUserToken as B, generateUserTypes as Bt, resumeCommand as C, compareLocalTypesWithSnapshot as Ct, truncate as D, getLatestMigrationNumber as Dt, listWorkflows as E, formatMigrationNumber as Et, removeCommand$1 as F, formatDiffSummary as Ft, listWebhookExecutors as G, deploymentArgs as Gt, listCommand$5 as H, apiCommand as Ht, listCommand$4 as I, formatMigrationDiff as It, triggerExecutor as J, workspaceArgs as Jt, webhookCommand as K, jsonArgs as Kt, listOAuth2Clients as L, hasChanges as Lt, show as M, isValidMigrationNumber as Mt, showCommand as N, loadDiff as Nt, truncateCommand as O, getMigrationDirPath as Ot, remove as P, reconstructSnapshotFromMigrations as Pt, jobsCommand as Q, getCommand$1 as R, getNamespacesWithMigrations as Rt, healthCommand as S, SCHEMA_FILE_NAME as St, listCommand$3 as T, createSnapshotFromLocalTypes as Tt, listMachineUsers as U, commonArgs as Ut, tokenCommand as V, apiCall as Vt, generate$1 as W, confirmationArgs as Wt, listExecutors as X, listCommand$6 as Y, getExecutorJob as Z, createCommand as _, bundleMigrationScript as _t, listCommand as a, executionsCommand as at, listCommand$2 as b, INITIAL_SCHEMA_NUMBER as bt, inviteUser as c, functionExecutionStatusToString as ct, listCommand$1 as d, getExecutor as dt, watchExecutorJob as et, listWorkspaces as f, apply as ft, deleteWorkspace as g, parseMigrationLabelNumber as gt, deleteCommand as h, MIGRATION_LABEL_KEY as ht, removeUser as i, getWorkflow as it, logBetaWarning as j, getNextMigrationNumber as jt, generate as k, getMigrationFilePath as kt, restoreCommand as l, formatKeyValueTable as lt, getWorkspace as m, waitForExecution$1 as mt, updateUser as n, startWorkflow as nt, listUsers as o, getWorkflowExecution as ot, getCommand as p, executeScript as pt, triggerCommand as q, withCommonArgs as qt, removeCommand as r, getCommand$2 as rt, inviteCommand as s, listWorkflowExecutions as st, 
updateCommand as t, startCommand as tt, restoreWorkspace as u, getCommand$3 as ut, createWorkspace as v, DB_TYPES_FILE_NAME as vt, resumeWorkflow as w, compareSnapshots as wt, getAppHealth as x, MIGRATE_FILE_NAME as xt, listApps as y, DIFF_FILE_NAME as yt, getOAuth2Client as z, trnPrefix as zt };
11122
- //# sourceMappingURL=update-DkpWgrzL.mjs.map
11156
+ //# sourceMappingURL=update-9MTRN1UA.mjs.map