@tailor-platform/sdk 1.14.1 → 1.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45) hide show
  1. package/CHANGELOG.md +46 -0
  2. package/README.md +19 -0
  3. package/dist/{application-DnWZVbDO.mjs → application-DPunZ4lc.mjs} +95 -44
  4. package/dist/application-DPunZ4lc.mjs.map +1 -0
  5. package/dist/application-JwJ_-_PQ.mjs +4 -0
  6. package/dist/cli/index.mjs +5 -5
  7. package/dist/cli/lib.d.mts +62 -52
  8. package/dist/cli/lib.mjs +4 -4
  9. package/dist/cli/lib.mjs.map +1 -1
  10. package/dist/cli/skills.d.mts +2 -0
  11. package/dist/cli/skills.mjs +51 -0
  12. package/dist/cli/skills.mjs.map +1 -0
  13. package/dist/configure/index.d.mts +4 -4
  14. package/dist/configure/index.mjs +2 -9
  15. package/dist/configure/index.mjs.map +1 -1
  16. package/dist/{index-BlUBAAvu.d.mts → index-Bs9AsQb2.d.mts} +28 -11
  17. package/dist/index-DomkP6gz.d.mts +396 -0
  18. package/dist/{jiti-DuCiUfMj.mjs → jiti-BrELlEYT.mjs} +2 -2
  19. package/dist/{jiti-DuCiUfMj.mjs.map → jiti-BrELlEYT.mjs.map} +1 -1
  20. package/dist/{job-zGAXCidt.mjs → job-XiwGyFJt.mjs} +1 -1
  21. package/dist/{job-zGAXCidt.mjs.map → job-XiwGyFJt.mjs.map} +1 -1
  22. package/dist/plugin/index.d.mts +16 -2
  23. package/dist/plugin/index.mjs +208 -1
  24. package/dist/plugin/index.mjs.map +1 -1
  25. package/dist/{schema-BmKdDzr1.mjs → schema-DRYB-nzA.mjs} +1 -1
  26. package/dist/{schema-BmKdDzr1.mjs.map → schema-DRYB-nzA.mjs.map} +1 -1
  27. package/dist/{src-QNTCsO6J.mjs → src-DMROgdcL.mjs} +2 -2
  28. package/dist/{src-QNTCsO6J.mjs.map → src-DMROgdcL.mjs.map} +1 -1
  29. package/dist/{index-Bid18Opo.d.mts → types-Db1oxr0U.d.mts} +584 -509
  30. package/dist/{types-r-ZratAg.mjs → types-b-ig8nW_.mjs} +1 -1
  31. package/dist/types-b-ig8nW_.mjs.map +1 -0
  32. package/dist/{update-2eb6jz9o.mjs → update-C_ZTRB63.mjs} +419 -437
  33. package/dist/update-C_ZTRB63.mjs.map +1 -0
  34. package/dist/utils/test/index.d.mts +3 -3
  35. package/dist/utils/test/index.mjs +1 -1
  36. package/docs/plugin/custom.md +156 -83
  37. package/docs/plugin/index.md +2 -2
  38. package/package.json +5 -3
  39. package/skills/tailor-sdk/SKILL.md +34 -0
  40. package/dist/application-DM4zTgXU.mjs +0 -4
  41. package/dist/application-DnWZVbDO.mjs.map +0 -1
  42. package/dist/env-4RO7szrH.d.mts +0 -66
  43. package/dist/types-r-ZratAg.mjs.map +0 -1
  44. package/dist/update-2eb6jz9o.mjs.map +0 -1
  45. /package/dist/{chunk-C3Kl5s5P.mjs → chunk-GMkBE123.mjs} +0 -0
@@ -1,8 +1,7 @@
1
- import { t as isPluginGeneratedType } from "./types-r-ZratAg.mjs";
2
- import { t as db } from "./schema-BmKdDzr1.mjs";
3
- import { a as ExecutorSchema, c as stringifyFunction, d as logger, f as styles, i as createExecutorService, l as tailorUserMap, n as WorkflowJobSchema, o as OAuth2ClientSchema, p as symbols, r as WorkflowSchema, s as ResolverSchema, t as defineApplication, u as loadFilesWithIgnores } from "./application-DnWZVbDO.mjs";
1
+ import { t as isPluginGeneratedType } from "./types-b-ig8nW_.mjs";
2
+ import { t as db } from "./schema-DRYB-nzA.mjs";
3
+ import { a as ExecutorSchema, c as functionSchema, d as loadFilesWithIgnores, f as logger, i as createExecutorService, l as stringifyFunction, m as symbols, n as WorkflowJobSchema, o as OAuth2ClientSchema, p as styles, r as WorkflowSchema, s as ResolverSchema, t as defineApplication, u as tailorUserMap } from "./application-DPunZ4lc.mjs";
4
4
  import { createRequire } from "node:module";
5
- import { cloneDeep } from "es-toolkit";
6
5
  import { arg, defineCommand, runCommand } from "politty";
7
6
  import { z } from "zod";
8
7
  import * as fs$2 from "node:fs";
@@ -29,6 +28,7 @@ import { glob } from "node:fs/promises";
29
28
  import * as rolldown from "rolldown";
30
29
  import { parseSync } from "oxc-parser";
31
30
  import { create, fromJson, toJson } from "@bufbuild/protobuf";
31
+ import * as crypto from "node:crypto";
32
32
  import ora from "ora";
33
33
  import { setTimeout as setTimeout$1 } from "timers/promises";
34
34
  import { spawn } from "node:child_process";
@@ -522,7 +522,7 @@ const file_tailor_v1_function_registry = /* @__PURE__ */ fileDesc("CiF0YWlsb3Ivd
522
522
  /**
523
523
  * Describes the file tailor/v1/idp_resource.proto.
524
524
  */
525
- const file_tailor_v1_idp_resource = /* @__PURE__ */ fileDesc("Chx0YWlsb3IvdjEvaWRwX3Jlc291cmNlLnByb3RvEgl0YWlsb3IudjEi3gEKCklkUFNlcnZpY2USJwoJbmFtZXNwYWNlGAEgASgLMhQudGFpbG9yLnYxLk5hbWVzcGFjZRIVCg1hdXRob3JpemF0aW9uGAIgASgJEhkKDHByb3ZpZGVyX3VybBgDIAEoCUID4EEDEjYKEHVzZXJfYXV0aF9wb2xpY3kYBCABKAsyHC50YWlsb3IudjEuSWRQVXNlckF1dGhQb2xpY3kSIAoEbGFuZxgFIAEoDjISLnRhaWxvci52MS5JZFBMYW5nEhsKE3B1Ymxpc2hfdXNlcl9ldmVudHMYBiABKAgiTQoJSWRQQ2xpZW50EgwKBG5hbWUYASABKAkSFgoJY2xpZW50X2lkGAIgASgJQgPgQQMSGgoNY2xpZW50X3NlY3JldBgDIAEoCUID4EEDIrEIChFJZFBVc2VyQXV0aFBvbGljeRIgChh1c2Vfbm9uX2VtYWlsX2lkZW50aWZpZXIYASABKAgSIQoZYWxsb3dfc2VsZl9wYXNzd29yZF9yZXNldBgCIAEoCBIiChpwYXNzd29yZF9yZXF1aXJlX3VwcGVyY2FzZRgDIAEoCBIiChpwYXNzd29yZF9yZXF1aXJlX2xvd2VyY2FzZRgEIAEoCBIpCiFwYXNzd29yZF9yZXF1aXJlX25vbl9hbHBoYW51bWVyaWMYBSABKAgSIAoYcGFzc3dvcmRfcmVxdWlyZV9udW1lcmljGAYgASgIEqYBChNwYXNzd29yZF9taW5fbGVuZ3RoGAcgASgFQogBukiEAboBgAEKGXBhc3N3b3JkX21pbl9sZW5ndGhfcmFuZ2USO3Bhc3N3b3JkX21pbl9sZW5ndGggbXVzdCBiZSAwIChkZWZhdWx0KSBvciBiZXR3ZWVuIDYgYW5kIDMwGiZ0aGlzID09IDAgfHwgKHRoaXMgPj0gNiAmJiB0aGlzIDw9IDMwKRKqAQoTcGFzc3dvcmRfbWF4X2xlbmd0aBgIIAEoBUKMAbpIiAG6AYQBChlwYXNzd29yZF9tYXhfbGVuZ3RoX3JhbmdlEj1wYXNzd29yZF9tYXhfbGVuZ3RoIG11c3QgYmUgMCAoZGVmYXVsdCkgb3IgYmV0d2VlbiA2IGFuZCA0MDk2Gih0aGlzID09IDAgfHwgKHRoaXMgPj0gNiAmJiB0aGlzIDw9IDQwOTYpEi8KFWFsbG93ZWRfZW1haWxfZG9tYWlucxgJIAMoCUIQukgNkgEKEGQYASIEcgJoATq6A7pItgMa6gEKG3Bhc3N3b3JkX2xlbmd0aF9jb25zaXN0ZW5jeRJFcGFzc3dvcmRfbWluX2xlbmd0aCBtdXN0IGJlIGxlc3MgdGhhbiBvciBlcXVhbCB0byBwYXNzd29yZF9tYXhfbGVuZ3RoGoMBKHRoaXMucGFzc3dvcmRfbWluX2xlbmd0aCA9PSAwID8gNiA6IHRoaXMucGFzc3dvcmRfbWluX2xlbmd0aCkgPD0gKHRoaXMucGFzc3dvcmRfbWF4X2xlbmd0aCA9PSAwID8gNDA5NiA6IHRoaXMucGFzc3dvcmRfbWF4X2xlbmd0aCkaxgEKL2FsbG93ZWRfZW1haWxfZG9tYWluc19yZXF1aXJlc19lbWFpbF9pZGVudGlmaWVyEklhbGxvd2VkX2VtYWlsX2RvbWFpbnMgY2Fubm90IGJlIHNldCB3aGVuIHVzZV9ub25fZW1haWxfaWRlbnRpZmllciBpcyB0cnVlGkh0aGlzLmFsbG93ZWRfZW1haWxfZG9tYWlucy5zaXplKCkgPT0gMCB8fCAhdGhpcy51c2Vfbm9uX2VtYWlsX2lkZW50aWZpZXIqSAoHSWRQTGFuZxIZChVJRF9QX0xBTkdfVU5TUEVD
SUZJRUQQABIQCgxJRF9QX0xBTkdfRU4QARIQCgxJRF9QX0xBTkdfSkEQAmIGcHJvdG8z", [
525
+ const file_tailor_v1_idp_resource = /* @__PURE__ */ fileDesc("Chx0YWlsb3IvdjEvaWRwX3Jlc291cmNlLnByb3RvEgl0YWlsb3IudjEi3gEKCklkUFNlcnZpY2USJwoJbmFtZXNwYWNlGAEgASgLMhQudGFpbG9yLnYxLk5hbWVzcGFjZRIVCg1hdXRob3JpemF0aW9uGAIgASgJEhkKDHByb3ZpZGVyX3VybBgDIAEoCUID4EEDEjYKEHVzZXJfYXV0aF9wb2xpY3kYBCABKAsyHC50YWlsb3IudjEuSWRQVXNlckF1dGhQb2xpY3kSIAoEbGFuZxgFIAEoDjISLnRhaWxvci52MS5JZFBMYW5nEhsKE3B1Ymxpc2hfdXNlcl9ldmVudHMYBiABKAgiTQoJSWRQQ2xpZW50EgwKBG5hbWUYASABKAkSFgoJY2xpZW50X2lkGAIgASgJQgPgQQMSGgoNY2xpZW50X3NlY3JldBgDIAEoCUID4EEDIoYKChFJZFBVc2VyQXV0aFBvbGljeRIgChh1c2Vfbm9uX2VtYWlsX2lkZW50aWZpZXIYASABKAgSIQoZYWxsb3dfc2VsZl9wYXNzd29yZF9yZXNldBgCIAEoCBIiChpwYXNzd29yZF9yZXF1aXJlX3VwcGVyY2FzZRgDIAEoCBIiChpwYXNzd29yZF9yZXF1aXJlX2xvd2VyY2FzZRgEIAEoCBIpCiFwYXNzd29yZF9yZXF1aXJlX25vbl9hbHBoYW51bWVyaWMYBSABKAgSIAoYcGFzc3dvcmRfcmVxdWlyZV9udW1lcmljGAYgASgIEqYBChNwYXNzd29yZF9taW5fbGVuZ3RoGAcgASgFQogBukiEAboBgAEKGXBhc3N3b3JkX21pbl9sZW5ndGhfcmFuZ2USO3Bhc3N3b3JkX21pbl9sZW5ndGggbXVzdCBiZSAwIChkZWZhdWx0KSBvciBiZXR3ZWVuIDYgYW5kIDMwGiZ0aGlzID09IDAgfHwgKHRoaXMgPj0gNiAmJiB0aGlzIDw9IDMwKRKqAQoTcGFzc3dvcmRfbWF4X2xlbmd0aBgIIAEoBUKMAbpIiAG6AYQBChlwYXNzd29yZF9tYXhfbGVuZ3RoX3JhbmdlEj1wYXNzd29yZF9tYXhfbGVuZ3RoIG11c3QgYmUgMCAoZGVmYXVsdCkgb3IgYmV0d2VlbiA2IGFuZCA0MDk2Gih0aGlzID09IDAgfHwgKHRoaXMgPj0gNiAmJiB0aGlzIDw9IDQwOTYpEi8KFWFsbG93ZWRfZW1haWxfZG9tYWlucxgJIAMoCUIQukgNkgEKEGQYASIEcgJoARIaChJhbGxvd19nb29nbGVfb2F1dGgYCiABKAg68wS6SO8EGuoBChtwYXNzd29yZF9sZW5ndGhfY29uc2lzdGVuY3kSRXBhc3N3b3JkX21pbl9sZW5ndGggbXVzdCBiZSBsZXNzIHRoYW4gb3IgZXF1YWwgdG8gcGFzc3dvcmRfbWF4X2xlbmd0aBqDASh0aGlzLnBhc3N3b3JkX21pbl9sZW5ndGggPT0gMCA/IDYgOiB0aGlzLnBhc3N3b3JkX21pbl9sZW5ndGgpIDw9ICh0aGlzLnBhc3N3b3JkX21heF9sZW5ndGggPT0gMCA/IDQwOTYgOiB0aGlzLnBhc3N3b3JkX21heF9sZW5ndGgpGsYBCi9hbGxvd2VkX2VtYWlsX2RvbWFpbnNfcmVxdWlyZXNfZW1haWxfaWRlbnRpZmllchJJYWxsb3dlZF9lbWFpbF9kb21haW5zIGNhbm5vdCBiZSBzZXQgd2hlbiB1c2Vfbm9uX2VtYWlsX2lkZW50aWZpZXIgaXMgdHJ1ZRpIdGhpcy5hbGxvd2VkX2VtYWlsX2RvbWFpbnMuc2l6ZSgpID09IDAgfHwgIXRoaXMudXNlX25vbl9lbWFpbF9pZGVudGlmaWVyGrYB
CixhbGxvd19nb29nbGVfb2F1dGhfcmVxdWlyZXNfZW1haWxfaWRlbnRpZmllchJKYWxsb3dfZ29vZ2xlX29hdXRoIGNhbm5vdCBiZSBlbmFibGVkIHdoZW4gdXNlX25vbl9lbWFpbF9pZGVudGlmaWVyIGlzIHRydWUaOiF0aGlzLmFsbG93X2dvb2dsZV9vYXV0aCB8fCAhdGhpcy51c2Vfbm9uX2VtYWlsX2lkZW50aWZpZXIqSAoHSWRQTGFuZxIZChVJRF9QX0xBTkdfVU5TUEVDSUZJRUQQABIQCgxJRF9QX0xBTkdfRU4QARIQCgxJRF9QX0xBTkdfSkEQAmIGcHJvdG8z", [
526
526
  file_buf_validate_validate,
527
527
  file_google_api_field_behavior,
528
528
  file_tailor_v1_resource
@@ -2685,159 +2685,21 @@ function createGeneratorConfigSchema(builtinGenerators$1) {
2685
2685
 
2686
2686
  //#endregion
2687
2687
  //#region src/parser/plugin-config/schema.ts
2688
- const unauthenticatedTailorUser$1 = {
2689
- id: "00000000-0000-0000-0000-000000000000",
2690
- type: "",
2691
- workspaceId: "00000000-0000-0000-0000-000000000000",
2692
- attributes: null,
2693
- attributeList: []
2694
- };
2695
- const PluginGeneratedTypeSchema = z.object({
2696
- name: z.string(),
2697
- fields: z.record(z.string(), z.unknown())
2698
- });
2699
- const PluginGeneratedResolverSchema = z.object({
2700
- name: z.string(),
2701
- operation: z.enum(["query", "mutation"]),
2702
- inputFields: z.record(z.string(), z.unknown()).optional(),
2703
- outputFields: z.record(z.string(), z.unknown()),
2704
- body: z.string()
2705
- });
2706
- const PluginTriggerConfigSchema = z.object({
2707
- kind: z.enum([
2708
- "recordCreated",
2709
- "recordUpdated",
2710
- "recordDeleted",
2711
- "schedule",
2712
- "incomingWebhook"
2713
- ]),
2714
- type: z.string().optional(),
2715
- schedule: z.string().optional()
2716
- });
2717
- const PluginOperationConfigSchema = z.object({
2718
- kind: z.enum([
2719
- "function",
2720
- "webhook",
2721
- "graphql",
2722
- "workflow"
2723
- ]),
2724
- body: z.string().optional(),
2725
- url: z.string().optional(),
2726
- query: z.string().optional()
2727
- });
2728
- const PluginGeneratedExecutorSchema = z.object({
2729
- name: z.string(),
2730
- description: z.string().optional(),
2731
- trigger: PluginTriggerConfigSchema,
2732
- operation: PluginOperationConfigSchema
2733
- });
2734
- z.object({
2735
- types: z.array(PluginGeneratedTypeSchema).optional(),
2736
- resolvers: z.array(PluginGeneratedResolverSchema).optional(),
2737
- executors: z.array(PluginGeneratedExecutorSchema).optional()
2738
- });
2739
2688
  const CustomPluginSchema = z.object({
2740
2689
  id: z.string(),
2741
2690
  description: z.string(),
2742
2691
  importPath: z.string(),
2743
- configSchema: z.any().optional(),
2744
- pluginConfigSchema: z.any().optional(),
2745
- pluginConfig: z.any().optional(),
2746
- process: z.any().optional(),
2747
- processNamespace: z.any().optional()
2748
- }).superRefine((plugin, ctx) => {
2749
- if (plugin.process && !plugin.configSchema) ctx.addIssue({
2750
- code: z.ZodIssueCode.custom,
2751
- message: "process requires configSchema to be defined.",
2752
- path: ["configSchema"]
2753
- });
2692
+ pluginConfig: z.unknown().optional(),
2693
+ processType: functionSchema.optional(),
2694
+ processNamespace: functionSchema.optional(),
2695
+ typeConfigRequired: z.union([z.boolean(), functionSchema]).optional()
2754
2696
  }).passthrough();
2755
- const CustomPluginTupleSchema = z.tuple([CustomPluginSchema, z.unknown()]);
2756
- /**
2757
- * Type guard to check if a value is a PluginBase object
2758
- * @param value - Value to check
2759
- * @returns True if value is a PluginBase object
2760
- */
2761
- function isPluginBase(value) {
2762
- return CustomPluginSchema.safeParse(value).success;
2763
- }
2764
- function normalizePluginConfigSchema(schema) {
2765
- const seen = /* @__PURE__ */ new Set();
2766
- const stack = [schema];
2767
- while (stack.length > 0) {
2768
- const field = stack.pop();
2769
- if (!field || seen.has(field)) continue;
2770
- seen.add(field);
2771
- const requiredExplicit = field._metadata.requiredExplicit === true;
2772
- field._metadata.required = requiredExplicit;
2773
- for (const nestedField of Object.values(field.fields)) stack.push(nestedField);
2774
- }
2775
- return schema;
2776
- }
2777
- function clonePluginConfigSchema(schema) {
2778
- return cloneDeep(schema);
2779
- }
2780
- function normalizePluginBase(plugin) {
2781
- let normalized = plugin;
2782
- if (normalized.configSchema) {
2783
- const clonedConfigSchema = clonePluginConfigSchema(normalized.configSchema);
2784
- normalizePluginConfigSchema(clonedConfigSchema);
2785
- normalized = {
2786
- ...normalized,
2787
- configSchema: clonedConfigSchema
2788
- };
2789
- }
2790
- if (normalized.pluginConfigSchema) {
2791
- const pluginConfigSchema = clonePluginConfigSchema(normalized.pluginConfigSchema);
2792
- normalizePluginConfigSchema(pluginConfigSchema);
2793
- normalized = {
2794
- ...normalized,
2795
- pluginConfigSchema
2796
- };
2797
- if (normalized.pluginConfig !== void 0) {
2798
- const validationErrors = validatePluginConfig$1(normalized.pluginConfig, pluginConfigSchema);
2799
- if (validationErrors.length > 0) {
2800
- const errorDetails = validationErrors.map((e) => e.field ? `${e.field}: ${e.message}` : e.message).join("; ");
2801
- throw new Error(`Invalid pluginConfig for plugin "${normalized.id}": ${errorDetails}`);
2802
- }
2803
- }
2804
- }
2805
- return normalized;
2806
- }
2807
- /**
2808
- * Validate plugin config against its schema
2809
- * @param config - The config object to validate
2810
- * @param schema - The schema defining expected fields
2811
- * @returns Array of validation errors (empty if valid)
2812
- */
2813
- function validatePluginConfig$1(config, schema) {
2814
- const result = schema.parse({
2815
- value: config,
2816
- data: config,
2817
- user: unauthenticatedTailorUser$1
2818
- });
2819
- if ("issues" in result && result.issues) return result.issues.map((issue) => ({
2820
- field: Array.isArray(issue.path) ? issue.path.join(".") : "",
2821
- message: issue.message
2822
- }));
2823
- return [];
2824
- }
2825
2697
  /**
2826
2698
  * Creates a PluginConfigSchema for custom plugins
2827
- * @returns Plugin config schema that validates and transforms PluginBase instances
2699
+ * @returns Plugin config schema that validates and transforms Plugin instances
2828
2700
  */
2829
2701
  function createPluginConfigSchema() {
2830
- return z.union([CustomPluginSchema, CustomPluginTupleSchema]).transform((plugin) => {
2831
- if (Array.isArray(plugin)) {
2832
- const [first, options] = plugin;
2833
- if (isPluginBase(first)) return normalizePluginBase({
2834
- ...first,
2835
- pluginConfig: options
2836
- });
2837
- throw new Error(`Invalid plugin configuration: expected PluginBase object`);
2838
- }
2839
- return normalizePluginBase(plugin);
2840
- }).brand("Plugin");
2702
+ return CustomPluginSchema.transform((plugin) => plugin).brand("Plugin");
2841
2703
  }
2842
2704
 
2843
2705
  //#endregion
@@ -3367,32 +3229,6 @@ function createKyselyGenerator(options) {
3367
3229
  };
3368
3230
  }
3369
3231
 
3370
- //#endregion
3371
- //#region src/cli/utils/plugin-import.ts
3372
- /**
3373
- * Collect base directories for resolving plugin import paths.
3374
- * @param configPath - Path to tailor.config.ts
3375
- * @returns Ordered list of base directories
3376
- */
3377
- function getPluginImportBaseDirs(configPath) {
3378
- if (configPath) return [path.dirname(configPath)];
3379
- return [process.cwd()];
3380
- }
3381
- /**
3382
- * Resolve a relative plugin import path against candidate base directories.
3383
- * @param pluginImportPath - Relative plugin import path
3384
- * @param baseDirs - Candidate base directories
3385
- * @returns Absolute path if found, otherwise null
3386
- */
3387
- function resolveRelativePluginImportPath(pluginImportPath, baseDirs) {
3388
- if (!pluginImportPath.startsWith(".")) return null;
3389
- for (const baseDir of baseDirs) {
3390
- const absolutePath = path.resolve(baseDir, pluginImportPath);
3391
- if (fs$2.existsSync(absolutePath)) return absolutePath;
3392
- }
3393
- return null;
3394
- }
3395
-
3396
3232
  //#endregion
3397
3233
  //#region src/cli/generator/builtin/seed/idp-user-processor.ts
3398
3234
  /**
@@ -3681,7 +3517,7 @@ function generateLinesDbSchemaFile(metadata, importPath) {
3681
3517
  function generateLinesDbSchemaFileWithPluginAPI(metadata, params) {
3682
3518
  const { typeName, exportName, optionalFields, omitFields, foreignKeys, indexes, pluginSource } = metadata;
3683
3519
  if (!pluginSource) throw new Error(`pluginSource is required for plugin-generated type "${typeName}"`);
3684
- const { pluginImportPath, originalImportPath } = params;
3520
+ const { configImportPath, originalImportPath } = params;
3685
3521
  const schemaTypeCode = ml`
3686
3522
  const schemaType = t.object({
3687
3523
  ...${exportName}.pickFields(${JSON.stringify(optionalFields)}, { optional: true }),
@@ -3690,13 +3526,15 @@ function generateLinesDbSchemaFileWithPluginAPI(metadata, params) {
3690
3526
  `;
3691
3527
  const schemaOptionsCode = generateSchemaOptions(foreignKeys, indexes);
3692
3528
  if (pluginSource.originalExportName && originalImportPath && pluginSource.generatedTypeKind) return ml`
3529
+ import { join } from "node:path";
3693
3530
  import { t } from "@tailor-platform/sdk";
3531
+ import { getGeneratedType } from "@tailor-platform/sdk/plugin";
3694
3532
  import { createTailorDBHook, createStandardSchema } from "@tailor-platform/sdk/test";
3695
3533
  import { defineSchema } from "@toiroakr/lines-db";
3696
- import { getGeneratedType } from "${pluginImportPath}";
3697
3534
  import { ${pluginSource.originalExportName} } from "${originalImportPath}";
3698
3535
 
3699
- const ${exportName} = getGeneratedType(${pluginSource.originalExportName}, "${pluginSource.generatedTypeKind}");
3536
+ const configPath = join(import.meta.dirname, "${configImportPath}");
3537
+ const ${exportName} = await getGeneratedType(configPath, "${pluginSource.pluginId}", ${pluginSource.originalExportName}, "${pluginSource.generatedTypeKind}");
3700
3538
 
3701
3539
  ${schemaTypeCode}
3702
3540
 
@@ -3709,12 +3547,14 @@ function generateLinesDbSchemaFileWithPluginAPI(metadata, params) {
3709
3547
  `;
3710
3548
  if (!pluginSource.generatedTypeKind) throw new Error(`Namespace plugin "${pluginSource.pluginId}" must provide generatedTypeKind for type "${typeName}"`);
3711
3549
  return ml`
3550
+ import { join } from "node:path";
3712
3551
  import { t } from "@tailor-platform/sdk";
3552
+ import { getGeneratedType } from "@tailor-platform/sdk/plugin";
3713
3553
  import { createTailorDBHook, createStandardSchema } from "@tailor-platform/sdk/test";
3714
3554
  import { defineSchema } from "@toiroakr/lines-db";
3715
- import { getGeneratedType } from "${pluginImportPath}";
3716
3555
 
3717
- const ${exportName} = getGeneratedType(null, "${pluginSource.generatedTypeKind}");
3556
+ const configPath = join(import.meta.dirname, "${configImportPath}");
3557
+ const ${exportName} = await getGeneratedType(configPath, "${pluginSource.pluginId}", null, "${pluginSource.generatedTypeKind}");
3718
3558
 
3719
3559
  ${schemaTypeCode}
3720
3560
 
@@ -4371,7 +4211,6 @@ function createSeedGenerator(options) {
4371
4211
  processTailorDBNamespace: ({ types }) => types,
4372
4212
  aggregate: ({ input, configPath }) => {
4373
4213
  const files = [];
4374
- const pluginImportBaseDirs = getPluginImportBaseDirs(configPath);
4375
4214
  const namespaceConfigs = [];
4376
4215
  for (const nsResult of input.tailordb) {
4377
4216
  if (!nsResult.types) continue;
@@ -4394,14 +4233,8 @@ function createSeedGenerator(options) {
4394
4233
  const relativePath = path.relative(path.dirname(schemaOutputPath), linesDb.pluginSource.originalFilePath);
4395
4234
  originalImportPath = relativePath.replace(/\.ts$/, "").startsWith(".") ? relativePath.replace(/\.ts$/, "") : `./${relativePath.replace(/\.ts$/, "")}`;
4396
4235
  }
4397
- let pluginImportPath = linesDb.pluginSource.pluginImportPath;
4398
- if (pluginImportPath.startsWith("./") || pluginImportPath.startsWith("../")) {
4399
- const resolvedPluginPath = resolveRelativePluginImportPath(pluginImportPath, pluginImportBaseDirs) ?? path.resolve(pluginImportBaseDirs[0] ?? process.cwd(), pluginImportPath);
4400
- const relativePluginPath = path.relative(path.dirname(schemaOutputPath), resolvedPluginPath);
4401
- pluginImportPath = relativePluginPath.startsWith(".") ? relativePluginPath : `./${relativePluginPath}`;
4402
- }
4403
4236
  const schemaContent = generateLinesDbSchemaFileWithPluginAPI(linesDb, {
4404
- pluginImportPath,
4237
+ configImportPath: path.relative(path.dirname(schemaOutputPath), configPath),
4405
4238
  originalImportPath
4406
4239
  });
4407
4240
  files.push({
@@ -4519,6 +4352,32 @@ async function loadConfig(configPath) {
4519
4352
  };
4520
4353
  }
4521
4354
 
4355
+ //#endregion
4356
+ //#region src/cli/utils/plugin-import.ts
4357
+ /**
4358
+ * Collect base directories for resolving plugin import paths.
4359
+ * @param configPath - Path to tailor.config.ts
4360
+ * @returns Ordered list of base directories
4361
+ */
4362
+ function getPluginImportBaseDirs(configPath) {
4363
+ if (configPath) return [path.dirname(configPath)];
4364
+ return [process.cwd()];
4365
+ }
4366
+ /**
4367
+ * Resolve a relative plugin import path against candidate base directories.
4368
+ * @param pluginImportPath - Relative plugin import path
4369
+ * @param baseDirs - Candidate base directories
4370
+ * @returns Absolute path if found, otherwise null
4371
+ */
4372
+ function resolveRelativePluginImportPath(pluginImportPath, baseDirs) {
4373
+ if (!pluginImportPath.startsWith(".")) return null;
4374
+ for (const baseDir of baseDirs) {
4375
+ const absolutePath = path.resolve(baseDir, pluginImportPath);
4376
+ if (fs$2.existsSync(absolutePath)) return absolutePath;
4377
+ }
4378
+ return null;
4379
+ }
4380
+
4522
4381
  //#endregion
4523
4382
  //#region src/parser/plugin-config/types.ts
4524
4383
  /**
@@ -5036,10 +4895,9 @@ function extractAttributesFromConfig(config) {
5036
4895
  * @param attributeMap - Attribute map configuration
5037
4896
  * @param attributeList - Attribute list configuration
5038
4897
  * @param env - Environment configuration
5039
- * @param pluginConfigs - Plugin configuration entries for PluginConfigs interface
5040
4898
  * @returns Generated type definition source
5041
4899
  */
5042
- function generateTypeDefinition(attributeMap, attributeList, env, pluginConfigs) {
4900
+ function generateTypeDefinition(attributeMap, attributeList, env) {
5043
4901
  const mapFields = attributeMap ? Object.entries(attributeMap).map(([key, value]) => ` ${key}: ${value};`).join("\n") : "";
5044
4902
  const mapBody = !attributeMap || Object.keys(attributeMap).length === 0 ? "{}" : `{
5045
4903
  ${mapFields}
@@ -5060,7 +4918,7 @@ declare module "@tailor-platform/sdk" {
5060
4918
  interface AttributeList ${listBody}
5061
4919
  interface Env ${!env || Object.keys(env).length === 0 ? "{}" : `{
5062
4920
  ${envFields}
5063
- }`}${pluginConfigs && pluginConfigs.length > 0 ? `\n${generatePluginConfigsDefinition(pluginConfigs)}` : ""}${pluginConfigs && pluginConfigs.length > 0 ? `\n${generatePluginMethodOverload(pluginConfigs)}` : ""}
4921
+ }`}
5064
4922
  }
5065
4923
 
5066
4924
  export {};
@@ -5123,7 +4981,7 @@ function resolveTypeDefinitionPath(configPath) {
5123
4981
  * @returns Promise that resolves when types are generated
5124
4982
  */
5125
4983
  async function generateUserTypes(options) {
5126
- const { config, configPath, plugins } = options;
4984
+ const { config, configPath } = options;
5127
4985
  try {
5128
4986
  const { attributeMap, attributeList } = extractAttributesFromConfig(config);
5129
4987
  if (!attributeMap && !attributeList) logger.info("No attributes found in configuration", { mode: "plain" });
@@ -5131,14 +4989,7 @@ async function generateUserTypes(options) {
5131
4989
  if (attributeList) logger.debug(`Extracted AttributeList: ${JSON.stringify(attributeList)}`);
5132
4990
  const env = config.env;
5133
4991
  if (env) logger.debug(`Extracted Env: ${JSON.stringify(env)}`);
5134
- const pluginConfigs = plugins?.filter((p) => p.configSchema !== void 0).map((p) => ({
5135
- id: p.id,
5136
- configSchema: p.configSchema,
5137
- configTypeTemplate: p.configTypeTemplate,
5138
- typeConfigRequired: isTypeConfigRequired(p)
5139
- }));
5140
- if (pluginConfigs && pluginConfigs.length > 0) logger.debug(`Extracted PluginConfigs: ${pluginConfigs.map((p) => p.id).join(", ")}`);
5141
- const typeDefContent = generateTypeDefinition(attributeMap, attributeList, env, pluginConfigs);
4992
+ const typeDefContent = generateTypeDefinition(attributeMap, attributeList, env);
5142
4993
  const outputPath = resolveTypeDefinitionPath(configPath);
5143
4994
  fs$2.mkdirSync(path.dirname(outputPath), { recursive: true });
5144
4995
  fs$2.writeFileSync(outputPath, typeDefContent);
@@ -5167,116 +5018,9 @@ function resolvePackageDirectory(startDir) {
5167
5018
  return null;
5168
5019
  }
5169
5020
  }
5170
- /**
5171
- * Convert a ConfigSchemaField to its TypeScript type string representation.
5172
- * @param field - The field to convert
5173
- * @param indent - Current indentation level
5174
- * @returns TypeScript type string
5175
- */
5176
- function fieldToTypeString(field, indent = 0) {
5177
- const indentStr = " ".repeat(indent);
5178
- const metadata = field.metadata;
5179
- let baseType;
5180
- switch (field.type) {
5181
- case "string":
5182
- case "uuid":
5183
- case "date":
5184
- case "datetime":
5185
- case "time":
5186
- baseType = "string";
5187
- break;
5188
- case "integer":
5189
- case "float":
5190
- baseType = "number";
5191
- break;
5192
- case "boolean":
5193
- baseType = "boolean";
5194
- break;
5195
- case "enum":
5196
- if (metadata.allowedValues && metadata.allowedValues.length > 0) baseType = metadata.allowedValues.map((v) => `"${v.value}"`).join(" | ");
5197
- else baseType = "string";
5198
- break;
5199
- case "nested": {
5200
- const fieldEntries = Object.entries(field.fields);
5201
- if (fieldEntries.length === 0) baseType = "Record<string, unknown>";
5202
- else baseType = `{\n${fieldEntries.map(([name, nestedField]) => {
5203
- const nestedType = fieldToTypeString(nestedField, indent + 1);
5204
- return `${indentStr} ${name}${!nestedField.metadata.required ? "?" : ""}: ${nestedType};`;
5205
- }).join("\n")}\n${indentStr}}`;
5206
- break;
5207
- }
5208
- default: baseType = "unknown";
5209
- }
5210
- if (metadata.array) baseType = `(${baseType})[]`;
5211
- return baseType;
5212
- }
5213
- function isTypeConfigRequired(plugin) {
5214
- const required = plugin.typeConfigRequired;
5215
- if (typeof required === "function") return required(plugin.pluginConfig);
5216
- return required === true;
5217
- }
5218
- /**
5219
- * Generate PluginConfigs interface extension for user-defined.d.ts
5220
- * @param plugins - Array of plugin configurations
5221
- * @returns TypeScript interface extension string, or empty string if no plugins
5222
- */
5223
- function generatePluginConfigsDefinition(plugins) {
5224
- if (plugins.length === 0) return "";
5225
- return ` interface PluginConfigs<Fields extends string> {
5226
- ${plugins.map((plugin) => {
5227
- const typeString = plugin.configTypeTemplate ?? fieldToTypeString(plugin.configSchema, 2);
5228
- const optionalMarker = plugin.typeConfigRequired ? "" : "?";
5229
- return ` "${plugin.id}"${optionalMarker}: ${typeString};`;
5230
- }).join("\n")}
5231
- }`;
5232
- }
5233
- /**
5234
- * Generate TailorDBType.plugin() method overload for IDE completion.
5235
- * This adds an overload that uses `keyof Fields` directly for field-aware types,
5236
- * enabling IDE autocompletion for field names in plugin configs.
5237
- * @param plugins - Array of plugin configurations
5238
- * @returns TypeScript interface extension string
5239
- */
5240
- function generatePluginMethodOverload(plugins) {
5241
- return ` // Overload for .plugin() method to enable IDE completion for field names
5242
- interface TailorDBType<Fields, User> {
5243
- plugin(config: {
5244
- ${plugins.map((plugin) => {
5245
- const typeString = (plugin.configTypeTemplate ?? fieldToTypeString(plugin.configSchema, 3)).replace(/\bFields\b/g, "keyof Fields & string");
5246
- const optionalMarker = plugin.typeConfigRequired ? "" : "?";
5247
- return ` "${plugin.id}"${optionalMarker}: ${typeString};`;
5248
- }).join("\n")}
5249
- }): this;
5250
- }`;
5251
- }
5252
5021
 
5253
5022
  //#endregion
5254
5023
  //#region src/plugin/manager.ts
5255
- const unauthenticatedTailorUser = {
5256
- id: "00000000-0000-0000-0000-000000000000",
5257
- type: "",
5258
- workspaceId: "00000000-0000-0000-0000-000000000000",
5259
- attributes: null,
5260
- attributeList: []
5261
- };
5262
- /**
5263
- * Validate plugin config against its schema
5264
- * @param config - The config object to validate
5265
- * @param schema - The schema defining expected fields
5266
- * @returns Array of validation errors (empty if valid)
5267
- */
5268
- function validatePluginConfig(config, schema) {
5269
- const result = schema.parse({
5270
- value: config,
5271
- data: config,
5272
- user: unauthenticatedTailorUser
5273
- });
5274
- if ("issues" in result && result.issues) return result.issues.map((issue) => ({
5275
- field: Array.isArray(issue.path) ? issue.path.join(".") : "",
5276
- message: issue.message
5277
- }));
5278
- return [];
5279
- }
5280
5024
  /**
5281
5025
  * Manages plugin registration and processing
5282
5026
  */
@@ -5305,29 +5049,19 @@ var PluginManager = class {
5305
5049
  error: `Plugin "${context.pluginId}" not found`
5306
5050
  };
5307
5051
  const typeConfigRequired = plugin.typeConfigRequired;
5308
- if ((typeof typeConfigRequired === "function" ? typeConfigRequired(plugin.pluginConfig) : typeConfigRequired === true) && (context.config === void 0 || context.config === null)) return {
5052
+ if ((typeof typeConfigRequired === "function" ? typeConfigRequired(plugin.pluginConfig) : typeConfigRequired === true) && (context.typeConfig === void 0 || context.typeConfig === null)) return {
5309
5053
  success: false,
5310
- error: `Plugin "${plugin.id}" requires config, but none was provided for type "${context.type.name}".`
5054
+ error: `Plugin "${plugin.id}" requires typeConfig, but none was provided for type "${context.type.name}".`
5311
5055
  };
5312
- if (plugin.configSchema) {
5313
- const validationErrors = validatePluginConfig(context.config, plugin.configSchema);
5314
- if (validationErrors.length > 0) {
5315
- const errorDetails = validationErrors.map((e) => e.field ? `${e.field}: ${e.message}` : e.message).join("; ");
5316
- return {
5317
- success: false,
5318
- error: `Invalid config for plugin "${plugin.id}" on type "${context.type.name}": ${errorDetails}`
5319
- };
5320
- }
5321
- }
5322
- if (!plugin.process) return {
5056
+ if (!plugin.processType) return {
5323
5057
  success: false,
5324
- error: `Plugin "${plugin.id}" does not support type-attached processing (missing process method). Use processNamespace via definePlugins() instead.`
5058
+ error: `Plugin "${plugin.id}" does not support type-attached processing (missing processType method). Use processNamespace via definePlugins() instead.`
5325
5059
  };
5326
5060
  let output;
5327
5061
  try {
5328
- output = await plugin.process({
5062
+ output = await plugin.processType({
5329
5063
  type: context.type,
5330
- config: context.config,
5064
+ typeConfig: context.typeConfig,
5331
5065
  pluginConfig: plugin.pluginConfig,
5332
5066
  namespace: context.namespace
5333
5067
  });
@@ -5345,7 +5079,9 @@ var PluginManager = class {
5345
5079
  pluginImportPath: importPath,
5346
5080
  sourceTypeName: context.type.name,
5347
5081
  kind,
5348
- type
5082
+ type,
5083
+ namespace: context.namespace,
5084
+ pluginConfig: plugin.pluginConfig
5349
5085
  });
5350
5086
  }
5351
5087
  if (output.executors && output.executors.length > 0) for (const executor of output.executors) this.generatedExecutors.push({
@@ -5363,35 +5099,16 @@ var PluginManager = class {
5363
5099
  * Process namespace plugins that don't require a source type.
5364
5100
  * This method is called once per namespace for plugins with processNamespace method.
5365
5101
  * @param namespace - The target namespace for generated types
5366
- * @param types - TailorDB types in the namespace (after type-attached processing)
5367
- * @param generatedTypes - Plugin-generated types in the namespace
5368
5102
  * @returns Array of results with plugin outputs and configs
5369
5103
  */
5370
- async processNamespacePlugins(namespace, types, generatedTypes) {
5104
+ async processNamespacePlugins(namespace) {
5371
5105
  const results = [];
5372
5106
  for (const [pluginId, plugin] of this.plugins) {
5373
5107
  if (!plugin.processNamespace) continue;
5374
5108
  const config = plugin.pluginConfig;
5375
- if (plugin.pluginConfigSchema && config !== void 0) {
5376
- const validationErrors = validatePluginConfig(config, plugin.pluginConfigSchema);
5377
- if (validationErrors.length > 0) {
5378
- const errorDetails = validationErrors.map((e) => e.field ? `${e.field}: ${e.message}` : e.message).join("; ");
5379
- results.push({
5380
- pluginId,
5381
- config,
5382
- result: {
5383
- success: false,
5384
- error: `Invalid pluginConfig for plugin "${plugin.id}": ${errorDetails}`
5385
- }
5386
- });
5387
- continue;
5388
- }
5389
- }
5390
5109
  const context = {
5391
5110
  pluginConfig: config,
5392
- namespace,
5393
- types,
5394
- generatedTypes
5111
+ namespace
5395
5112
  };
5396
5113
  let output;
5397
5114
  try {
@@ -5429,7 +5146,9 @@ var PluginManager = class {
5429
5146
  pluginImportPath: importPath,
5430
5147
  sourceTypeName: "(namespace)",
5431
5148
  kind,
5432
- type
5149
+ type,
5150
+ namespace,
5151
+ pluginConfig: plugin.pluginConfig
5433
5152
  });
5434
5153
  }
5435
5154
  }
@@ -5459,6 +5178,14 @@ var PluginManager = class {
5459
5178
  return this.plugins.size;
5460
5179
  }
5461
5180
  /**
5181
+ * Get a plugin by its ID
5182
+ * @param pluginId - The plugin ID to look up
5183
+ * @returns The plugin instance, or undefined if not found
5184
+ */
5185
+ getPlugin(pluginId) {
5186
+ return this.plugins.get(pluginId);
5187
+ }
5188
+ /**
5462
5189
  * Get the import path for a plugin
5463
5190
  * @param pluginId - The plugin ID to look up
5464
5191
  * @returns The plugin's import path, or undefined if not found
@@ -6993,6 +6720,245 @@ async function confirmImportantResourceDeletion(resources, yes) {
6993
6720
  `);
6994
6721
  }
6995
6722
 
6723
+ //#endregion
6724
+ //#region src/cli/apply/services/function-registry.ts
6725
+ const CHUNK_SIZE = 64 * 1024;
6726
+ /**
6727
+ * Compute SHA-256 content hash for a script string.
6728
+ * @param content - Script content to hash
6729
+ * @returns Hex-encoded SHA-256 hash
6730
+ */
6731
+ function computeContentHash(content) {
6732
+ return crypto.createHash("sha256").update(content, "utf-8").digest("hex");
6733
+ }
6734
+ function functionRegistryTrn(workspaceId, name) {
6735
+ return `trn:v1:workspace:${workspaceId}:function_registry:${name}`;
6736
+ }
6737
+ /**
6738
+ * Build a function registry name for a resolver.
6739
+ * @param namespace - Resolver namespace
6740
+ * @param resolverName - Resolver name
6741
+ * @returns Function registry name
6742
+ */
6743
+ function resolverFunctionName(namespace, resolverName) {
6744
+ return `resolver--${namespace}--${resolverName}`;
6745
+ }
6746
+ /**
6747
+ * Build a function registry name for an executor.
6748
+ * @param executorName - Executor name
6749
+ * @returns Function registry name
6750
+ */
6751
+ function executorFunctionName(executorName) {
6752
+ return `executor--${executorName}`;
6753
+ }
6754
+ /**
6755
+ * Build a function registry name for a workflow job.
6756
+ * @param jobName - Workflow job name
6757
+ * @returns Function registry name
6758
+ */
6759
+ function workflowJobFunctionName(jobName) {
6760
+ return `workflow--${jobName}`;
6761
+ }
6762
+ /**
6763
+ * Collect all function entries from bundled scripts for all services.
6764
+ * @param application - Application definition
6765
+ * @param workflowJobs - Collected workflow jobs from config
6766
+ * @returns Array of function entries to register
6767
+ */
6768
+ function collectFunctionEntries(application, workflowJobs) {
6769
+ const entries = [];
6770
+ const distDir = getDistDir();
6771
+ for (const app of application.applications) for (const pipeline of app.resolverServices) for (const resolver of Object.values(pipeline.getResolvers())) {
6772
+ const scriptPath = path.join(distDir, "resolvers", `${resolver.name}.js`);
6773
+ try {
6774
+ const content = fs$2.readFileSync(scriptPath, "utf-8");
6775
+ entries.push({
6776
+ name: resolverFunctionName(pipeline.namespace, resolver.name),
6777
+ scriptContent: content,
6778
+ contentHash: computeContentHash(content),
6779
+ description: `Resolver: ${pipeline.namespace}/${resolver.name}`
6780
+ });
6781
+ } catch {
6782
+ logger.warn(`Function file not found: ${scriptPath}`);
6783
+ }
6784
+ }
6785
+ if (application.executorService) {
6786
+ const executors = application.executorService.getExecutors();
6787
+ for (const executor of Object.values(executors)) if (executor.operation.kind === "function" || executor.operation.kind === "jobFunction") {
6788
+ const scriptPath = path.join(distDir, "executors", `${executor.name}.js`);
6789
+ try {
6790
+ const content = fs$2.readFileSync(scriptPath, "utf-8");
6791
+ entries.push({
6792
+ name: executorFunctionName(executor.name),
6793
+ scriptContent: content,
6794
+ contentHash: computeContentHash(content),
6795
+ description: `Executor: ${executor.name}`
6796
+ });
6797
+ } catch {
6798
+ logger.warn(`Function file not found: ${scriptPath}`);
6799
+ }
6800
+ }
6801
+ }
6802
+ for (const job of workflowJobs) {
6803
+ const scriptPath = path.join(distDir, "workflow-jobs", `${job.name}.js`);
6804
+ try {
6805
+ const content = fs$2.readFileSync(scriptPath, "utf-8");
6806
+ entries.push({
6807
+ name: workflowJobFunctionName(job.name),
6808
+ scriptContent: content,
6809
+ contentHash: computeContentHash(content),
6810
+ description: `Workflow job: ${job.name}`
6811
+ });
6812
+ } catch {
6813
+ logger.warn(`Function file not found: ${scriptPath}`);
6814
+ }
6815
+ }
6816
+ return entries;
6817
+ }
6818
+ /**
6819
+ * Plan function registry changes based on current and desired state.
6820
+ * @param client - Operator client instance
6821
+ * @param workspaceId - Workspace ID
6822
+ * @param appName - Application name
6823
+ * @param entries - Desired function entries
6824
+ * @returns Planned changes
6825
+ */
6826
+ async function planFunctionRegistry(client, workspaceId, appName, entries) {
6827
+ const changeSet = createChangeSet("Function registry");
6828
+ const conflicts = [];
6829
+ const unmanaged = [];
6830
+ const resourceOwners = /* @__PURE__ */ new Set();
6831
+ const existingFunctions = await fetchAll(async (pageToken) => {
6832
+ try {
6833
+ const response = await client.listFunctionRegistries({
6834
+ workspaceId,
6835
+ pageToken
6836
+ });
6837
+ return [response.functions.map((f) => ({
6838
+ name: f.name,
6839
+ contentHash: f.contentHash
6840
+ })), response.nextPageToken];
6841
+ } catch (error) {
6842
+ if (error instanceof ConnectError && error.code === Code.NotFound) return [[], ""];
6843
+ throw error;
6844
+ }
6845
+ });
6846
+ const existingMap = {};
6847
+ await Promise.all(existingFunctions.map(async (func) => {
6848
+ const { metadata } = await client.getMetadata({ trn: functionRegistryTrn(workspaceId, func.name) });
6849
+ existingMap[func.name] = {
6850
+ resource: func,
6851
+ label: metadata?.labels[sdkNameLabelKey]
6852
+ };
6853
+ }));
6854
+ for (const entry of entries) {
6855
+ const existing = existingMap[entry.name];
6856
+ const metaRequest = await buildMetaRequest(functionRegistryTrn(workspaceId, entry.name), appName);
6857
+ if (existing) {
6858
+ if (!existing.label) unmanaged.push({
6859
+ resourceType: "Function registry",
6860
+ resourceName: entry.name
6861
+ });
6862
+ else if (existing.label !== appName) conflicts.push({
6863
+ resourceType: "Function registry",
6864
+ resourceName: entry.name,
6865
+ currentOwner: existing.label
6866
+ });
6867
+ changeSet.updates.push({
6868
+ name: entry.name,
6869
+ entry,
6870
+ metaRequest
6871
+ });
6872
+ delete existingMap[entry.name];
6873
+ } else changeSet.creates.push({
6874
+ name: entry.name,
6875
+ entry,
6876
+ metaRequest
6877
+ });
6878
+ }
6879
+ for (const [name, existing] of Object.entries(existingMap)) {
6880
+ if (!existing) continue;
6881
+ const label = existing.label;
6882
+ if (label && label !== appName) resourceOwners.add(label);
6883
+ if (label === appName) changeSet.deletes.push({
6884
+ name,
6885
+ workspaceId
6886
+ });
6887
+ }
6888
+ changeSet.print();
6889
+ return {
6890
+ changeSet,
6891
+ conflicts,
6892
+ unmanaged,
6893
+ resourceOwners
6894
+ };
6895
+ }
6896
+ /**
6897
+ * Upload a function script to the function registry using client streaming.
6898
+ * @param client - Operator client instance
6899
+ * @param workspaceId - Workspace ID
6900
+ * @param entry - Function entry to upload
6901
+ * @param isCreate - Whether this is a create (true) or update (false)
6902
+ */
6903
+ async function uploadFunctionScript(client, workspaceId, entry, isCreate) {
6904
+ const buffer = Buffer.from(entry.scriptContent, "utf-8");
6905
+ const info = {
6906
+ workspaceId,
6907
+ name: entry.name,
6908
+ description: entry.description,
6909
+ sizeBytes: BigInt(buffer.length),
6910
+ contentHash: entry.contentHash
6911
+ };
6912
+ if (isCreate) {
6913
+ async function* createStream() {
6914
+ yield { payload: {
6915
+ case: "info",
6916
+ value: info
6917
+ } };
6918
+ for (let i = 0; i < buffer.length; i += CHUNK_SIZE) yield { payload: {
6919
+ case: "chunk",
6920
+ value: buffer.subarray(i, Math.min(i + CHUNK_SIZE, buffer.length))
6921
+ } };
6922
+ }
6923
+ await client.createFunctionRegistry(createStream());
6924
+ } else {
6925
+ async function* updateStream() {
6926
+ yield { payload: {
6927
+ case: "info",
6928
+ value: info
6929
+ } };
6930
+ for (let i = 0; i < buffer.length; i += CHUNK_SIZE) yield { payload: {
6931
+ case: "chunk",
6932
+ value: buffer.subarray(i, Math.min(i + CHUNK_SIZE, buffer.length))
6933
+ } };
6934
+ }
6935
+ await client.updateFunctionRegistry(updateStream());
6936
+ }
6937
+ }
6938
+ /**
6939
+ * Apply function registry changes for the given phase.
6940
+ * @param client - Operator client instance
6941
 + * @param workspaceId - Workspace ID
6942
+ * @param result - Planned function registry changes
6943
+ * @param phase - Apply phase
6944
+ */
6945
+ async function applyFunctionRegistry(client, workspaceId, result, phase = "create-update") {
6946
+ const { changeSet } = result;
6947
+ if (phase === "create-update") {
6948
+ for (const create$1 of changeSet.creates) {
6949
+ await uploadFunctionScript(client, workspaceId, create$1.entry, true);
6950
+ await client.setMetadata(create$1.metaRequest);
6951
+ }
6952
+ for (const update of changeSet.updates) {
6953
+ await uploadFunctionScript(client, workspaceId, update.entry, false);
6954
+ await client.setMetadata(update.metaRequest);
6955
+ }
6956
+ } else if (phase === "delete") await Promise.all(changeSet.deletes.map((del) => client.deleteFunctionRegistry({
6957
+ workspaceId: del.workspaceId,
6958
+ name: del.name
6959
+ })));
6960
+ }
6961
+
6996
6962
  //#endregion
6997
6963
  //#region src/cli/apply/services/executor.ts
6998
6964
  /**
@@ -7237,22 +7203,19 @@ function protoExecutor(appName, executor, env) {
7237
7203
  } };
7238
7204
  break;
7239
7205
  case "function":
7240
- case "jobFunction": {
7206
+ case "jobFunction":
7241
7207
  if (target.kind === "function") targetType = ExecutorTargetType.FUNCTION;
7242
7208
  else targetType = ExecutorTargetType.JOB_FUNCTION;
7243
- const scriptPath = path.join(getDistDir(), "executors", `${executor.name}.js`);
7244
- const script = fs$2.readFileSync(scriptPath, "utf-8");
7245
7209
  targetConfig = { config: {
7246
7210
  case: "function",
7247
7211
  value: {
7248
- name: `${executor.name}__target`,
7249
- script,
7212
+ name: "operation",
7213
+ scriptRef: executorFunctionName(executor.name),
7250
7214
  variables: { expr: argsExpr },
7251
7215
  invoker: target.authInvoker ?? void 0
7252
7216
  }
7253
7217
  } };
7254
7218
  break;
7255
- }
7256
7219
  case "workflow":
7257
7220
  targetType = ExecutorTargetType.WORKFLOW;
7258
7221
  targetConfig = { config: {
@@ -7470,7 +7433,7 @@ async function planResolvers(client, workspaceId, pipelines, executors, deletedS
7470
7433
  request: {
7471
7434
  workspaceId,
7472
7435
  namespaceName: pipeline.namespace,
7473
- pipelineResolver: processResolver(resolver, executorUsedResolvers, env)
7436
+ pipelineResolver: processResolver(pipeline.namespace, resolver, executorUsedResolvers, env)
7474
7437
  }
7475
7438
  });
7476
7439
  existingNameSet.delete(resolver.name);
@@ -7479,7 +7442,7 @@ async function planResolvers(client, workspaceId, pipelines, executors, deletedS
7479
7442
  request: {
7480
7443
  workspaceId,
7481
7444
  namespaceName: pipeline.namespace,
7482
- pipelineResolver: processResolver(resolver, executorUsedResolvers, env)
7445
+ pipelineResolver: processResolver(pipeline.namespace, resolver, executorUsedResolvers, env)
7483
7446
  }
7484
7447
  });
7485
7448
  existingNameSet.forEach((name) => {
@@ -7505,20 +7468,13 @@ async function planResolvers(client, workspaceId, pipelines, executors, deletedS
7505
7468
  });
7506
7469
  return changeSet;
7507
7470
  }
7508
- function processResolver(resolver, executorUsedResolvers, env) {
7509
- const functionPath = path.join(getDistDir(), "resolvers", `${resolver.name}.js`);
7510
- let functionCode = "";
7511
- try {
7512
- functionCode = fs$2.readFileSync(functionPath, "utf-8");
7513
- } catch {
7514
- logger.warn(`Function file not found: ${functionPath}`);
7515
- }
7471
+ function processResolver(namespace, resolver, executorUsedResolvers, env) {
7516
7472
  const pipelines = [{
7517
7473
  name: "body",
7518
7474
  operationName: "body",
7519
7475
  description: `${resolver.name} function body`,
7520
7476
  operationType: PipelineResolver_OperationType.FUNCTION,
7521
- operationSource: functionCode,
7477
+ operationSourceRef: resolverFunctionName(namespace, resolver.name),
7522
7478
  operationHook: { expr: `({ ...context.pipeline, input: context.args, user: ${tailorUserMap}, env: ${JSON.stringify(env)} });` },
7523
7479
  postScript: `args.body`
7524
7480
  }];
@@ -8605,7 +8561,7 @@ function compareFiles(ctx, typeName, oldFiles, newFiles) {
8605
8561
  * Compare type-level relationships
8606
8562
  * @param {DiffContext} ctx - Diff context
8607
8563
  * @param {string} typeName - Type name
8608
- * @param relationshipType
8564
+ * @param {"forward" | "backward"} relationshipType - Relationship direction to compare
8609
8565
  * @param {Record<string, SnapshotRelationship> | undefined} oldRelationships - Previous relationships
8610
8566
  * @param {Record<string, SnapshotRelationship> | undefined} newRelationships - Current relationships
8611
8567
  * @returns {void}
@@ -9998,11 +9954,7 @@ function generateTailorDBTypeManifest(type, executorUsedTypes, namespaceGqlOpera
9998
9954
  type: fieldType,
9999
9955
  allowedValues: fieldType === "enum" ? fieldConfig.allowedValues || [] : [],
10000
9956
  description: fieldConfig.description || "",
10001
- validate: (fieldConfig.validate || []).map((val) => ({
10002
- action: TailorDBType_PermitAction.DENY,
10003
- errorMessage: val.errorMessage || "",
10004
- ...val.script && { script: { expr: val.script.expr ? `!${val.script.expr}` : "" } }
10005
- })),
9957
+ validate: toProtoFieldValidate(fieldConfig),
10006
9958
  array: fieldConfig.array || false,
10007
9959
  index: fieldConfig.index || false,
10008
9960
  unique: fieldConfig.unique || false,
@@ -10011,10 +9963,7 @@ function generateTailorDBTypeManifest(type, executorUsedTypes, namespaceGqlOpera
10011
9963
  foreignKeyField: fieldConfig.foreignKeyField,
10012
9964
  required: fieldConfig.required !== false,
10013
9965
  vector: fieldConfig.vector || false,
10014
- ...fieldConfig.hooks && { hooks: {
10015
- create: fieldConfig.hooks?.create ? { expr: fieldConfig.hooks.create.expr || "" } : void 0,
10016
- update: fieldConfig.hooks?.update ? { expr: fieldConfig.hooks.update.expr || "" } : void 0
10017
- } },
9966
+ ...toProtoFieldHooks(fieldConfig),
10018
9967
  ...fieldConfig.serial && { serial: {
10019
9968
  start: fieldConfig.serial.start,
10020
9969
  ...fieldConfig.serial.maxValue && { maxValue: fieldConfig.serial.maxValue },
@@ -10071,6 +10020,20 @@ function generateTailorDBTypeManifest(type, executorUsedTypes, namespaceGqlOpera
10071
10020
  }
10072
10021
  };
10073
10022
  }
10023
+ function toProtoFieldValidate(fieldConfig) {
10024
+ return (fieldConfig.validate || []).map((val) => ({
10025
+ action: TailorDBType_PermitAction.DENY,
10026
+ errorMessage: val.errorMessage || "",
10027
+ ...val.script && { script: { expr: val.script.expr ? `!${val.script.expr}` : "" } }
10028
+ }));
10029
+ }
10030
+ function toProtoFieldHooks(fieldConfig) {
10031
+ if (!fieldConfig.hooks) return {};
10032
+ return { hooks: {
10033
+ create: fieldConfig.hooks.create ? { expr: fieldConfig.hooks.create.expr || "" } : void 0,
10034
+ update: fieldConfig.hooks.update ? { expr: fieldConfig.hooks.update.expr || "" } : void 0
10035
+ } };
10036
+ }
10074
10037
  function processNestedFields(fields) {
10075
10038
  const nestedFields = {};
10076
10039
  Object.entries(fields).forEach(([nestedFieldName, nestedFieldConfig]) => {
@@ -10081,26 +10044,28 @@ function processNestedFields(fields) {
10081
10044
  type: "nested",
10082
10045
  allowedValues: nestedFieldConfig.allowedValues || [],
10083
10046
  description: nestedFieldConfig.description || "",
10084
- validate: [],
10047
+ validate: toProtoFieldValidate(nestedFieldConfig),
10085
10048
  required: nestedFieldConfig.required ?? true,
10086
10049
  array: nestedFieldConfig.array ?? false,
10087
10050
  index: false,
10088
10051
  unique: false,
10089
10052
  foreignKey: false,
10090
10053
  vector: false,
10054
+ ...toProtoFieldHooks(nestedFieldConfig),
10091
10055
  fields: deepNestedFields
10092
10056
  };
10093
10057
  } else nestedFields[nestedFieldName] = {
10094
10058
  type: nestedType,
10095
10059
  allowedValues: nestedType === "enum" ? nestedFieldConfig.allowedValues || [] : [],
10096
10060
  description: nestedFieldConfig.description || "",
10097
- validate: [],
10061
+ validate: toProtoFieldValidate(nestedFieldConfig),
10098
10062
  required: nestedFieldConfig.required ?? true,
10099
10063
  array: nestedFieldConfig.array ?? false,
10100
10064
  index: false,
10101
10065
  unique: false,
10102
10066
  foreignKey: false,
10103
10067
  vector: false,
10068
+ ...toProtoFieldHooks(nestedFieldConfig),
10104
10069
  ...nestedFieldConfig.serial && { serial: {
10105
10070
  start: nestedFieldConfig.serial.start,
10106
10071
  ...nestedFieldConfig.serial.maxValue && { maxValue: nestedFieldConfig.serial.maxValue },
@@ -10459,7 +10424,7 @@ async function registerJobFunctions(client, changeSet, appName) {
10459
10424
  const jobFunctionVersions = {};
10460
10425
  const firstWorkflow = changeSet.creates[0] || changeSet.updates[0];
10461
10426
  if (!firstWorkflow) return jobFunctionVersions;
10462
- const { workspaceId, scripts } = firstWorkflow;
10427
+ const { workspaceId } = firstWorkflow;
10463
10428
  const allUsedJobNames = /* @__PURE__ */ new Set();
10464
10429
  for (const item of [...changeSet.creates, ...changeSet.updates]) for (const jobName of item.usedJobNames) allUsedJobNames.add(jobName);
10465
10430
  const existingJobFunctions = await fetchAll(async (pageToken) => {
@@ -10471,16 +10436,14 @@ async function registerJobFunctions(client, changeSet, appName) {
10471
10436
  });
10472
10437
  const existingJobNamesSet = new Set(existingJobFunctions);
10473
10438
  const results = await Promise.all(Array.from(allUsedJobNames).map(async (jobName) => {
10474
- const script = scripts.get(jobName);
10475
- if (!script) throw new Error(`No bundled script found for job "${jobName}". Please run "generate" command before "apply".`);
10476
10439
  const response = existingJobNamesSet.has(jobName) ? await client.updateWorkflowJobFunction({
10477
10440
  workspaceId,
10478
10441
  jobFunctionName: jobName,
10479
- script
10442
+ scriptRef: workflowJobFunctionName(jobName)
10480
10443
  }) : await client.createWorkflowJobFunction({
10481
10444
  workspaceId,
10482
10445
  jobFunctionName: jobName,
10483
- script
10446
+ scriptRef: workflowJobFunctionName(jobName)
10484
10447
  });
10485
10448
  await client.setMetadata(await buildMetaRequest(jobFunctionTrn(workspaceId, jobName), appName));
10486
10449
  return {
@@ -10537,7 +10500,6 @@ async function planWorkflow(client, workspaceId, appName, workflows, mainJobDeps
10537
10500
  label: metadata?.labels[sdkNameLabelKey]
10538
10501
  };
10539
10502
  }));
10540
- const allScripts = await loadWorkflowScripts();
10541
10503
  for (const workflow of Object.values(workflows)) {
10542
10504
  const existing = existingWorkflows[workflow.name];
10543
10505
  const metaRequest = await buildMetaRequest(workflowTrn(workspaceId, workflow.name), appName);
@@ -10557,7 +10519,6 @@ async function planWorkflow(client, workspaceId, appName, workflows, mainJobDeps
10557
10519
  name: workflow.name,
10558
10520
  workspaceId,
10559
10521
  workflow,
10560
- scripts: allScripts,
10561
10522
  usedJobNames,
10562
10523
  metaRequest
10563
10524
  });
@@ -10566,7 +10527,6 @@ async function planWorkflow(client, workspaceId, appName, workflows, mainJobDeps
10566
10527
  name: workflow.name,
10567
10528
  workspaceId,
10568
10529
  workflow,
10569
- scripts: allScripts,
10570
10530
  usedJobNames,
10571
10531
  metaRequest
10572
10532
  });
@@ -10589,19 +10549,6 @@ async function planWorkflow(client, workspaceId, appName, workflows, mainJobDeps
10589
10549
  appName
10590
10550
  };
10591
10551
  }
10592
- async function loadWorkflowScripts() {
10593
- const scripts = /* @__PURE__ */ new Map();
10594
- const jobsDir = path.join(getDistDir(), "workflow-jobs");
10595
- if (!fs$2.existsSync(jobsDir)) return scripts;
10596
- const files = fs$2.readdirSync(jobsDir);
10597
- for (const file of files) if (/^[^.]+\.js$/.test(file)) {
10598
- const jobName = file.replace(/\.js$/, "");
10599
- const scriptPath = path.join(jobsDir, file);
10600
- const script = fs$2.readFileSync(scriptPath, "utf-8");
10601
- scripts.set(jobName, script);
10602
- }
10603
- return scripts;
10604
- }
10605
10552
 
10606
10553
  //#endregion
10607
10554
  //#region src/cli/apply/index.ts
@@ -10619,8 +10566,7 @@ async function apply(options) {
10619
10566
  if (plugins.length > 0) pluginManager = new PluginManager(plugins);
10620
10567
  await generateUserTypes({
10621
10568
  config,
10622
- configPath: config.path,
10623
- plugins
10569
+ configPath: config.path
10624
10570
  });
10625
10571
  const application = defineApplication({
10626
10572
  config,
@@ -10689,6 +10635,7 @@ async function apply(options) {
10689
10635
  }
10690
10636
  if (workflowResult) printLoadedWorkflows(workflowResult);
10691
10637
  logger.newline();
10638
+ const functionEntries = collectFunctionEntries(application, workflowResult?.jobs ?? []);
10692
10639
  const ctx = {
10693
10640
  client,
10694
10641
  workspaceId,
@@ -10697,6 +10644,7 @@ async function apply(options) {
10697
10644
  config,
10698
10645
  noSchemaCheck: options?.noSchemaCheck
10699
10646
  };
10647
+ const functionRegistry = await planFunctionRegistry(client, workspaceId, application.name, functionEntries);
10700
10648
  const tailorDB = await planTailorDB(ctx);
10701
10649
  const staticWebsite = await planStaticWebsite(ctx);
10702
10650
  const idp = await planIdP(ctx);
@@ -10706,6 +10654,7 @@ async function apply(options) {
10706
10654
  const executor = await planExecutor(ctx);
10707
10655
  const workflow = await planWorkflow(client, workspaceId, application.name, workflowResult?.workflows ?? {}, workflowBuildResult?.mainJobDeps ?? {});
10708
10656
  const allConflicts = [
10657
+ ...functionRegistry.conflicts,
10709
10658
  ...tailorDB.conflicts,
10710
10659
  ...staticWebsite.conflicts,
10711
10660
  ...idp.conflicts,
@@ -10716,6 +10665,7 @@ async function apply(options) {
10716
10665
  ];
10717
10666
  await confirmOwnerConflict(allConflicts, application.name, yes);
10718
10667
  await confirmUnmanagedResources([
10668
+ ...functionRegistry.unmanaged,
10719
10669
  ...tailorDB.unmanaged,
10720
10670
  ...staticWebsite.unmanaged,
10721
10671
  ...idp.unmanaged,
@@ -10743,6 +10693,7 @@ async function apply(options) {
10743
10693
  });
10744
10694
  await confirmImportantResourceDeletion(importantDeletions, yes);
10745
10695
  const resourceOwners = new Set([
10696
+ ...functionRegistry.resourceOwners,
10746
10697
  ...tailorDB.resourceOwners,
10747
10698
  ...staticWebsite.resourceOwners,
10748
10699
  ...idp.resourceOwners,
@@ -10763,6 +10714,7 @@ async function apply(options) {
10763
10714
  logger.info("Dry run enabled. No changes applied.");
10764
10715
  return;
10765
10716
  }
10717
+ await applyFunctionRegistry(client, workspaceId, functionRegistry, "create-update");
10766
10718
  await applyStaticWebsite(client, staticWebsite, "create-update");
10767
10719
  await applyIdP(client, idp, "create-update");
10768
10720
  await applyAuth(client, auth, "create-update");
@@ -10782,6 +10734,7 @@ async function apply(options) {
10782
10734
  await applyAuth(client, auth, "delete-services");
10783
10735
  await applyIdP(client, idp, "delete-services");
10784
10736
  await applyTailorDB(client, tailorDB, "delete-services");
10737
+ await applyFunctionRegistry(client, workspaceId, functionRegistry, "delete");
10785
10738
  logger.success("Successfully applied changes.");
10786
10739
  }
10787
10740
  async function buildPipeline(namespace, config, triggerContext) {
@@ -11717,32 +11670,11 @@ function getRunningJobs(execution) {
11717
11670
  function isTerminalStatus(status) {
11718
11671
  return status === WorkflowExecution_Status.SUCCESS || status === WorkflowExecution_Status.FAILED || status === WorkflowExecution_Status.PENDING_RESUME;
11719
11672
  }
11720
- /**
11721
- * Start a workflow and return a handle to wait for completion.
11722
- * @param options - Start options
11723
- * @returns Start result with wait helper
11724
- */
11725
- async function startWorkflow(options) {
11726
- const client = await initOperatorClient(await loadAccessToken({
11727
- useProfile: true,
11728
- profile: options.profile
11729
- }));
11730
- const workspaceId = loadWorkspaceId({
11731
- workspaceId: options.workspaceId,
11732
- profile: options.profile
11733
- });
11734
- const { config } = await loadConfig(options.configPath);
11735
- const { application } = await client.getApplication({
11736
- workspaceId,
11737
- applicationName: config.name
11738
- });
11739
- if (!application?.authNamespace) throw new Error(`Application ${config.name} does not have an auth configuration.`);
11673
+ async function startWorkflowCore(options) {
11674
+ const { client, workspaceId, workflowName } = options;
11740
11675
  try {
11741
- const workflow = await resolveWorkflow(client, workspaceId, options.name);
11742
- const authInvoker = create(AuthInvokerSchema, {
11743
- namespace: application.authNamespace,
11744
- machineUserName: options.machineUser
11745
- });
11676
+ const workflow = await resolveWorkflow(client, workspaceId, workflowName);
11677
+ const authInvoker = create(AuthInvokerSchema, options.authInvoker);
11746
11678
  const arg$1 = options.arg === void 0 ? void 0 : typeof options.arg === "string" ? options.arg : JSON.stringify(options.arg);
11747
11679
  const { executionId } = await client.testStartWorkflow({
11748
11680
  workspaceId,
@@ -11762,10 +11694,54 @@ async function startWorkflow(options) {
11762
11694
  })
11763
11695
  };
11764
11696
  } catch (error) {
11765
- if (error instanceof ConnectError && error.code === Code.NotFound) throw new Error(`Workflow '${options.name}' not found.`);
11697
+ if (error instanceof ConnectError && error.code === Code.NotFound) throw new Error(`Workflow '${workflowName}' not found.`);
11766
11698
  throw error;
11767
11699
  }
11768
11700
  }
11701
+ async function startWorkflowByName(options) {
11702
+ const client = await initOperatorClient(await loadAccessToken({
11703
+ useProfile: true,
11704
+ profile: options.profile
11705
+ }));
11706
+ const workspaceId = loadWorkspaceId({
11707
+ workspaceId: options.workspaceId,
11708
+ profile: options.profile
11709
+ });
11710
+ const { config } = await loadConfig(options.configPath);
11711
+ const { application } = await client.getApplication({
11712
+ workspaceId,
11713
+ applicationName: config.name
11714
+ });
11715
+ if (!application?.authNamespace) throw new Error(`Application ${config.name} does not have an auth configuration.`);
11716
+ return await startWorkflowCore({
11717
+ client,
11718
+ workspaceId,
11719
+ workflowName: options.name,
11720
+ authInvoker: {
11721
+ namespace: application.authNamespace,
11722
+ machineUserName: options.machineUser
11723
+ },
11724
+ arg: options.arg,
11725
+ interval: options.interval
11726
+ });
11727
+ }
11728
+ async function startWorkflow(options) {
11729
+ if ("name" in options) return await startWorkflowByName(options);
11730
+ return await startWorkflowCore({
11731
+ client: await initOperatorClient(await loadAccessToken({
11732
+ useProfile: true,
11733
+ profile: options.profile
11734
+ })),
11735
+ workspaceId: loadWorkspaceId({
11736
+ workspaceId: options.workspaceId,
11737
+ profile: options.profile
11738
+ }),
11739
+ workflowName: options.workflow.name,
11740
+ authInvoker: options.authInvoker,
11741
+ arg: options.arg,
11742
+ interval: options.interval
11743
+ });
11744
+ }
11769
11745
  const startCommand = defineCommand({
11770
11746
  name: "start",
11771
11747
  description: "Start a workflow execution.",
@@ -11785,7 +11761,7 @@ const startCommand = defineCommand({
11785
11761
  ...waitArgs
11786
11762
  }),
11787
11763
  run: withCommonArgs(async (args) => {
11788
- const { executionId, wait } = await startWorkflow({
11764
+ const { executionId, wait } = await startWorkflowByName({
11789
11765
  name: args.name,
11790
11766
  machineUser: args.machineuser,
11791
11767
  arg: args.arg,
@@ -12276,12 +12252,7 @@ const headerArg = z.string().superRefine((val, ctx) => {
12276
12252
  value: val.slice(colonIndex + 1).trim()
12277
12253
  };
12278
12254
  }).refine((h) => h.key.length > 0, { message: "Header name cannot be empty" });
12279
- /**
12280
- * Trigger an executor and return the job ID.
12281
- * @param options - Options for triggering executor
12282
- * @returns Result containing the job ID if available
12283
- */
12284
- async function triggerExecutor(options) {
12255
+ async function triggerExecutorByName(options) {
12285
12256
  const client = await initOperatorClient(await loadAccessToken({
12286
12257
  useProfile: true,
12287
12258
  profile: options.profile
@@ -12302,6 +12273,16 @@ async function triggerExecutor(options) {
12302
12273
  throw error;
12303
12274
  }
12304
12275
  }
12276
+ async function triggerExecutor(options) {
12277
+ if ("executorName" in options) return await triggerExecutorByName(options);
12278
+ if (options.executor.trigger.kind !== "incomingWebhook" && options.payload !== void 0) throw new Error(`Executor '${options.executor.name}' has '${options.executor.trigger.kind}' trigger type. The payload is only available for 'incomingWebhook' trigger type.`);
12279
+ return await triggerExecutorByName({
12280
+ executorName: options.executor.name,
12281
+ payload: options.payload,
12282
+ workspaceId: options.workspaceId,
12283
+ profile: options.profile
12284
+ });
12285
+ }
12305
12286
  const triggerCommand = defineCommand({
12306
12287
  name: "trigger",
12307
12288
  description: "Trigger an executor manually.",
@@ -12395,7 +12376,7 @@ The \`--logs\` option displays logs from the downstream execution when available
12395
12376
  body: body ?? {},
12396
12377
  headers
12397
12378
  };
12398
- const result = await triggerExecutor({
12379
+ const result = await triggerExecutorByName({
12399
12380
  executorName: args.executorName,
12400
12381
  payload,
12401
12382
  workspaceId: args["workspace-id"],
@@ -13238,8 +13219,7 @@ async function generate$1(options) {
13238
13219
  const watch$1 = options?.watch ?? false;
13239
13220
  await generateUserTypes({
13240
13221
  config,
13241
- configPath: config.path,
13242
- plugins
13222
+ configPath: config.path
13243
13223
  });
13244
13224
  const manager = createGenerationManager(config, generators, plugins, config.path);
13245
13225
  await manager.generate(watch$1);
@@ -13581,7 +13561,8 @@ async function execRemove(client, workspaceId, application, config, confirm) {
13581
13561
  const app = await planApplication(ctx);
13582
13562
  const executor = await planExecutor(ctx);
13583
13563
  const workflow = await planWorkflow(client, workspaceId, application.name, {}, {});
13584
- if (tailorDB.changeSet.service.deletes.length === 0 && staticWebsite.changeSet.deletes.length === 0 && idp.changeSet.service.deletes.length === 0 && auth.changeSet.service.deletes.length === 0 && pipeline.changeSet.service.deletes.length === 0 && app.deletes.length === 0 && executor.changeSet.deletes.length === 0 && workflow.changeSet.deletes.length === 0) return;
13564
+ const functionRegistry = await planFunctionRegistry(client, workspaceId, application.name, []);
13565
+ if (tailorDB.changeSet.service.deletes.length === 0 && staticWebsite.changeSet.deletes.length === 0 && idp.changeSet.service.deletes.length === 0 && auth.changeSet.service.deletes.length === 0 && pipeline.changeSet.service.deletes.length === 0 && app.deletes.length === 0 && executor.changeSet.deletes.length === 0 && workflow.changeSet.deletes.length === 0 && functionRegistry.changeSet.deletes.length === 0) return;
13585
13566
  if (confirm) await confirm();
13586
13567
  await applyWorkflow(client, workflow, "delete");
13587
13568
  await applyExecutor(client, executor, "delete");
@@ -13595,6 +13576,7 @@ async function execRemove(client, workspaceId, application, config, confirm) {
13595
13576
  await applyIdP(client, idp, "delete-services");
13596
13577
  await applyTailorDB(client, tailorDB, "delete-resources");
13597
13578
  await applyTailorDB(client, tailorDB, "delete-services");
13579
+ await applyFunctionRegistry(client, workspaceId, functionRegistry, "delete");
13598
13580
  }
13599
13581
  /**
13600
13582
  * Remove all resources managed by the current application.
@@ -14216,7 +14198,7 @@ async function generate(options) {
14216
14198
  if (options.init) await handleInitOption(namespacesWithMigrations, options.yes);
14217
14199
  let pluginManager;
14218
14200
  if (plugins.length > 0) pluginManager = new PluginManager(plugins);
14219
- const { defineApplication: defineApplication$1 } = await import("./application-DM4zTgXU.mjs");
14201
+ const { defineApplication: defineApplication$1 } = await import("./application-JwJ_-_PQ.mjs");
14220
14202
  const application = defineApplication$1({
14221
14203
  config,
14222
14204
  pluginManager
@@ -15427,4 +15409,4 @@ const updateCommand = defineCommand({
15427
15409
 
15428
15410
  //#endregion
15429
15411
  export { jobsCommand as $, initOperatorClient as $t, generateCommand as A, getMigrationFiles as At, getMachineUserToken as B, generateUserTypes as Bt, resumeCommand as C, compareLocalTypesWithSnapshot as Ct, truncate as D, getLatestMigrationNumber as Dt, listWorkflows as E, formatMigrationNumber as Et, removeCommand$1 as F, formatDiffSummary as Ft, generateCommand$1 as G, fetchLatestToken as Gt, listCommand$5 as H, getDistDir as Ht, listCommand$4 as I, formatMigrationDiff as It, triggerCommand as J, readPlatformConfig as Jt, listWebhookExecutors as K, loadAccessToken as Kt, listOAuth2Clients as L, hasChanges as Lt, show as M, isValidMigrationNumber as Mt, showCommand as N, loadDiff as Nt, truncateCommand as O, getMigrationDirPath as Ot, remove as P, reconstructSnapshotFromMigrations as Pt, getExecutorJob as Q, initOAuth2Client as Qt, getCommand$1 as R, getNamespacesWithMigrations as Rt, healthCommand as S, SCHEMA_FILE_NAME as St, listCommand$3 as T, createSnapshotFromLocalTypes as Tt, listMachineUsers as U, apiCall as Ut, tokenCommand as V, loadConfig as Vt, generate$1 as W, apiCommand as Wt, listCommand$6 as X, fetchAll as Xt, triggerExecutor as Y, writePlatformConfig as Yt, listExecutors as Z, fetchUserInfo as Zt, createCommand as _, bundleMigrationScript as _t, listCommand as a, jsonArgs as an, getWorkflow as at, listCommand$2 as b, INITIAL_SCHEMA_NUMBER as bt, inviteUser as c, listWorkflowExecutions as ct, listCommand$1 as d, apply as dt, readPackageJson as en, listExecutorJobs as et, listWorkspaces as f, applyCommand as ft, deleteWorkspace as g, parseMigrationLabelNumber as gt, deleteCommand as h, MIGRATION_LABEL_KEY as ht, removeUser as i, deploymentArgs as in, getCommand$2 as it, logBetaWarning as j, getNextMigrationNumber as jt, generate as k, getMigrationFilePath as kt, restoreCommand as l, getCommand$3 as lt, getWorkspace as m, waitForExecution$1 as mt, updateUser as n, commonArgs as nn, startCommand as nt, listUsers as o, withCommonArgs as on, 
executionsCommand as ot, getCommand as p, executeScript as pt, webhookCommand as q, loadWorkspaceId as qt, removeCommand as r, confirmationArgs as rn, startWorkflow as rt, inviteCommand as s, workspaceArgs as sn, getWorkflowExecution as st, updateCommand as t, PATScope as tn, watchExecutorJob as tt, restoreWorkspace as u, getExecutor as ut, createWorkspace as v, DB_TYPES_FILE_NAME as vt, resumeWorkflow as w, compareSnapshots as wt, getAppHealth as x, MIGRATE_FILE_NAME as xt, listApps as y, DIFF_FILE_NAME as yt, getOAuth2Client as z, trnPrefix as zt };
15430
- //# sourceMappingURL=update-2eb6jz9o.mjs.map
15412
+ //# sourceMappingURL=update-C_ZTRB63.mjs.map