@tailor-platform/sdk 0.10.3 → 0.11.0

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
@@ -1,4 +1,4 @@
- import { getDistDir, tailorUserMap } from "./auth-Di3vQUrT.mjs";
+ import { WORKFLOW_JOB_BRAND, getDistDir, tailorUserMap } from "./job-CSwByDTq.mjs";
  import { createRequire } from "node:module";
  import { defineCommand } from "citty";
  import * as path from "node:path";
@@ -7,12 +7,14 @@ import { styleText } from "node:util";
  import * as fs from "node:fs";
  import { z } from "zod";
  import * as inflection from "inflection";
+ import ml from "multiline-ts";
  import { readPackageJSON, resolveTSConfig } from "pkg-types";
  import * as rolldown from "rolldown";
- import ml from "multiline-ts";
+ import { parseSync } from "oxc-parser";
  import { loadEnvFile } from "node:process";
  import { consola } from "consola";
  import { table } from "table";
+ import { OAuth2Client } from "@badgateway/oauth2-client";
  import { MethodOptions_IdempotencyLevel, ValueSchema, file_google_protobuf_descriptor, file_google_protobuf_duration, file_google_protobuf_field_mask, file_google_protobuf_struct, file_google_protobuf_timestamp, timestampDate } from "@bufbuild/protobuf/wkt";
  import { Code, ConnectError, createClient } from "@connectrpc/connect";
  import { createConnectTransport } from "@connectrpc/connect-node";
@@ -201,8 +203,8 @@ var ExecutorService = class {
  }
  async loadExecutorForFile(executorFile, timestamp) {
  try {
- const baseUrl$1 = pathToFileURL(executorFile).href;
- const executorModule = await (timestamp === void 0 ? import(baseUrl$1) : import(`${baseUrl$1}?t=${timestamp.getTime()}`));
+ const baseUrl = pathToFileURL(executorFile).href;
+ const executorModule = await (timestamp === void 0 ? import(baseUrl) : import(`${baseUrl}?t=${timestamp.getTime()}`));
  const result = ExecutorSchema.safeParse(executorModule.default);
  if (result.success) {
  const relativePath = path.relative(process.cwd(), executorFile);
@@ -286,8 +288,8 @@ var ResolverService = class {
  }
  async loadResolverForFile(resolverFile, timestamp) {
  try {
- const baseUrl$1 = pathToFileURL(resolverFile).href;
- const resolverModule = await (timestamp === void 0 ? import(baseUrl$1) : import(`${baseUrl$1}?t=${timestamp.getTime()}`));
+ const baseUrl = pathToFileURL(resolverFile).href;
+ const resolverModule = await (timestamp === void 0 ? import(baseUrl) : import(`${baseUrl}?t=${timestamp.getTime()}`));
  const result = ResolverSchema.safeParse(resolverModule.default);
  if (result.success) {
  const relativePath = path.relative(process.cwd(), resolverFile);
@@ -336,8 +338,8 @@ var TailorDBService = class {
  async loadTypesForFile(typeFile, timestamp) {
  this.rawTypes[typeFile] = {};
  try {
- const baseUrl$1 = pathToFileURL(typeFile).href;
- const module = await (timestamp === void 0 ? import(baseUrl$1) : import(`${baseUrl$1}?t=${timestamp.getTime()}`));
+ const baseUrl = pathToFileURL(typeFile).href;
+ const module = await (timestamp === void 0 ? import(baseUrl) : import(`${baseUrl}?t=${timestamp.getTime()}`));
  for (const exportName of Object.keys(module)) {
  const exportedValue = module[exportName];
  if (exportedValue && typeof exportedValue === "object" && exportedValue.constructor?.name === "TailorDBType" && typeof exportedValue.name === "string" && typeof exportedValue.fields === "object" && exportedValue.metadata && typeof exportedValue.metadata === "object") {
@@ -466,6 +468,7 @@ var Application = class {
  _authService = void 0;
  _subgraphs = [];
  _executorService = void 0;
+ _workflowConfig = void 0;
  _staticWebsiteServices = [];
  _env = {};
  constructor(name, config) {
@@ -497,6 +500,9 @@ var Application = class {
  get executorService() {
  return this._executorService;
  }
+ get workflowConfig() {
+ return this._workflowConfig;
+ }
  get staticWebsiteServices() {
  return this._staticWebsiteServices;
  }
@@ -549,6 +555,10 @@ var Application = class {
  if (!config) return;
  this._executorService = new ExecutorService(config);
  }
+ defineWorkflow(config) {
+ if (!config) return;
+ this._workflowConfig = config;
+ }
  defineStaticWebsites(websites) {
  const websiteNames = /* @__PURE__ */ new Set();
  (websites ?? []).forEach((config) => {
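The new defineWorkflow hook is wired into defineApplication in the next hunk (app.defineWorkflow(config.workflow)), so an application config gains an optional workflow entry. A minimal sketch of that entry, assuming only what this bundle reads from it (a files glob list consumed by loadFilesWithIgnores); the surrounding keys are illustrative:

// Sketch of an application config with the new workflow section.
// Only workflow.files is confirmed by this diff; the other keys are placeholders.
export default defineApplication({
  name: "my-app",
  workflow: {
    files: ["workflows/**/*.ts"],
  },
});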
@@ -566,232 +576,730 @@ function defineApplication(config) {
566
576
  app.defineIdp(config.idp);
567
577
  app.defineAuth(config.auth);
568
578
  app.defineExecutor(config.executor);
579
+ app.defineWorkflow(config.workflow);
569
580
  app.defineStaticWebsites(config.staticWebsites);
570
581
  return app;
571
582
  }
572
583
 
573
584
  //#endregion
574
- //#region src/cli/bundler/bundler.ts
575
- var Bundler = class {
576
- loader;
577
- transformer;
578
- constructor(config) {
579
- this.config = config;
580
- this.loader = config.loader;
581
- this.transformer = config.transformer;
582
- }
583
- async bundle() {
584
- try {
585
- const files = await this.detectFiles();
586
- if (files.length === 0) throw new Error(`No files found matching pattern: ${this.config.serviceConfig.files?.join(", ")}`);
587
- console.log(`Found ${files.length} files for service "${this.config.namespace}"`);
588
- await Promise.all(files.map(async (file) => {
589
- await this.processFile(file);
590
- }));
591
- console.log(`Successfully bundled files for service "${this.config.namespace}"`);
592
- } catch (error) {
593
- console.error(`Bundle failed for service ${this.config.namespace}:`, error);
594
- throw error;
585
+ //#region src/parser/service/workflow/schema.ts
586
+ const WorkflowJobSchema = z.object({
587
+ name: z.string(),
588
+ get deps() {
589
+ return z.array(WorkflowJobSchema).optional();
590
+ },
591
+ body: functionSchema
592
+ });
593
+ const WorkflowSchema = z.object({
594
+ name: z.string(),
595
+ mainJob: WorkflowJobSchema
596
+ });
597
+
598
+ //#endregion
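WorkflowJobSchema validates each job (a name, an optional recursive deps list of other jobs, and a function body), and WorkflowSchema validates a module's default export as { name, mainJob }. Together with the loader below, a workflow module is shaped roughly as follows; createWorkflowJob is confirmed by the error text in loadAndCollectJobs, while the plain default-exported object is an assumption based on the schema alone:

import { createWorkflowJob } from "@tailor-platform/sdk";

// Every job must be a named export so the CLI can map job names to export names.
export const fetchOrders = createWorkflowJob({
  name: "fetch-orders",
  body: async (input) => ({ orders: [] }),
});

export const syncOrders = createWorkflowJob({
  name: "sync-orders",
  deps: [fetchOrders], // deps reference job objects and are traced recursively
  body: async (input, jobs) => {
    // dep job names are exposed with "-" mapped to "_" (see generateJobsObject below)
    return await jobs.fetch_orders(input);
  },
});

// The workflow is the module's default export and must match { name, mainJob }.
export default {
  name: "order-sync",
  mainJob: syncOrders,
};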
599
+ //#region src/cli/application/workflow/service.ts
600
+ /**
601
+ * Load workflow files and collect all jobs in a single pass.
602
+ */
603
+ async function loadAndCollectJobs(config) {
604
+ const workflows = {};
605
+ const workflowSources = [];
606
+ const collectedJobs = [];
607
+ let unusedJobs = [];
608
+ if (!config.files || config.files.length === 0) return {
609
+ workflows,
610
+ workflowSources,
611
+ jobs: collectedJobs,
612
+ unusedJobs,
613
+ fileCount: 0
614
+ };
615
+ const workflowFiles = loadFilesWithIgnores(config);
616
+ const fileCount = workflowFiles.length;
617
+ const allJobsMap = /* @__PURE__ */ new Map();
618
+ for (const workflowFile of workflowFiles) {
619
+ const { jobs, workflow } = await loadFileContent(workflowFile);
620
+ if (workflow) {
621
+ workflowSources.push({
622
+ workflow,
623
+ sourceFile: workflowFile
624
+ });
625
+ workflows[workflowFile] = workflow;
626
+ }
627
+ for (const job of jobs) {
628
+ const existing = allJobsMap.get(job.name);
629
+ if (existing) throw new Error(`Duplicate job name "${job.name}" found:\n - ${existing.sourceFile} (export: ${existing.exportName})\n - ${job.sourceFile} (export: ${job.exportName})\nEach job must have a unique name.`);
630
+ allJobsMap.set(job.name, job);
595
631
  }
596
632
  }
597
- async detectFiles() {
598
- if (!this.config.serviceConfig.files || this.config.serviceConfig.files.length === 0) return [];
599
- return loadFilesWithIgnores(this.config.serviceConfig);
633
+ const tracedJobs = /* @__PURE__ */ new Map();
634
+ for (const { workflow } of workflowSources) traceJobDependencies(workflow.mainJob, tracedJobs);
635
+ const notExportedJobs = [];
636
+ for (const jobName of tracedJobs.keys()) if (!allJobsMap.has(jobName)) notExportedJobs.push(jobName);
637
+ if (notExportedJobs.length > 0) throw new Error(`The following workflow jobs are used but not exported:\n` + notExportedJobs.map((name) => ` - "${name}"`).join("\n") + "\n\nAll workflow jobs must be named exports. Example:\n export const myJob = createWorkflowJob({ name: \"my-job\", ... });\n\nAlso ensure that files containing job exports are included in the workflow.files glob pattern.");
638
+ unusedJobs = Array.from(allJobsMap.keys()).filter((jobName) => !tracedJobs.has(jobName));
639
+ for (const [jobName, job] of tracedJobs) {
640
+ const exportedMetadata = allJobsMap.get(jobName);
641
+ const depNames = job.deps?.map((dep) => dep.name);
642
+ collectedJobs.push({
643
+ ...exportedMetadata,
644
+ deps: depNames
645
+ });
600
646
  }
601
- async processFile(file) {
602
- const item = await this.loader.load(file);
603
- if (!item) {
604
- console.log(`Skipping file ${file} as it could not be loaded`);
605
- return;
606
- }
607
- if (this.config.shouldProcess && !this.config.shouldProcess(item)) {
608
- console.log(`Skipping item based on shouldProcess condition: ${file}`);
609
- return;
610
- }
611
- const itemName = item.name;
612
- const outputFile = path.join(getDistDir(), this.config.outputDirs.preBundle, `${itemName}.js`);
613
- await this.preBundle(file, outputFile);
614
- const transformedFiles = await this.transformer.transform(outputFile, getDistDir());
615
- if (transformedFiles.length > 0) await this.postBundle(transformedFiles);
616
- }
617
- async preBundle(input, output) {
618
- const outputDir = path.dirname(output);
619
- if (!fs.existsSync(outputDir)) fs.mkdirSync(outputDir, { recursive: true });
620
- let tsconfig;
621
- try {
622
- tsconfig = await resolveTSConfig();
623
- } catch {
624
- tsconfig = void 0;
625
- }
626
- await rolldown.build(rolldown.defineConfig({
627
- input,
628
- output: {
629
- file: output,
630
- format: "esm",
631
- sourcemap: false,
632
- minify: false,
633
- inlineDynamicImports: true
634
- },
635
- tsconfig,
636
- treeshake: {
637
- moduleSideEffects: false,
638
- annotations: true,
639
- unknownGlobalSideEffects: false
640
- },
641
- logLevel: "silent"
642
- }));
643
- const stats = fs.statSync(output);
644
- console.log(`Pre-bundle output size: ${(stats.size / 1024).toFixed(2)} KB`);
647
+ return {
648
+ workflows,
649
+ workflowSources,
650
+ jobs: collectedJobs,
651
+ unusedJobs,
652
+ fileCount
653
+ };
654
+ }
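For reference, the object returned here (and consumed by apply() further down) has this shape, reconstructed from the literals above:

// Shape of the loadAndCollectJobs result, reconstructed from this bundle.
interface LoadAndCollectJobsResult {
  workflows: Record<string, Workflow>;          // keyed by source file path
  workflowSources: { workflow: Workflow; sourceFile: string }[];
  jobs: { name: string; exportName: string; sourceFile: string; deps?: string[] }[];
  unusedJobs: string[];                         // exported jobs not reachable from any mainJob
  fileCount: number;
}
interface Workflow { name: string; mainJob: WorkflowJob }
interface WorkflowJob { name: string; deps?: WorkflowJob[]; body: (...args: unknown[]) => unknown }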
655
+ /**
656
+ * Print workflow loading logs.
657
+ */
658
+ function printLoadedWorkflows(result) {
659
+ if (result.fileCount === 0) return;
660
+ console.log("");
661
+ console.log("Found", styleText("cyanBright", result.fileCount.toString()), "workflow files");
662
+ for (const { workflow, sourceFile } of result.workflowSources) {
663
+ const relativePath = path.relative(process.cwd(), sourceFile);
664
+ console.log("Workflow:", styleText("greenBright", `"${workflow.name}"`), "loaded from", styleText("cyan", relativePath));
645
665
  }
646
- async postBundle(files) {
647
- const outputDir = path.join(getDistDir(), this.config.outputDirs.postBundle);
648
- if (!fs.existsSync(outputDir)) fs.mkdirSync(outputDir, { recursive: true });
649
- let tsconfig;
650
- try {
651
- tsconfig = await resolveTSConfig();
652
- } catch {
653
- tsconfig = void 0;
666
+ if (result.unusedJobs.length > 0) console.warn(`⚠️ Warning: Unused workflow jobs found: ${result.unusedJobs.join(", ")}`);
667
+ }
668
+ /**
669
+ * Load a single file and extract jobs and workflow
670
+ */
671
+ async function loadFileContent(filePath) {
672
+ const jobs = [];
673
+ let workflow = null;
674
+ try {
675
+ const module = await import(`${pathToFileURL(filePath).href}?t=${Date.now()}`);
676
+ for (const [exportName, exportValue] of Object.entries(module)) {
677
+ if (exportName === "default") {
678
+ const workflowResult = WorkflowSchema.safeParse(exportValue);
679
+ if (workflowResult.success) workflow = workflowResult.data;
680
+ continue;
681
+ }
682
+ if (isWorkflowJob(exportValue)) {
683
+ const jobResult = WorkflowJobSchema.safeParse(exportValue);
684
+ if (jobResult.success) jobs.push({
685
+ name: jobResult.data.name,
686
+ exportName,
687
+ sourceFile: filePath
688
+ });
689
+ }
654
690
  }
655
- await Promise.all(files.map(async (file) => {
656
- const outputFile = path.join(outputDir, path.basename(file));
657
- await rolldown.build(rolldown.defineConfig({
658
- input: path.resolve(file),
659
- output: {
660
- file: outputFile,
661
- format: "esm",
662
- sourcemap: true,
663
- minify: true,
664
- inlineDynamicImports: true
665
- },
666
- tsconfig,
667
- treeshake: {
668
- moduleSideEffects: false,
669
- annotations: true,
670
- unknownGlobalSideEffects: false
671
- },
672
- logLevel: "silent"
673
- }));
674
- }));
691
+ } catch (error) {
692
+ const relativePath = path.relative(process.cwd(), filePath);
693
+ console.error(styleText("red", "Failed to load workflow from"), styleText("redBright", relativePath));
694
+ console.error(error);
695
+ throw error;
675
696
  }
676
- };
697
+ return {
698
+ jobs,
699
+ workflow
700
+ };
701
+ }
702
+ /**
703
+ * Check if a value is a WorkflowJob by looking for the brand symbol
704
+ */
705
+ function isWorkflowJob(value) {
706
+ return value != null && typeof value === "object" && WORKFLOW_JOB_BRAND in value && value[WORKFLOW_JOB_BRAND] === true;
707
+ }
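The brand is a symbol attached by createWorkflowJob in the shared job-CSwByDTq.mjs chunk; a generic sketch of this kind of guard (the symbol below is illustrative, the real one is the imported WORKFLOW_JOB_BRAND):

// Illustrative brand-symbol guard; the SDK's actual symbol is WORKFLOW_JOB_BRAND.
const BRAND = Symbol("workflow-job");

function makeJob(config: { name: string }) {
  return { ...config, [BRAND]: true };
}

function isJob(value: unknown): boolean {
  return value != null && typeof value === "object" && BRAND in value && (value as Record<symbol, unknown>)[BRAND] === true;
}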
708
+ /**
709
+ * Recursively trace all job dependencies
710
+ */
711
+ function traceJobDependencies(job, visited) {
712
+ if (visited.has(job.name)) return;
713
+ visited.set(job.name, job);
714
+ if (job.deps && Array.isArray(job.deps)) for (const dep of job.deps) traceJobDependencies(dep, visited);
715
+ }
677
716
 
678
717
  //#endregion
679
718
  //#region src/cli/bundler/executor/loader.ts
680
- var ExecutorLoader = class {
681
- async load(executorFilePath) {
682
- const executor = (await import(`${pathToFileURL(executorFilePath).href}?t=${Date.now()}`)).default;
683
- const parseResult = ExecutorSchema.safeParse(executor);
684
- if (!parseResult.success) return null;
685
- return parseResult.data;
686
- }
687
- };
719
+ async function loadExecutor(executorFilePath) {
720
+ const executor = (await import(`${pathToFileURL(executorFilePath).href}?t=${Date.now()}`)).default;
721
+ const parseResult = ExecutorSchema.safeParse(executor);
722
+ if (!parseResult.success) return null;
723
+ return parseResult.data;
724
+ }
688
725
 
689
726
  //#endregion
690
- //#region src/cli/bundler/executor/transformer.ts
691
- var ExecutorTransformer = class {
692
- constructor() {}
693
- async transform(filePath, tempDir) {
694
- const sourceText = fs.readFileSync(filePath).toString();
695
- const transformedPath = path.join(path.dirname(filePath), path.basename(filePath, ".js") + ".transformed.js");
696
- const executor = (await import(`${pathToFileURL(filePath)}?t=${(/* @__PURE__ */ new Date()).getTime()}`)).default;
697
- const exec = executor.operation;
698
- if (exec.kind !== "function" && exec.kind !== "jobFunction") return [];
699
- if (!exec.body) throw new Error(`Function reference not found in executor ${executor.name}`);
700
- fs.writeFileSync(transformedPath, ml`
701
- ${sourceText}
727
+ //#region src/cli/bundler/executor/executor-bundler.ts
728
+ /**
729
+ * Bundle executors from the specified configuration
730
+ *
731
+ * This function:
732
+ * 1. Creates entry file that extracts operation.body
733
+ * 2. Bundles in a single step with tree-shaking
734
+ */
735
+ async function bundleExecutors(config) {
736
+ const files = loadFilesWithIgnores(config);
737
+ if (files.length === 0) throw new Error(`No files found matching pattern: ${config.files?.join(", ")}`);
738
+ console.log("");
739
+ console.log("Bundling", styleText("cyanBright", files.length.toString()), "files for", styleText("cyan", "\"executor\""));
740
+ const executors = [];
741
+ for (const file of files) {
742
+ const executor = await loadExecutor(file);
743
+ if (!executor) {
744
+ console.log(styleText("dim", ` Skipping: ${file} (could not be loaded)`));
745
+ continue;
746
+ }
747
+ if (!["function", "jobFunction"].includes(executor.operation.kind)) {
748
+ console.log(styleText("dim", ` Skipping: ${executor.name} (not a function executor)`));
749
+ continue;
750
+ }
751
+ executors.push({
752
+ name: executor.name,
753
+ sourceFile: file
754
+ });
755
+ }
756
+ if (executors.length === 0) {
757
+ console.log(styleText("dim", " No function executors to bundle"));
758
+ return;
759
+ }
760
+ const outputDir = path.resolve(getDistDir(), "executors");
761
+ fs.mkdirSync(outputDir, { recursive: true });
762
+ let tsconfig;
763
+ try {
764
+ tsconfig = await resolveTSConfig();
765
+ } catch {
766
+ tsconfig = void 0;
767
+ }
768
+ await Promise.all(executors.map((executor) => bundleSingleExecutor(executor, outputDir, tsconfig)));
769
+ console.log(styleText("green", "Bundled"), styleText("cyan", "\"executor\""));
770
+ }
771
+ async function bundleSingleExecutor(executor, outputDir, tsconfig) {
772
+ const entryPath = path.join(outputDir, `${executor.name}.entry.js`);
773
+ const absoluteSourcePath = path.resolve(executor.sourceFile).replace(/\\/g, "/");
774
+ const entryContent = ml`
775
+ import _internalExecutor from "${absoluteSourcePath}";
702
776
 
703
- // Export the executor function
704
- export const __executor_function = ${exec.body.toString()};
705
- `);
706
- const stepsDir = path.join(tempDir, "executor_steps");
707
- fs.mkdirSync(stepsDir, { recursive: true });
708
- const executorFilePath = path.join(stepsDir, `${executor.name}.js`);
709
- const relativePath = path.relative(stepsDir, transformedPath).replace(/\\/g, "/");
710
- const executorContent = ml`
711
- import { __executor_function } from "${relativePath}";
777
+ const __executor_function = _internalExecutor.operation.body;
712
778
 
713
- globalThis.main = __executor_function;
714
- `;
715
- fs.writeFileSync(executorFilePath, executorContent);
716
- return [executorFilePath];
717
- }
718
- };
779
+ globalThis.main = __executor_function;
780
+ `;
781
+ fs.writeFileSync(entryPath, entryContent);
782
+ const outputPath = path.join(outputDir, `${executor.name}.js`);
783
+ await rolldown.build(rolldown.defineConfig({
784
+ input: entryPath,
785
+ output: {
786
+ file: outputPath,
787
+ format: "esm",
788
+ sourcemap: true,
789
+ minify: true,
790
+ inlineDynamicImports: true
791
+ },
792
+ tsconfig,
793
+ treeshake: {
794
+ moduleSideEffects: false,
795
+ annotations: true,
796
+ unknownGlobalSideEffects: false
797
+ },
798
+ logLevel: "silent"
799
+ }));
800
+ }
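bundleSingleExecutor writes a small entry file and lets rolldown strip everything except operation.body. For a hypothetical executor whose source is src/executors/sync-orders.ts and whose name is sync-orders, the generated dist/executors/sync-orders.entry.js would look like this (the path is illustrative):

// Generated entry file (sketch); the import specifier is the absolute source path.
import _internalExecutor from "/abs/project/src/executors/sync-orders.ts";

const __executor_function = _internalExecutor.operation.body;

globalThis.main = __executor_function;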
719
801
 
720
802
  //#endregion
721
803
  //#region src/cli/bundler/resolver/loader.ts
722
- var ResolverLoader = class {
723
- async load(resolverFilePath) {
724
- const resolver = (await import(`${pathToFileURL(resolverFilePath).href}?t=${Date.now()}`)).default;
725
- const parseResult = ResolverSchema.safeParse(resolver);
726
- if (!parseResult.success) return null;
727
- return parseResult.data;
728
- }
729
- };
804
+ async function loadResolver(resolverFilePath) {
805
+ const resolver = (await import(`${pathToFileURL(resolverFilePath).href}?t=${Date.now()}`)).default;
806
+ const parseResult = ResolverSchema.safeParse(resolver);
807
+ if (!parseResult.success) return null;
808
+ return parseResult.data;
809
+ }
730
810
 
731
811
  //#endregion
732
- //#region src/cli/bundler/resolver/transformer.ts
733
- var CodeTransformer = class {
734
- constructor() {}
735
- async transform(filePath, tempDir) {
736
- const sourceText = fs.readFileSync(filePath).toString();
737
- const transformedPath = path.join(path.dirname(filePath), path.basename(filePath, ".js") + ".transformed.js");
738
- const resolver = (await import(`${pathToFileURL(filePath)}?t=${(/* @__PURE__ */ new Date()).getTime()}`)).default;
739
- const hasInput = resolver.input && Object.keys(resolver.input).length > 0;
740
- const bodyVariableName = "$tailor_resolver_body";
741
- const bodyFnStr = resolver.body?.toString() || "() => {}";
742
- let modifiedSourceText = sourceText;
743
- const defaultExportRegex = /export\s+default\s+createResolver\s*\(/;
744
- if (defaultExportRegex.test(sourceText)) {
745
- modifiedSourceText = sourceText.replace(defaultExportRegex, "const _internalResolver = createResolver(");
746
- modifiedSourceText += "\nexport default _internalResolver;";
747
- } else {
748
- const bundledExportMatch = sourceText.match(/export\s*\{\s*(\w+)\s+as\s+default\s*\}/);
749
- if (bundledExportMatch) {
750
- const exportedName = bundledExportMatch[1];
751
- modifiedSourceText += `\nconst _internalResolver = ${exportedName};`;
752
- }
812
+ //#region src/cli/bundler/resolver/resolver-bundler.ts
813
+ /**
814
+ * Bundle resolvers for the specified namespace
815
+ *
816
+ * This function:
817
+ * 1. Uses a transform plugin to add validation wrapper during bundling
818
+ * 2. Creates entry file
819
+ * 3. Bundles in a single step with tree-shaking
820
+ */
821
+ async function bundleResolvers(namespace, config) {
822
+ const files = loadFilesWithIgnores(config);
823
+ if (files.length === 0) throw new Error(`No files found matching pattern: ${config.files?.join(", ")}`);
824
+ console.log("");
825
+ console.log("Bundling", styleText("cyanBright", files.length.toString()), "files for", styleText("cyan", `"${namespace}"`));
826
+ const resolvers = [];
827
+ for (const file of files) {
828
+ const resolver = await loadResolver(file);
829
+ if (!resolver) {
830
+ console.log(styleText("dim", ` Skipping: ${file} (could not be loaded)`));
831
+ continue;
753
832
  }
754
- const wrappedBodyCode = hasInput ? ml`
755
- async (context) => {
756
- if (_internalResolver.input) {
757
- const result = t.object(_internalResolver.input).parse({
758
- value: context.input,
759
- data: context.input,
760
- user: context.user,
761
- });
833
+ resolvers.push({
834
+ name: resolver.name,
835
+ sourceFile: file
836
+ });
837
+ }
838
+ const outputDir = path.resolve(getDistDir(), "resolvers");
839
+ fs.mkdirSync(outputDir, { recursive: true });
840
+ let tsconfig;
841
+ try {
842
+ tsconfig = await resolveTSConfig();
843
+ } catch {
844
+ tsconfig = void 0;
845
+ }
846
+ await Promise.all(resolvers.map((resolver) => bundleSingleResolver(resolver, outputDir, tsconfig)));
847
+ console.log(styleText("green", "Bundled"), styleText("cyan", `"${namespace}"`));
848
+ }
849
+ async function bundleSingleResolver(resolver, outputDir, tsconfig) {
850
+ const entryPath = path.join(outputDir, `${resolver.name}.entry.js`);
851
+ const absoluteSourcePath = path.resolve(resolver.sourceFile).replace(/\\/g, "/");
852
+ const entryContent = ml`
853
+ import _internalResolver from "${absoluteSourcePath}";
854
+ import { t } from "@tailor-platform/sdk";
762
855
 
763
- if (result.issues) {
764
- const errorMessages = result.issues
765
- .map(issue => {
766
- const path = issue.path ? issue.path.join('.') : '';
767
- return path ? \` \${path}: \${issue.message}\` : issue.message;
768
- })
769
- .join('\\n');
770
- throw new Error(\`Failed to input validation:\\n\${errorMessages}\`);
771
- }
772
- }
856
+ const $tailor_resolver_body = async (context) => {
857
+ if (_internalResolver.input) {
858
+ const result = t.object(_internalResolver.input).parse({
859
+ value: context.input,
860
+ data: context.input,
861
+ user: context.user,
862
+ });
863
+
864
+ if (result.issues) {
865
+ const errorMessages = result.issues
866
+ .map(issue => {
867
+ const path = issue.path ? issue.path.join('.') : '';
868
+ return path ? \` \${path}: \${issue.message}\` : issue.message;
869
+ })
870
+ .join('\\n');
871
+ throw new Error(\`Failed to input validation:\\n\${errorMessages}\`);
872
+ }
873
+ }
773
874
 
774
- const originalBody = ${bodyFnStr};
775
- return originalBody(context);
875
+ return _internalResolver.body(context);
876
+ };
877
+
878
+ globalThis.main = $tailor_resolver_body;
879
+ `;
880
+ fs.writeFileSync(entryPath, entryContent);
881
+ const outputPath = path.join(outputDir, `${resolver.name}.js`);
882
+ await rolldown.build(rolldown.defineConfig({
883
+ input: entryPath,
884
+ output: {
885
+ file: outputPath,
886
+ format: "esm",
887
+ sourcemap: true,
888
+ minify: true,
889
+ inlineDynamicImports: true
890
+ },
891
+ tsconfig,
892
+ treeshake: {
893
+ moduleSideEffects: false,
894
+ annotations: true,
895
+ unknownGlobalSideEffects: false
896
+ },
897
+ logLevel: "silent"
898
+ }));
776
899
  }
777
- ` : bodyFnStr;
778
- fs.writeFileSync(transformedPath, ml`
779
- ${modifiedSourceText}
780
900
 
781
- export const ${bodyVariableName} = ${wrappedBodyCode};
782
- `);
783
- const functionDir = path.join(tempDir, "functions");
784
- fs.mkdirSync(functionDir, { recursive: true });
785
- const bodyFilePath = path.join(functionDir, `${resolver.name}__body.js`);
786
- const relativePath = path.relative(functionDir, transformedPath).replace(/\\/g, "/");
787
- const bodyContent = ml`
788
- import { ${bodyVariableName} } from "${relativePath}";
789
- globalThis.main = ${bodyVariableName};
790
- `;
791
- fs.writeFileSync(bodyFilePath, bodyContent);
792
- return [bodyFilePath];
901
+ //#endregion
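The resolver bundler assumes each resolver file default-exports a createResolver(...) value with a name, an optional input map, and a body; the generated entry above wraps body so the input map is validated with t.object(...) before the resolver runs. A sketch of a resolver source file that fits this contract (the t.string() field validator is an assumption, only t.object appears in this diff):

import { createResolver, t } from "@tailor-platform/sdk";

// The default export is required: the generated entry imports it as _internalResolver.
export default createResolver({
  name: "orderSummary",
  input: { orderId: t.string() }, // validated by the generated wrapper before body runs
  body: async (context) => {
    return { id: context.input.orderId, status: "open" };
  },
});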
902
+ //#region src/cli/bundler/workflow/ast-transformer.ts
903
+ /**
904
+ * Check if a module source is from @tailor-platform/sdk (including subpaths)
905
+ */
906
+ function isTailorSdkSource(source) {
907
+ return /^@tailor-platform\/sdk(\/|$)/.test(source);
908
+ }
909
+ /**
910
+ * Get the source string from a dynamic import or require call
911
+ */
912
+ function getImportSource(node) {
913
+ if (!node) return null;
914
+ if (node.type === "ImportExpression") {
915
+ const source = node.source;
916
+ if (source.type === "Literal" && typeof source.value === "string") return source.value;
917
+ }
918
+ if (node.type === "CallExpression") {
919
+ const callExpr = node;
920
+ if (callExpr.callee.type === "Identifier" && callExpr.callee.name === "require") {
921
+ const arg = callExpr.arguments[0];
922
+ if (arg && "type" in arg && arg.type === "Literal" && "value" in arg && typeof arg.value === "string") return arg.value;
923
+ }
793
924
  }
794
- };
925
+ return null;
926
+ }
927
+ /**
928
+ * Unwrap AwaitExpression to get the inner expression
929
+ */
930
+ function unwrapAwait(node) {
931
+ if (node?.type === "AwaitExpression") return node.argument;
932
+ return node;
933
+ }
934
+ /**
935
+ * Collect all import bindings for createWorkflowJob from @tailor-platform/sdk
936
+ * Returns a Set of local names that refer to createWorkflowJob
937
+ */
938
+ function collectCreateWorkflowJobBindings(program) {
939
+ const bindings = /* @__PURE__ */ new Set();
940
+ function walk(node) {
941
+ if (!node || typeof node !== "object") return;
942
+ const nodeType = node.type;
943
+ if (nodeType === "ImportDeclaration") {
944
+ const importDecl = node;
945
+ const source = importDecl.source?.value;
946
+ if (typeof source === "string" && isTailorSdkSource(source)) {
947
+ for (const specifier of importDecl.specifiers || []) if (specifier.type === "ImportSpecifier") {
948
+ const importSpec = specifier;
949
+ const imported = importSpec.imported.type === "Identifier" ? importSpec.imported.name : importSpec.imported.value;
950
+ if (imported === "createWorkflowJob") bindings.add(importSpec.local?.name || imported);
951
+ } else if (specifier.type === "ImportDefaultSpecifier" || specifier.type === "ImportNamespaceSpecifier") {
952
+ const spec = specifier;
953
+ bindings.add(`__namespace__:${spec.local?.name}`);
954
+ }
955
+ }
956
+ }
957
+ if (nodeType === "VariableDeclaration") {
958
+ const varDecl = node;
959
+ for (const decl of varDecl.declarations || []) {
960
+ const init = unwrapAwait(decl.init);
961
+ const source = getImportSource(init);
962
+ if (source && isTailorSdkSource(source)) {
963
+ const id = decl.id;
964
+ if (id?.type === "Identifier") bindings.add(`__namespace__:${id.name}`);
965
+ else if (id?.type === "ObjectPattern") {
966
+ const objPattern = id;
967
+ for (const prop of objPattern.properties || []) if (prop.type === "Property") {
968
+ const bindingProp = prop;
969
+ const keyName = bindingProp.key.type === "Identifier" ? bindingProp.key.name : bindingProp.key.value;
970
+ if (keyName === "createWorkflowJob") {
971
+ const localName = bindingProp.value.type === "Identifier" ? bindingProp.value.name : keyName;
972
+ bindings.add(localName ?? "");
973
+ }
974
+ }
975
+ }
976
+ }
977
+ }
978
+ }
979
+ for (const key of Object.keys(node)) {
980
+ const child = node[key];
981
+ if (Array.isArray(child)) child.forEach((c) => walk(c));
982
+ else if (child && typeof child === "object") walk(child);
983
+ }
984
+ }
985
+ walk(program);
986
+ return bindings;
987
+ }
988
+ /**
989
+ * Check if a CallExpression is a createWorkflowJob call
990
+ */
991
+ function isCreateWorkflowJobCall(node, bindings) {
992
+ if (node.type !== "CallExpression") return false;
993
+ const callee = node.callee;
994
+ if (callee.type === "Identifier") {
995
+ const identifier = callee;
996
+ return bindings.has(identifier.name);
997
+ }
998
+ if (callee.type === "MemberExpression") {
999
+ const memberExpr = callee;
1000
+ if (!memberExpr.computed) {
1001
+ const object = memberExpr.object;
1002
+ const property = memberExpr.property;
1003
+ if (object.type === "Identifier" && bindings.has(`__namespace__:${object.name}`) && property.name === "createWorkflowJob") return true;
1004
+ }
1005
+ }
1006
+ return false;
1007
+ }
1008
+ /**
1009
+ * Check if a node is a string literal
1010
+ */
1011
+ function isStringLiteral(node) {
1012
+ return node?.type === "Literal" && typeof node.value === "string";
1013
+ }
1014
+ /**
1015
+ * Check if a node is a function expression (arrow or regular)
1016
+ */
1017
+ function isFunctionExpression(node) {
1018
+ return node?.type === "ArrowFunctionExpression" || node?.type === "FunctionExpression";
1019
+ }
1020
+ /**
1021
+ * Find a property in an object expression
1022
+ */
1023
+ function findProperty(properties, name) {
1024
+ for (const prop of properties) if (prop.type === "Property") {
1025
+ const objProp = prop;
1026
+ if ((objProp.key.type === "Identifier" ? objProp.key.name : objProp.key.type === "Literal" ? objProp.key.value : null) === name) return {
1027
+ key: objProp.key,
1028
+ value: objProp.value,
1029
+ start: objProp.start,
1030
+ end: objProp.end
1031
+ };
1032
+ }
1033
+ return null;
1034
+ }
1035
+ /**
1036
+ * Find all workflow jobs by detecting createWorkflowJob calls from @tailor-platform/sdk
1037
+ */
1038
+ function findAllJobs(program, _sourceText) {
1039
+ const jobs = [];
1040
+ const bindings = collectCreateWorkflowJobBindings(program);
1041
+ function walk(node, parents = []) {
1042
+ if (!node || typeof node !== "object") return;
1043
+ if (isCreateWorkflowJobCall(node, bindings)) {
1044
+ const args = node.arguments;
1045
+ if (args?.length >= 1 && args[0]?.type === "ObjectExpression") {
1046
+ const configObj = args[0];
1047
+ const nameProp = findProperty(configObj.properties, "name");
1048
+ const bodyProp = findProperty(configObj.properties, "body");
1049
+ const depsProp = findProperty(configObj.properties, "deps");
1050
+ if (nameProp && isStringLiteral(nameProp.value) && bodyProp && isFunctionExpression(bodyProp.value)) {
1051
+ let statementRange;
1052
+ for (let i = 0; i < parents.length; i++) {
1053
+ const parent = parents[i];
1054
+ if (parent.type === "ExportNamedDeclaration" || parent.type === "VariableDeclaration") {
1055
+ statementRange = {
1056
+ start: parent.start,
1057
+ end: parent.end
1058
+ };
1059
+ break;
1060
+ }
1061
+ }
1062
+ jobs.push({
1063
+ name: nameProp.value.value,
1064
+ nameRange: {
1065
+ start: nameProp.start,
1066
+ end: nameProp.end
1067
+ },
1068
+ depsRange: depsProp ? {
1069
+ start: depsProp.start,
1070
+ end: depsProp.end
1071
+ } : void 0,
1072
+ bodyValueRange: {
1073
+ start: bodyProp.value.start,
1074
+ end: bodyProp.value.end
1075
+ },
1076
+ statementRange
1077
+ });
1078
+ }
1079
+ }
1080
+ }
1081
+ const newParents = [...parents, node];
1082
+ for (const key of Object.keys(node)) {
1083
+ const child = node[key];
1084
+ if (Array.isArray(child)) child.forEach((c) => walk(c, newParents));
1085
+ else if (child && typeof child === "object") walk(child, newParents);
1086
+ }
1087
+ }
1088
+ walk(program);
1089
+ return jobs;
1090
+ }
1091
+ /**
1092
+ * Apply string replacements to source code
1093
+ * Replacements are applied from end to start to maintain positions
1094
+ */
1095
+ function applyReplacements(source, replacements) {
1096
+ const sorted = [...replacements].sort((a, b) => b.start - a.start);
1097
+ let result = source;
1098
+ for (const r of sorted) result = result.slice(0, r.start) + r.text + result.slice(r.end);
1099
+ return result;
1100
+ }
1101
+ /**
1102
+ * Find the end position including trailing comma
1103
+ */
1104
+ function findTrailingCommaEnd(source, position) {
1105
+ let i = position;
1106
+ while (i < source.length) {
1107
+ const char = source[i];
1108
+ if (char === ",") return i + 1;
1109
+ if (!/\s/.test(char)) break;
1110
+ i++;
1111
+ }
1112
+ return position;
1113
+ }
1114
+ /**
1115
+ * Find the end of a statement including any trailing newline
1116
+ */
1117
+ function findStatementEnd(source, position) {
1118
+ let i = position;
1119
+ while (i < source.length && (source[i] === ";" || source[i] === " " || source[i] === " ")) i++;
1120
+ if (i < source.length && source[i] === "\n") i++;
1121
+ return i;
1122
+ }
1123
+ /**
1124
+ * Find variable declarations by export names
1125
+ * Returns a map of export name to statement range
1126
+ */
1127
+ function findVariableDeclarationsByName(program) {
1128
+ const declarations = /* @__PURE__ */ new Map();
1129
+ function walk(node) {
1130
+ if (!node || typeof node !== "object") return;
1131
+ const nodeType = node.type;
1132
+ if (nodeType === "VariableDeclaration") {
1133
+ const varDecl = node;
1134
+ for (const decl of varDecl.declarations || []) if (decl.id?.type === "Identifier" && decl.id.name) {
1135
+ if (!declarations.has(decl.id.name)) declarations.set(decl.id.name, {
1136
+ start: varDecl.start,
1137
+ end: varDecl.end
1138
+ });
1139
+ }
1140
+ }
1141
+ if (nodeType === "ExportNamedDeclaration") {
1142
+ const exportDecl = node;
1143
+ const declaration = exportDecl.declaration;
1144
+ if (declaration?.type === "VariableDeclaration") {
1145
+ const varDecl = declaration;
1146
+ for (const decl of varDecl.declarations || []) if (decl.id?.type === "Identifier" && decl.id.name) declarations.set(decl.id.name, {
1147
+ start: exportDecl.start,
1148
+ end: exportDecl.end
1149
+ });
1150
+ }
1151
+ }
1152
+ for (const key of Object.keys(node)) {
1153
+ const child = node[key];
1154
+ if (Array.isArray(child)) child.forEach((c) => walk(c));
1155
+ else if (child && typeof child === "object") walk(child);
1156
+ }
1157
+ }
1158
+ walk(program);
1159
+ return declarations;
1160
+ }
1161
+ /**
1162
+ * Transform workflow source code
1163
+ * - Target job: remove deps
1164
+ * - Other jobs: remove entire variable declaration
1165
+ *
1166
+ * @param source - The source code to transform
1167
+ * @param targetJobName - The name of the target job (from job config)
1168
+ * @param targetJobExportName - The export name of the target job (optional, for enhanced detection)
1169
+ * @param otherJobExportNames - Export names of other jobs to remove (optional, for enhanced detection)
1170
+ */
1171
+ function transformWorkflowSource(source, targetJobName, targetJobExportName, otherJobExportNames) {
1172
+ const { program } = parseSync("input.ts", source);
1173
+ const detectedJobs = findAllJobs(program, source);
1174
+ const allDeclarations = findVariableDeclarationsByName(program);
1175
+ const replacements = [];
1176
+ const removedRanges = /* @__PURE__ */ new Set();
1177
+ const markRemoved = (start, end) => {
1178
+ removedRanges.add(`${start}-${end}`);
1179
+ };
1180
+ const isRemoved = (start, end) => {
1181
+ return removedRanges.has(`${start}-${end}`);
1182
+ };
1183
+ for (const job of detectedJobs) if (job.name === targetJobName) {
1184
+ if (job.depsRange) replacements.push({
1185
+ start: job.depsRange.start,
1186
+ end: findTrailingCommaEnd(source, job.depsRange.end),
1187
+ text: ""
1188
+ });
1189
+ } else if (job.statementRange && !isRemoved(job.statementRange.start, job.statementRange.end)) {
1190
+ replacements.push({
1191
+ start: job.statementRange.start,
1192
+ end: findStatementEnd(source, job.statementRange.end),
1193
+ text: ""
1194
+ });
1195
+ markRemoved(job.statementRange.start, job.statementRange.end);
1196
+ } else if (!job.statementRange) replacements.push({
1197
+ start: job.bodyValueRange.start,
1198
+ end: job.bodyValueRange.end,
1199
+ text: "() => {}"
1200
+ });
1201
+ if (otherJobExportNames) for (const exportName of otherJobExportNames) {
1202
+ if (exportName === targetJobExportName) continue;
1203
+ const declRange = allDeclarations.get(exportName);
1204
+ if (declRange && !isRemoved(declRange.start, declRange.end)) {
1205
+ replacements.push({
1206
+ start: declRange.start,
1207
+ end: findStatementEnd(source, declRange.end),
1208
+ text: ""
1209
+ });
1210
+ markRemoved(declRange.start, declRange.end);
1211
+ }
1212
+ }
1213
+ return applyReplacements(source, replacements);
1214
+ }
1215
+
1216
+ //#endregion
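In effect, transformWorkflowSource rewrites one workflow source file from the point of view of a single target job: the target keeps its declaration but loses its deps property (so dep modules are not pulled into this bundle), sibling job declarations are removed entirely, and an inline job with no surrounding declaration has its body stubbed to () => {}. A before/after sketch for targetJobName "sync-orders":

// Before (authored source):
export const fetchOrders = createWorkflowJob({
  name: "fetch-orders",
  body: async (input) => ({ orders: [] }),
});
export const syncOrders = createWorkflowJob({
  name: "sync-orders",
  deps: [fetchOrders],
  body: async (input, jobs) => jobs.fetch_orders(input),
});

// After transformWorkflowSource(source, "sync-orders", "syncOrders", ["fetchOrders"]):
export const syncOrders = createWorkflowJob({
  name: "sync-orders",
  body: async (input, jobs) => jobs.fetch_orders(input),
});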
1217
+ //#region src/cli/bundler/workflow/workflow-bundler.ts
1218
+ /**
1219
+ * Bundle workflow jobs
1220
+ *
1221
+ * This function:
1222
+ * 1. Uses a transform plugin to remove deps during bundling (preserves module resolution)
1223
+ * 2. Creates entry file
1224
+ * 3. Bundles in a single step with tree-shaking
1225
+ */
1226
+ async function bundleWorkflowJobs(allJobs) {
1227
+ if (allJobs.length === 0) {
1228
+ console.log(styleText("dim", "No workflow jobs to bundle"));
1229
+ return;
1230
+ }
1231
+ console.log("");
1232
+ console.log("Bundling", styleText("cyanBright", allJobs.length.toString()), "files for", styleText("cyan", "\"workflow-job\""));
1233
+ const outputDir = path.resolve(getDistDir(), "workflow-jobs");
1234
+ fs.mkdirSync(outputDir, { recursive: true });
1235
+ let tsconfig;
1236
+ try {
1237
+ tsconfig = await resolveTSConfig();
1238
+ } catch {
1239
+ tsconfig = void 0;
1240
+ }
1241
+ await Promise.all(allJobs.map((job) => bundleSingleJob(job, allJobs, outputDir, tsconfig)));
1242
+ console.log(styleText("green", "Bundled"), styleText("cyan", "\"workflow-job\""));
1243
+ }
1244
+ async function bundleSingleJob(job, allJobs, outputDir, tsconfig) {
1245
+ const depsJobNames = findJobDeps(job.name, allJobs);
1246
+ const jobsObject = generateJobsObject(depsJobNames);
1247
+ const entryPath = path.join(outputDir, `${job.name}.entry.js`);
1248
+ const absoluteSourcePath = path.resolve(job.sourceFile).replace(/\\/g, "/");
1249
+ const entryContent = ml`
1250
+ import { ${job.exportName} } from "${absoluteSourcePath}";
1251
+
1252
+ const jobs = {
1253
+ ${jobsObject}
1254
+ };
1255
+
1256
+ globalThis.main = async (input) => {
1257
+ return await ${job.exportName}.body(input, jobs);
1258
+ };
1259
+ `;
1260
+ fs.writeFileSync(entryPath, entryContent);
1261
+ const outputPath = path.join(outputDir, `${job.name}.js`);
1262
+ const otherJobExportNames = allJobs.filter((j) => j.name !== job.name).map((j) => j.exportName);
1263
+ await rolldown.build(rolldown.defineConfig({
1264
+ input: entryPath,
1265
+ output: {
1266
+ file: outputPath,
1267
+ format: "esm",
1268
+ sourcemap: true,
1269
+ minify: true,
1270
+ inlineDynamicImports: true
1271
+ },
1272
+ tsconfig,
1273
+ plugins: [{
1274
+ name: "workflow-transform",
1275
+ transform: {
1276
+ filter: { id: { include: [/\.ts$/, /\.js$/] } },
1277
+ handler(code) {
1278
+ if (!code.includes("createWorkflowJob")) return null;
1279
+ return { code: transformWorkflowSource(code, job.name, job.exportName, otherJobExportNames) };
1280
+ }
1281
+ }
1282
+ }],
1283
+ treeshake: {
1284
+ moduleSideEffects: false,
1285
+ annotations: true,
1286
+ unknownGlobalSideEffects: false
1287
+ },
1288
+ logLevel: "silent"
1289
+ }));
1290
+ }
1291
+ /**
1292
+ * Find the dependencies of a specific job
1293
+ */
1294
+ function findJobDeps(targetJobName, allJobs) {
1295
+ return allJobs.find((j) => j.name === targetJobName)?.deps ?? [];
1296
+ }
1297
+ function generateJobsObject(jobNames) {
1298
+ if (jobNames.length === 0) return "";
1299
+ return jobNames.map((jobName) => {
1300
+ return `"${jobName.replace(/[-\s]/g, "_")}": (args) => tailor.workflow.triggerJobFunction("${jobName}", args)`;
1301
+ }).join(",\n ");
1302
+ }
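Putting the pieces together, bundleSingleJob emits an entry that imports only the target job's export and exposes each dependency as a remote trigger; dashes and spaces in dep names become underscores in the jobs object keys. For the hypothetical sync-orders job depending on fetch-orders, dist/workflow-jobs/sync-orders.entry.js would be:

// Generated entry file (sketch); the import specifier is the absolute workflow source path.
import { syncOrders } from "/abs/project/workflows/orders.ts";

const jobs = {
  "fetch_orders": (args) => tailor.workflow.triggerJobFunction("fetch-orders", args)
};

globalThis.main = async (input) => {
  return await syncOrders.body(input, jobs);
};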
795
1303
 
796
1304
  //#endregion
797
1305
  //#region src/parser/generator-config/index.ts
@@ -2290,11 +2798,20 @@ async function readPackageJson() {
 
  //#endregion
  //#region src/cli/client.ts
- const baseUrl = process.env.PLATFORM_URL ?? "https://api.tailor.tech";
+ const platformBaseUrl = process.env.PLATFORM_URL ?? "https://api.tailor.tech";
+ const oauth2ClientId = "cpoc_0Iudir72fqSpqC6GQ58ri1cLAqcq5vJl";
+ const oauth2DiscoveryEndpoint = "/.well-known/oauth-authorization-server/oauth2/platform";
+ function initOAuth2Client() {
+ return new OAuth2Client({
+ clientId: oauth2ClientId,
+ server: platformBaseUrl,
+ discoveryEndpoint: oauth2DiscoveryEndpoint
+ });
+ }
  async function initOperatorClient(accessToken) {
  const transport = createConnectTransport({
  httpVersion: "2",
- baseUrl,
+ baseUrl: platformBaseUrl,
  interceptors: [
  await userAgentInterceptor(),
  await bearerTokenInterceptor(accessToken),
@@ -2321,26 +2838,6 @@ async function bearerTokenInterceptor(accessToken) {
  return await next(req);
  };
  }
- async function refreshToken(refreshToken$1) {
- const refreshUrl = new URL("/auth/platform/token/refresh", baseUrl).href;
- const formData = new URLSearchParams();
- formData.append("refresh_token", refreshToken$1);
- const resp = await fetch(refreshUrl, {
- method: "POST",
- headers: {
- "User-Agent": await userAgent(),
- "Content-Type": "application/x-www-form-urlencoded"
- },
- body: formData
- });
- if (!resp.ok) throw new Error("Failed to refresh token");
- const rawJson = await resp.json();
- return z.object({
- access_token: z.string(),
- refresh_token: z.string(),
- expires_in: z.number()
- }).parse(rawJson);
- }
  function retryInterceptor() {
  return (next) => async (req) => {
  if (req.stream) return await next(req);
@@ -2385,6 +2882,16 @@ async function fetchAll(fn) {
  }
  return items;
  }
+ async function fetchUserInfo(accessToken) {
+ const userInfoUrl = new URL("/auth/platform/userinfo", platformBaseUrl).href;
+ const resp = await fetch(userInfoUrl, { headers: {
+ Authorization: `Bearer ${accessToken}`,
+ "User-Agent": await userAgent()
+ } });
+ if (!resp.ok) throw new Error(`Failed to fetch user info: ${resp.statusText}`);
+ const rawJson = await resp.json();
+ return z.object({ email: z.string() }).parse(rawJson);
+ }
  async function resolveStaticWebsiteUrls(client, workspaceId, urls, context) {
  if (!urls) return [];
  return (await Promise.all(urls.map(async (url) => {
@@ -2557,16 +3064,18 @@ async function fetchLatestToken(config, user) {
  Please verify your user name and login using 'tailor-sdk login' command.
  `);
  if (new Date(tokens.token_expires_at) > /* @__PURE__ */ new Date()) return tokens.access_token;
- const resp = await refreshToken(tokens.refresh_token);
- const newExpiresAt = /* @__PURE__ */ new Date();
- newExpiresAt.setSeconds(newExpiresAt.getSeconds() + resp.expires_in);
+ const resp = await initOAuth2Client().refreshToken({
+ accessToken: tokens.access_token,
+ refreshToken: tokens.refresh_token,
+ expiresAt: Date.parse(tokens.token_expires_at)
+ });
  config.users[user] = {
- access_token: resp.access_token,
- refresh_token: resp.refresh_token,
- token_expires_at: newExpiresAt.toISOString()
+ access_token: resp.accessToken,
+ refresh_token: resp.refreshToken,
+ token_expires_at: new Date(resp.expiresAt).toISOString()
  };
  writePlatformConfig(config);
- return resp.access_token;
+ return resp.accessToken;
  }
  function loadConfigPath(configPath) {
  if (configPath) return configPath;
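Token refresh now goes through @badgateway/oauth2-client, discovering the token endpoint from the platform's OAuth metadata instead of posting to the old /auth/platform/token/refresh route by hand. A sketch of the new flow with the field names used above (stored stands in for a config.users[user] entry):

// Sketch of the new refresh path used by fetchLatestToken.
const client = initOAuth2Client(); // OAuth2Client configured with the discovery endpoint
const next = await client.refreshToken({
  accessToken: stored.access_token,
  refreshToken: stored.refresh_token,
  expiresAt: Date.parse(stored.token_expires_at), // epoch milliseconds
});
// next.accessToken, next.refreshToken and next.expiresAt are written back to the profile.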
@@ -4312,7 +4821,7 @@ async function planResolvers(client, workspaceId, pipelines, executors, deletedS
  return changeSet;
  }
  function processResolver(resolver, executorUsedResolvers, env) {
- const functionPath = path.join(getDistDir(), "functions", `${resolver.name}__body.js`);
+ const functionPath = path.join(getDistDir(), "resolvers", `${resolver.name}.js`);
  let functionCode = "";
  try {
  functionCode = fs.readFileSync(functionPath, "utf-8");
@@ -5023,6 +5532,134 @@ function protoGqlOperand(operand) {
5023
5532
  } };
5024
5533
  }
5025
5534
 
5535
+ //#endregion
5536
+ //#region src/cli/apply/services/workflow.ts
5537
+ async function applyWorkflow(client, changeSet, phase = "create-update") {
5538
+ if (phase === "create-update") {
5539
+ await Promise.all(changeSet.creates.map(async (create) => {
5540
+ const jobFunctions = {};
5541
+ for (const [jobName, script] of create.scripts.entries()) {
5542
+ const response = await client.createWorkflowJobFunction({
5543
+ workspaceId: create.workspaceId,
5544
+ jobFunctionName: jobName,
5545
+ script
5546
+ });
5547
+ if (response.jobFunction) jobFunctions[jobName] = response.jobFunction.version;
5548
+ }
5549
+ await client.createWorkflow({
5550
+ workspaceId: create.workspaceId,
5551
+ workflowName: create.workflow.name,
5552
+ mainJobFunctionName: create.workflow.mainJob.name,
5553
+ jobFunctions
5554
+ });
5555
+ }));
5556
+ await Promise.all(changeSet.updates.map(async (update) => {
5557
+ const jobFunctions = {};
5558
+ for (const [jobName, script] of update.scripts.entries()) {
5559
+ const response = await client.updateWorkflowJobFunction({
5560
+ workspaceId: update.workspaceId,
5561
+ jobFunctionName: jobName,
5562
+ script
5563
+ });
5564
+ if (response.jobFunction) jobFunctions[jobName] = response.jobFunction.version;
5565
+ }
5566
+ await client.updateWorkflow({
5567
+ workspaceId: update.workspaceId,
5568
+ workflowName: update.workflow.name,
5569
+ mainJobFunctionName: update.workflow.mainJob.name,
5570
+ jobFunctions
5571
+ });
5572
+ }));
5573
+ } else if (phase === "delete") await Promise.all(changeSet.deletes.map((del) => client.deleteWorkflow({
5574
+ workspaceId: del.workspaceId,
5575
+ workflowId: del.workflowId
5576
+ })));
5577
+ }
5578
+ /**
5579
+ * Recursively collect all job names from a workflow's mainJob and its dependencies
5580
+ */
5581
+ function collectJobNamesFromWorkflow(workflow) {
5582
+ const jobNames = /* @__PURE__ */ new Set();
5583
+ const collectFromJob = (job) => {
5584
+ if (!job || jobNames.has(job.name)) return;
5585
+ jobNames.add(job.name);
5586
+ if (job.deps && Array.isArray(job.deps)) for (const dep of job.deps) collectFromJob(dep);
5587
+ };
5588
+ collectFromJob(workflow.mainJob);
5589
+ return jobNames;
5590
+ }
5591
+ async function planWorkflow(client, workspaceId, workflows) {
5592
+ const changeSet = new ChangeSet("Workflows");
5593
+ const existingWorkflows = await fetchAll(async (pageToken) => {
5594
+ const response = await client.listWorkflows({
5595
+ workspaceId,
5596
+ pageToken,
5597
+ pageSize: 100,
5598
+ pageDirection: 0
5599
+ });
5600
+ return [response.workflows.map((w) => ({
5601
+ id: w.id,
5602
+ name: w.name
5603
+ })), response.nextPageToken];
5604
+ });
5605
+ const existingWorkflowMap = /* @__PURE__ */ new Map();
5606
+ existingWorkflows.forEach((workflow) => {
5607
+ existingWorkflowMap.set(workflow.name, {
5608
+ id: workflow.id,
5609
+ name: workflow.name
5610
+ });
5611
+ });
5612
+ const allScripts = await loadWorkflowScripts();
5613
+ for (const workflow of Object.values(workflows)) {
5614
+ const requiredJobNames = collectJobNamesFromWorkflow(workflow);
5615
+ const scripts = /* @__PURE__ */ new Map();
5616
+ for (const jobName of requiredJobNames) {
5617
+ const script = allScripts.get(jobName);
5618
+ if (script) scripts.set(jobName, script);
5619
+ else console.warn(`Warning: Script for job "${jobName}" not found in workflow "${workflow.name}"`);
5620
+ }
5621
+ if (existingWorkflowMap.get(workflow.name)) {
5622
+ changeSet.updates.push({
5623
+ name: workflow.name,
5624
+ workspaceId,
5625
+ workflow,
5626
+ scripts
5627
+ });
5628
+ existingWorkflowMap.delete(workflow.name);
5629
+ } else changeSet.creates.push({
5630
+ name: workflow.name,
5631
+ workspaceId,
5632
+ workflow,
5633
+ scripts
5634
+ });
5635
+ }
5636
+ existingWorkflowMap.forEach((existing) => {
5637
+ changeSet.deletes.push({
5638
+ name: existing.name,
5639
+ workspaceId,
5640
+ workflowId: existing.id
5641
+ });
5642
+ });
5643
+ changeSet.print();
5644
+ return changeSet;
5645
+ }
5646
+ async function loadWorkflowScripts() {
5647
+ const scripts = /* @__PURE__ */ new Map();
5648
+ const jobsDir = path.join(getDistDir(), "workflow-jobs");
5649
+ if (!fs.existsSync(jobsDir)) {
5650
+ console.warn(`Warning: workflow-jobs directory not found at ${jobsDir}`);
5651
+ return scripts;
5652
+ }
5653
+ const files = fs.readdirSync(jobsDir);
5654
+ for (const file of files) if (file.endsWith(".js") && !file.endsWith(".base.js") && !file.endsWith(".transformed.js") && !file.endsWith(".map")) {
5655
+ const jobName = file.replace(/\.js$/, "");
5656
+ const scriptPath = path.join(jobsDir, file);
5657
+ const script = fs.readFileSync(scriptPath, "utf-8");
5658
+ scripts.set(jobName, script);
5659
+ }
5660
+ return scripts;
5661
+ }
5662
+
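planWorkflow diffs the configured workflows against listWorkflows to decide creates, updates and deletes, and collectJobNamesFromWorkflow walks mainJob and its deps so that only reachable jobs are uploaded; each reachable name must have a matching bundled script in dist/workflow-jobs. Continuing the earlier example:

// With mainJob = syncOrders and syncOrders.deps = [fetchOrders]:
collectJobNamesFromWorkflow({ name: "order-sync", mainJob: syncOrders });
// => Set { "sync-orders", "fetch-orders" }
// Both names need a dist/workflow-jobs/<name>.js script from bundleWorkflowJobs;
// otherwise planWorkflow warns and the script is omitted from the change set.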
5026
5663
  //#endregion
5027
5664
  //#region src/cli/apply/index.ts
5028
5665
  async function apply(options) {
@@ -5033,8 +5670,11 @@ async function apply(options) {
  const buildOnly = options?.buildOnly ?? false;
  await generateUserTypes(config, configPath);
  const application = defineApplication(config);
+ let workflowResult;
+ if (application.workflowConfig) workflowResult = await loadAndCollectJobs(application.workflowConfig);
  for (const app$1 of application.applications) for (const pipeline$1 of app$1.resolverServices) await buildPipeline(pipeline$1.namespace, pipeline$1.config);
  if (application.executorService) await buildExecutor(application.executorService.config);
+ if (workflowResult && workflowResult.jobs.length > 0) await buildWorkflow(workflowResult.jobs);
  if (buildOnly) return;
  const accessToken = await loadAccessToken({
  useProfile: true,
@@ -5048,6 +5688,7 @@ async function apply(options) {
  for (const tailordb of application.tailorDBServices) await tailordb.loadTypes();
  for (const pipeline$1 of application.resolverServices) await pipeline$1.loadResolvers();
  if (application.executorService) await application.executorService.loadExecutors();
+ if (workflowResult) printLoadedWorkflows(workflowResult);
  console.log("");
  const ctx = {
  client,
@@ -5061,6 +5702,7 @@ async function apply(options) {
  const pipeline = await planPipeline(ctx);
  const app = await planApplication(ctx);
  const executor = await planExecutor(ctx);
+ const workflow = await planWorkflow(client, workspaceId, workflowResult?.workflows ?? {});
  const allConflicts = [
  ...tailorDB.conflicts,
  ...staticWebsite.conflicts,
@@ -5116,6 +5758,8 @@ async function apply(options) {
  await applyPipeline(client, pipeline, "create-update");
  await applyApplication(client, app, "create-update");
  await applyExecutor(client, executor, "create-update");
+ await applyWorkflow(client, workflow, "create-update");
+ await applyWorkflow(client, workflow, "delete");
  await applyExecutor(client, executor, "delete");
  await applyApplication(client, app, "delete");
  await applyPipeline(client, pipeline, "delete");
@@ -5126,31 +5770,13 @@ async function apply(options) {
  console.log("Successfully applied changes.");
  }
  async function buildPipeline(namespace, config) {
- const bundlerConfig = {
- namespace,
- serviceConfig: config,
- loader: new ResolverLoader(),
- transformer: new CodeTransformer(),
- outputDirs: {
- preBundle: "resolvers",
- postBundle: "functions"
- }
- };
- await new Bundler(bundlerConfig).bundle();
+ await bundleResolvers(namespace, config);
  }
  async function buildExecutor(config) {
- const bundlerConfig = {
- namespace: "executor",
- serviceConfig: config,
- loader: new ExecutorLoader(),
- transformer: new ExecutorTransformer(),
- outputDirs: {
- preBundle: "executors",
- postBundle: "executors"
- },
- shouldProcess: (executor) => ["function", "jobFunction"].includes(executor.operation.kind)
- };
- await new Bundler(bundlerConfig).bundle();
+ await bundleExecutors(config);
+ }
+ async function buildWorkflow(collectedJobs) {
+ await bundleWorkflowJobs(collectedJobs);
  }
  const applyCommand = defineCommand({
  meta: {
@@ -6316,4 +6942,5 @@ const tokenCommand = defineCommand({
  });
 
  //#endregion
- export { PATScope, apply, applyCommand, commonArgs, createCommand, deleteCommand, fetchAll, fetchLatestToken, formatArgs, generate, generateCommand, generateUserTypes, initOperatorClient, listCommand, listCommand$1, loadAccessToken, loadConfig, loadConfigPath, loadWorkspaceId, machineUserList, machineUserToken, parseFormat, printWithFormat, readPackageJson, readPlatformConfig, show, showCommand, tokenCommand, userAgent, withCommonArgs, workspaceCreate, workspaceDelete, workspaceList, writePlatformConfig };
+ export { PATScope, apply, applyCommand, commonArgs, createCommand, deleteCommand, fetchAll, fetchLatestToken, fetchUserInfo, formatArgs, generate, generateCommand, generateUserTypes, initOAuth2Client, initOperatorClient, listCommand, listCommand$1, loadAccessToken, loadConfig, loadConfigPath, loadWorkspaceId, machineUserList, machineUserToken, parseFormat, printWithFormat, readPackageJson, readPlatformConfig, show, showCommand, tokenCommand, withCommonArgs, workspaceCreate, workspaceDelete, workspaceList, writePlatformConfig };
+ //# sourceMappingURL=token-43KGC4QJ.mjs.map