@tailor-platform/sdk 0.10.4 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -0
- package/dist/cli/api.d.mts +3 -2
- package/dist/cli/api.mjs +2 -2
- package/dist/cli/index.mjs +4 -3
- package/dist/cli/index.mjs.map +1 -0
- package/dist/configure/index.d.mts +3 -3
- package/dist/configure/index.mjs +9 -2
- package/dist/configure/index.mjs.map +1 -0
- package/dist/{index-DENkoB_m.d.mts → index-BTj95IAw.d.mts} +47 -2
- package/dist/{auth-Di3vQUrT.mjs → job-CSwByDTq.mjs} +19 -1
- package/dist/job-CSwByDTq.mjs.map +1 -0
- package/dist/{token-DIx5IQOi.mjs → token-43KGC4QJ.mjs} +849 -224
- package/dist/token-43KGC4QJ.mjs.map +1 -0
- package/dist/{types-CCSNkBzX.d.mts → types-U0wDhMcX.d.mts} +20 -9
- package/dist/utils/test/index.d.mts +4 -3
- package/dist/utils/test/index.mjs +2 -1
- package/dist/utils/test/index.mjs.map +1 -0
- package/docs/configuration.md +20 -0
- package/docs/core-concepts.md +114 -0
- package/package.json +5 -2
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { getDistDir, tailorUserMap } from "./
|
|
1
|
+
import { WORKFLOW_JOB_BRAND, getDistDir, tailorUserMap } from "./job-CSwByDTq.mjs";
|
|
2
2
|
import { createRequire } from "node:module";
|
|
3
3
|
import { defineCommand } from "citty";
|
|
4
4
|
import * as path from "node:path";
|
|
@@ -7,9 +7,10 @@ import { styleText } from "node:util";
|
|
|
7
7
|
import * as fs from "node:fs";
|
|
8
8
|
import { z } from "zod";
|
|
9
9
|
import * as inflection from "inflection";
|
|
10
|
+
import ml from "multiline-ts";
|
|
10
11
|
import { readPackageJSON, resolveTSConfig } from "pkg-types";
|
|
11
12
|
import * as rolldown from "rolldown";
|
|
12
|
-
import
|
|
13
|
+
import { parseSync } from "oxc-parser";
|
|
13
14
|
import { loadEnvFile } from "node:process";
|
|
14
15
|
import { consola } from "consola";
|
|
15
16
|
import { table } from "table";
|
|
@@ -467,6 +468,7 @@ var Application = class {
|
|
|
467
468
|
_authService = void 0;
|
|
468
469
|
_subgraphs = [];
|
|
469
470
|
_executorService = void 0;
|
|
471
|
+
_workflowConfig = void 0;
|
|
470
472
|
_staticWebsiteServices = [];
|
|
471
473
|
_env = {};
|
|
472
474
|
constructor(name, config) {
|
|
@@ -498,6 +500,9 @@ var Application = class {
|
|
|
498
500
|
get executorService() {
|
|
499
501
|
return this._executorService;
|
|
500
502
|
}
|
|
503
|
+
get workflowConfig() {
|
|
504
|
+
return this._workflowConfig;
|
|
505
|
+
}
|
|
501
506
|
get staticWebsiteServices() {
|
|
502
507
|
return this._staticWebsiteServices;
|
|
503
508
|
}
|
|
@@ -550,6 +555,10 @@ var Application = class {
|
|
|
550
555
|
if (!config) return;
|
|
551
556
|
this._executorService = new ExecutorService(config);
|
|
552
557
|
}
|
|
558
|
+
defineWorkflow(config) {
|
|
559
|
+
if (!config) return;
|
|
560
|
+
this._workflowConfig = config;
|
|
561
|
+
}
|
|
553
562
|
defineStaticWebsites(websites) {
|
|
554
563
|
const websiteNames = /* @__PURE__ */ new Set();
|
|
555
564
|
(websites ?? []).forEach((config) => {
|
|
@@ -567,232 +576,730 @@ function defineApplication(config) {
|
|
|
567
576
|
app.defineIdp(config.idp);
|
|
568
577
|
app.defineAuth(config.auth);
|
|
569
578
|
app.defineExecutor(config.executor);
|
|
579
|
+
app.defineWorkflow(config.workflow);
|
|
570
580
|
app.defineStaticWebsites(config.staticWebsites);
|
|
571
581
|
return app;
|
|
572
582
|
}
|
|
573
583
|
|
|
574
584
|
//#endregion
|
|
575
|
-
//#region src/
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
|
|
579
|
-
|
|
580
|
-
|
|
581
|
-
|
|
582
|
-
|
|
583
|
-
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
585
|
+
//#region src/parser/service/workflow/schema.ts
|
|
586
|
+
const WorkflowJobSchema = z.object({
|
|
587
|
+
name: z.string(),
|
|
588
|
+
get deps() {
|
|
589
|
+
return z.array(WorkflowJobSchema).optional();
|
|
590
|
+
},
|
|
591
|
+
body: functionSchema
|
|
592
|
+
});
|
|
593
|
+
const WorkflowSchema = z.object({
|
|
594
|
+
name: z.string(),
|
|
595
|
+
mainJob: WorkflowJobSchema
|
|
596
|
+
});
|
|
597
|
+
|
|
598
|
+
//#endregion
|
|
599
|
+
//#region src/cli/application/workflow/service.ts
|
|
600
|
+
/**
|
|
601
|
+
* Load workflow files and collect all jobs in a single pass.
|
|
602
|
+
*/
|
|
603
|
+
async function loadAndCollectJobs(config) {
|
|
604
|
+
const workflows = {};
|
|
605
|
+
const workflowSources = [];
|
|
606
|
+
const collectedJobs = [];
|
|
607
|
+
let unusedJobs = [];
|
|
608
|
+
if (!config.files || config.files.length === 0) return {
|
|
609
|
+
workflows,
|
|
610
|
+
workflowSources,
|
|
611
|
+
jobs: collectedJobs,
|
|
612
|
+
unusedJobs,
|
|
613
|
+
fileCount: 0
|
|
614
|
+
};
|
|
615
|
+
const workflowFiles = loadFilesWithIgnores(config);
|
|
616
|
+
const fileCount = workflowFiles.length;
|
|
617
|
+
const allJobsMap = /* @__PURE__ */ new Map();
|
|
618
|
+
for (const workflowFile of workflowFiles) {
|
|
619
|
+
const { jobs, workflow } = await loadFileContent(workflowFile);
|
|
620
|
+
if (workflow) {
|
|
621
|
+
workflowSources.push({
|
|
622
|
+
workflow,
|
|
623
|
+
sourceFile: workflowFile
|
|
624
|
+
});
|
|
625
|
+
workflows[workflowFile] = workflow;
|
|
626
|
+
}
|
|
627
|
+
for (const job of jobs) {
|
|
628
|
+
const existing = allJobsMap.get(job.name);
|
|
629
|
+
if (existing) throw new Error(`Duplicate job name "${job.name}" found:\n - ${existing.sourceFile} (export: ${existing.exportName})\n - ${job.sourceFile} (export: ${job.exportName})\nEach job must have a unique name.`);
|
|
630
|
+
allJobsMap.set(job.name, job);
|
|
596
631
|
}
|
|
597
632
|
}
|
|
598
|
-
|
|
599
|
-
|
|
600
|
-
|
|
633
|
+
const tracedJobs = /* @__PURE__ */ new Map();
|
|
634
|
+
for (const { workflow } of workflowSources) traceJobDependencies(workflow.mainJob, tracedJobs);
|
|
635
|
+
const notExportedJobs = [];
|
|
636
|
+
for (const jobName of tracedJobs.keys()) if (!allJobsMap.has(jobName)) notExportedJobs.push(jobName);
|
|
637
|
+
if (notExportedJobs.length > 0) throw new Error(`The following workflow jobs are used but not exported:\n` + notExportedJobs.map((name) => ` - "${name}"`).join("\n") + "\n\nAll workflow jobs must be named exports. Example:\n export const myJob = createWorkflowJob({ name: \"my-job\", ... });\n\nAlso ensure that files containing job exports are included in the workflow.files glob pattern.");
|
|
638
|
+
unusedJobs = Array.from(allJobsMap.keys()).filter((jobName) => !tracedJobs.has(jobName));
|
|
639
|
+
for (const [jobName, job] of tracedJobs) {
|
|
640
|
+
const exportedMetadata = allJobsMap.get(jobName);
|
|
641
|
+
const depNames = job.deps?.map((dep) => dep.name);
|
|
642
|
+
collectedJobs.push({
|
|
643
|
+
...exportedMetadata,
|
|
644
|
+
deps: depNames
|
|
645
|
+
});
|
|
601
646
|
}
|
|
602
|
-
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
}
|
|
618
|
-
|
|
619
|
-
|
|
620
|
-
if (!fs.existsSync(outputDir)) fs.mkdirSync(outputDir, { recursive: true });
|
|
621
|
-
let tsconfig;
|
|
622
|
-
try {
|
|
623
|
-
tsconfig = await resolveTSConfig();
|
|
624
|
-
} catch {
|
|
625
|
-
tsconfig = void 0;
|
|
626
|
-
}
|
|
627
|
-
await rolldown.build(rolldown.defineConfig({
|
|
628
|
-
input,
|
|
629
|
-
output: {
|
|
630
|
-
file: output,
|
|
631
|
-
format: "esm",
|
|
632
|
-
sourcemap: false,
|
|
633
|
-
minify: false,
|
|
634
|
-
inlineDynamicImports: true
|
|
635
|
-
},
|
|
636
|
-
tsconfig,
|
|
637
|
-
treeshake: {
|
|
638
|
-
moduleSideEffects: false,
|
|
639
|
-
annotations: true,
|
|
640
|
-
unknownGlobalSideEffects: false
|
|
641
|
-
},
|
|
642
|
-
logLevel: "silent"
|
|
643
|
-
}));
|
|
644
|
-
const stats = fs.statSync(output);
|
|
645
|
-
console.log(`Pre-bundle output size: ${(stats.size / 1024).toFixed(2)} KB`);
|
|
647
|
+
return {
|
|
648
|
+
workflows,
|
|
649
|
+
workflowSources,
|
|
650
|
+
jobs: collectedJobs,
|
|
651
|
+
unusedJobs,
|
|
652
|
+
fileCount
|
|
653
|
+
};
|
|
654
|
+
}
|
|
655
|
+
/**
|
|
656
|
+
* Print workflow loading logs.
|
|
657
|
+
*/
|
|
658
|
+
function printLoadedWorkflows(result) {
|
|
659
|
+
if (result.fileCount === 0) return;
|
|
660
|
+
console.log("");
|
|
661
|
+
console.log("Found", styleText("cyanBright", result.fileCount.toString()), "workflow files");
|
|
662
|
+
for (const { workflow, sourceFile } of result.workflowSources) {
|
|
663
|
+
const relativePath = path.relative(process.cwd(), sourceFile);
|
|
664
|
+
console.log("Workflow:", styleText("greenBright", `"${workflow.name}"`), "loaded from", styleText("cyan", relativePath));
|
|
646
665
|
}
|
|
647
|
-
|
|
648
|
-
|
|
649
|
-
|
|
650
|
-
|
|
651
|
-
|
|
652
|
-
|
|
653
|
-
|
|
654
|
-
|
|
666
|
+
if (result.unusedJobs.length > 0) console.warn(`⚠️ Warning: Unused workflow jobs found: ${result.unusedJobs.join(", ")}`);
|
|
667
|
+
}
|
|
668
|
+
/**
|
|
669
|
+
* Load a single file and extract jobs and workflow
|
|
670
|
+
*/
|
|
671
|
+
async function loadFileContent(filePath) {
|
|
672
|
+
const jobs = [];
|
|
673
|
+
let workflow = null;
|
|
674
|
+
try {
|
|
675
|
+
const module = await import(`${pathToFileURL(filePath).href}?t=${Date.now()}`);
|
|
676
|
+
for (const [exportName, exportValue] of Object.entries(module)) {
|
|
677
|
+
if (exportName === "default") {
|
|
678
|
+
const workflowResult = WorkflowSchema.safeParse(exportValue);
|
|
679
|
+
if (workflowResult.success) workflow = workflowResult.data;
|
|
680
|
+
continue;
|
|
681
|
+
}
|
|
682
|
+
if (isWorkflowJob(exportValue)) {
|
|
683
|
+
const jobResult = WorkflowJobSchema.safeParse(exportValue);
|
|
684
|
+
if (jobResult.success) jobs.push({
|
|
685
|
+
name: jobResult.data.name,
|
|
686
|
+
exportName,
|
|
687
|
+
sourceFile: filePath
|
|
688
|
+
});
|
|
689
|
+
}
|
|
655
690
|
}
|
|
656
|
-
|
|
657
|
-
|
|
658
|
-
|
|
659
|
-
|
|
660
|
-
|
|
661
|
-
file: outputFile,
|
|
662
|
-
format: "esm",
|
|
663
|
-
sourcemap: true,
|
|
664
|
-
minify: true,
|
|
665
|
-
inlineDynamicImports: true
|
|
666
|
-
},
|
|
667
|
-
tsconfig,
|
|
668
|
-
treeshake: {
|
|
669
|
-
moduleSideEffects: false,
|
|
670
|
-
annotations: true,
|
|
671
|
-
unknownGlobalSideEffects: false
|
|
672
|
-
},
|
|
673
|
-
logLevel: "silent"
|
|
674
|
-
}));
|
|
675
|
-
}));
|
|
691
|
+
} catch (error) {
|
|
692
|
+
const relativePath = path.relative(process.cwd(), filePath);
|
|
693
|
+
console.error(styleText("red", "Failed to load workflow from"), styleText("redBright", relativePath));
|
|
694
|
+
console.error(error);
|
|
695
|
+
throw error;
|
|
676
696
|
}
|
|
677
|
-
|
|
697
|
+
return {
|
|
698
|
+
jobs,
|
|
699
|
+
workflow
|
|
700
|
+
};
|
|
701
|
+
}
|
|
702
|
+
/**
|
|
703
|
+
* Check if a value is a WorkflowJob by looking for the brand symbol
|
|
704
|
+
*/
|
|
705
|
+
function isWorkflowJob(value) {
|
|
706
|
+
return value != null && typeof value === "object" && WORKFLOW_JOB_BRAND in value && value[WORKFLOW_JOB_BRAND] === true;
|
|
707
|
+
}
|
|
708
|
+
/**
|
|
709
|
+
* Recursively trace all job dependencies
|
|
710
|
+
*/
|
|
711
|
+
function traceJobDependencies(job, visited) {
|
|
712
|
+
if (visited.has(job.name)) return;
|
|
713
|
+
visited.set(job.name, job);
|
|
714
|
+
if (job.deps && Array.isArray(job.deps)) for (const dep of job.deps) traceJobDependencies(dep, visited);
|
|
715
|
+
}
|
|
678
716
|
|
|
679
717
|
//#endregion
|
|
680
718
|
//#region src/cli/bundler/executor/loader.ts
|
|
681
|
-
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
|
|
686
|
-
|
|
687
|
-
}
|
|
688
|
-
};
|
|
719
|
+
async function loadExecutor(executorFilePath) {
|
|
720
|
+
const executor = (await import(`${pathToFileURL(executorFilePath).href}?t=${Date.now()}`)).default;
|
|
721
|
+
const parseResult = ExecutorSchema.safeParse(executor);
|
|
722
|
+
if (!parseResult.success) return null;
|
|
723
|
+
return parseResult.data;
|
|
724
|
+
}
|
|
689
725
|
|
|
690
726
|
//#endregion
|
|
691
|
-
//#region src/cli/bundler/executor/
|
|
692
|
-
|
|
693
|
-
|
|
694
|
-
|
|
695
|
-
|
|
696
|
-
|
|
697
|
-
|
|
698
|
-
|
|
699
|
-
|
|
700
|
-
|
|
701
|
-
|
|
702
|
-
|
|
727
|
+
//#region src/cli/bundler/executor/executor-bundler.ts
|
|
728
|
+
/**
|
|
729
|
+
* Bundle executors from the specified configuration
|
|
730
|
+
*
|
|
731
|
+
* This function:
|
|
732
|
+
* 1. Creates entry file that extracts operation.body
|
|
733
|
+
* 2. Bundles in a single step with tree-shaking
|
|
734
|
+
*/
|
|
735
|
+
async function bundleExecutors(config) {
|
|
736
|
+
const files = loadFilesWithIgnores(config);
|
|
737
|
+
if (files.length === 0) throw new Error(`No files found matching pattern: ${config.files?.join(", ")}`);
|
|
738
|
+
console.log("");
|
|
739
|
+
console.log("Bundling", styleText("cyanBright", files.length.toString()), "files for", styleText("cyan", "\"executor\""));
|
|
740
|
+
const executors = [];
|
|
741
|
+
for (const file of files) {
|
|
742
|
+
const executor = await loadExecutor(file);
|
|
743
|
+
if (!executor) {
|
|
744
|
+
console.log(styleText("dim", ` Skipping: ${file} (could not be loaded)`));
|
|
745
|
+
continue;
|
|
746
|
+
}
|
|
747
|
+
if (!["function", "jobFunction"].includes(executor.operation.kind)) {
|
|
748
|
+
console.log(styleText("dim", ` Skipping: ${executor.name} (not a function executor)`));
|
|
749
|
+
continue;
|
|
750
|
+
}
|
|
751
|
+
executors.push({
|
|
752
|
+
name: executor.name,
|
|
753
|
+
sourceFile: file
|
|
754
|
+
});
|
|
755
|
+
}
|
|
756
|
+
if (executors.length === 0) {
|
|
757
|
+
console.log(styleText("dim", " No function executors to bundle"));
|
|
758
|
+
return;
|
|
759
|
+
}
|
|
760
|
+
const outputDir = path.resolve(getDistDir(), "executors");
|
|
761
|
+
fs.mkdirSync(outputDir, { recursive: true });
|
|
762
|
+
let tsconfig;
|
|
763
|
+
try {
|
|
764
|
+
tsconfig = await resolveTSConfig();
|
|
765
|
+
} catch {
|
|
766
|
+
tsconfig = void 0;
|
|
767
|
+
}
|
|
768
|
+
await Promise.all(executors.map((executor) => bundleSingleExecutor(executor, outputDir, tsconfig)));
|
|
769
|
+
console.log(styleText("green", "Bundled"), styleText("cyan", "\"executor\""));
|
|
770
|
+
}
|
|
771
|
+
async function bundleSingleExecutor(executor, outputDir, tsconfig) {
|
|
772
|
+
const entryPath = path.join(outputDir, `${executor.name}.entry.js`);
|
|
773
|
+
const absoluteSourcePath = path.resolve(executor.sourceFile).replace(/\\/g, "/");
|
|
774
|
+
const entryContent = ml`
|
|
775
|
+
import _internalExecutor from "${absoluteSourcePath}";
|
|
703
776
|
|
|
704
|
-
|
|
705
|
-
export const __executor_function = ${exec.body.toString()};
|
|
706
|
-
`);
|
|
707
|
-
const stepsDir = path.join(tempDir, "executor_steps");
|
|
708
|
-
fs.mkdirSync(stepsDir, { recursive: true });
|
|
709
|
-
const executorFilePath = path.join(stepsDir, `${executor.name}.js`);
|
|
710
|
-
const relativePath = path.relative(stepsDir, transformedPath).replace(/\\/g, "/");
|
|
711
|
-
const executorContent = ml`
|
|
712
|
-
import { __executor_function } from "${relativePath}";
|
|
777
|
+
const __executor_function = _internalExecutor.operation.body;
|
|
713
778
|
|
|
714
|
-
|
|
715
|
-
|
|
716
|
-
|
|
717
|
-
|
|
718
|
-
|
|
719
|
-
|
|
779
|
+
globalThis.main = __executor_function;
|
|
780
|
+
`;
|
|
781
|
+
fs.writeFileSync(entryPath, entryContent);
|
|
782
|
+
const outputPath = path.join(outputDir, `${executor.name}.js`);
|
|
783
|
+
await rolldown.build(rolldown.defineConfig({
|
|
784
|
+
input: entryPath,
|
|
785
|
+
output: {
|
|
786
|
+
file: outputPath,
|
|
787
|
+
format: "esm",
|
|
788
|
+
sourcemap: true,
|
|
789
|
+
minify: true,
|
|
790
|
+
inlineDynamicImports: true
|
|
791
|
+
},
|
|
792
|
+
tsconfig,
|
|
793
|
+
treeshake: {
|
|
794
|
+
moduleSideEffects: false,
|
|
795
|
+
annotations: true,
|
|
796
|
+
unknownGlobalSideEffects: false
|
|
797
|
+
},
|
|
798
|
+
logLevel: "silent"
|
|
799
|
+
}));
|
|
800
|
+
}
|
|
720
801
|
|
|
721
802
|
//#endregion
|
|
722
803
|
//#region src/cli/bundler/resolver/loader.ts
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
|
|
729
|
-
}
|
|
730
|
-
};
|
|
804
|
+
async function loadResolver(resolverFilePath) {
|
|
805
|
+
const resolver = (await import(`${pathToFileURL(resolverFilePath).href}?t=${Date.now()}`)).default;
|
|
806
|
+
const parseResult = ResolverSchema.safeParse(resolver);
|
|
807
|
+
if (!parseResult.success) return null;
|
|
808
|
+
return parseResult.data;
|
|
809
|
+
}
|
|
731
810
|
|
|
732
811
|
//#endregion
|
|
733
|
-
//#region src/cli/bundler/resolver/
|
|
734
|
-
|
|
735
|
-
|
|
736
|
-
|
|
737
|
-
|
|
738
|
-
|
|
739
|
-
|
|
740
|
-
|
|
741
|
-
|
|
742
|
-
|
|
743
|
-
|
|
744
|
-
|
|
745
|
-
|
|
746
|
-
|
|
747
|
-
|
|
748
|
-
|
|
749
|
-
|
|
750
|
-
|
|
751
|
-
|
|
752
|
-
|
|
753
|
-
}
|
|
812
|
+
//#region src/cli/bundler/resolver/resolver-bundler.ts
|
|
813
|
+
/**
|
|
814
|
+
* Bundle resolvers for the specified namespace
|
|
815
|
+
*
|
|
816
|
+
* This function:
|
|
817
|
+
* 1. Uses a transform plugin to add validation wrapper during bundling
|
|
818
|
+
* 2. Creates entry file
|
|
819
|
+
* 3. Bundles in a single step with tree-shaking
|
|
820
|
+
*/
|
|
821
|
+
async function bundleResolvers(namespace, config) {
|
|
822
|
+
const files = loadFilesWithIgnores(config);
|
|
823
|
+
if (files.length === 0) throw new Error(`No files found matching pattern: ${config.files?.join(", ")}`);
|
|
824
|
+
console.log("");
|
|
825
|
+
console.log("Bundling", styleText("cyanBright", files.length.toString()), "files for", styleText("cyan", `"${namespace}"`));
|
|
826
|
+
const resolvers = [];
|
|
827
|
+
for (const file of files) {
|
|
828
|
+
const resolver = await loadResolver(file);
|
|
829
|
+
if (!resolver) {
|
|
830
|
+
console.log(styleText("dim", ` Skipping: ${file} (could not be loaded)`));
|
|
831
|
+
continue;
|
|
754
832
|
}
|
|
755
|
-
|
|
756
|
-
|
|
757
|
-
|
|
758
|
-
|
|
759
|
-
|
|
760
|
-
|
|
761
|
-
|
|
762
|
-
|
|
833
|
+
resolvers.push({
|
|
834
|
+
name: resolver.name,
|
|
835
|
+
sourceFile: file
|
|
836
|
+
});
|
|
837
|
+
}
|
|
838
|
+
const outputDir = path.resolve(getDistDir(), "resolvers");
|
|
839
|
+
fs.mkdirSync(outputDir, { recursive: true });
|
|
840
|
+
let tsconfig;
|
|
841
|
+
try {
|
|
842
|
+
tsconfig = await resolveTSConfig();
|
|
843
|
+
} catch {
|
|
844
|
+
tsconfig = void 0;
|
|
845
|
+
}
|
|
846
|
+
await Promise.all(resolvers.map((resolver) => bundleSingleResolver(resolver, outputDir, tsconfig)));
|
|
847
|
+
console.log(styleText("green", "Bundled"), styleText("cyan", `"${namespace}"`));
|
|
848
|
+
}
|
|
849
|
+
async function bundleSingleResolver(resolver, outputDir, tsconfig) {
|
|
850
|
+
const entryPath = path.join(outputDir, `${resolver.name}.entry.js`);
|
|
851
|
+
const absoluteSourcePath = path.resolve(resolver.sourceFile).replace(/\\/g, "/");
|
|
852
|
+
const entryContent = ml`
|
|
853
|
+
import _internalResolver from "${absoluteSourcePath}";
|
|
854
|
+
import { t } from "@tailor-platform/sdk";
|
|
763
855
|
|
|
764
|
-
|
|
765
|
-
|
|
766
|
-
.
|
|
767
|
-
|
|
768
|
-
|
|
769
|
-
|
|
770
|
-
|
|
771
|
-
throw new Error(\`Failed to input validation:\\n\${errorMessages}\`);
|
|
772
|
-
}
|
|
773
|
-
}
|
|
856
|
+
const $tailor_resolver_body = async (context) => {
|
|
857
|
+
if (_internalResolver.input) {
|
|
858
|
+
const result = t.object(_internalResolver.input).parse({
|
|
859
|
+
value: context.input,
|
|
860
|
+
data: context.input,
|
|
861
|
+
user: context.user,
|
|
862
|
+
});
|
|
774
863
|
|
|
775
|
-
|
|
776
|
-
|
|
864
|
+
if (result.issues) {
|
|
865
|
+
const errorMessages = result.issues
|
|
866
|
+
.map(issue => {
|
|
867
|
+
const path = issue.path ? issue.path.join('.') : '';
|
|
868
|
+
return path ? \` \${path}: \${issue.message}\` : issue.message;
|
|
869
|
+
})
|
|
870
|
+
.join('\\n');
|
|
871
|
+
throw new Error(\`Failed to input validation:\\n\${errorMessages}\`);
|
|
872
|
+
}
|
|
873
|
+
}
|
|
874
|
+
|
|
875
|
+
return _internalResolver.body(context);
|
|
876
|
+
};
|
|
877
|
+
|
|
878
|
+
globalThis.main = $tailor_resolver_body;
|
|
879
|
+
`;
|
|
880
|
+
fs.writeFileSync(entryPath, entryContent);
|
|
881
|
+
const outputPath = path.join(outputDir, `${resolver.name}.js`);
|
|
882
|
+
await rolldown.build(rolldown.defineConfig({
|
|
883
|
+
input: entryPath,
|
|
884
|
+
output: {
|
|
885
|
+
file: outputPath,
|
|
886
|
+
format: "esm",
|
|
887
|
+
sourcemap: true,
|
|
888
|
+
minify: true,
|
|
889
|
+
inlineDynamicImports: true
|
|
890
|
+
},
|
|
891
|
+
tsconfig,
|
|
892
|
+
treeshake: {
|
|
893
|
+
moduleSideEffects: false,
|
|
894
|
+
annotations: true,
|
|
895
|
+
unknownGlobalSideEffects: false
|
|
896
|
+
},
|
|
897
|
+
logLevel: "silent"
|
|
898
|
+
}));
|
|
777
899
|
}
|
|
778
|
-
` : bodyFnStr;
|
|
779
|
-
fs.writeFileSync(transformedPath, ml`
|
|
780
|
-
${modifiedSourceText}
|
|
781
900
|
|
|
782
|
-
|
|
783
|
-
|
|
784
|
-
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
|
|
789
|
-
|
|
790
|
-
|
|
791
|
-
|
|
792
|
-
|
|
793
|
-
|
|
901
|
+
//#endregion
|
|
902
|
+
//#region src/cli/bundler/workflow/ast-transformer.ts
|
|
903
|
+
/**
|
|
904
|
+
* Check if a module source is from @tailor-platform/sdk (including subpaths)
|
|
905
|
+
*/
|
|
906
|
+
function isTailorSdkSource(source) {
|
|
907
|
+
return /^@tailor-platform\/sdk(\/|$)/.test(source);
|
|
908
|
+
}
|
|
909
|
+
/**
|
|
910
|
+
* Get the source string from a dynamic import or require call
|
|
911
|
+
*/
|
|
912
|
+
function getImportSource(node) {
|
|
913
|
+
if (!node) return null;
|
|
914
|
+
if (node.type === "ImportExpression") {
|
|
915
|
+
const source = node.source;
|
|
916
|
+
if (source.type === "Literal" && typeof source.value === "string") return source.value;
|
|
917
|
+
}
|
|
918
|
+
if (node.type === "CallExpression") {
|
|
919
|
+
const callExpr = node;
|
|
920
|
+
if (callExpr.callee.type === "Identifier" && callExpr.callee.name === "require") {
|
|
921
|
+
const arg = callExpr.arguments[0];
|
|
922
|
+
if (arg && "type" in arg && arg.type === "Literal" && "value" in arg && typeof arg.value === "string") return arg.value;
|
|
923
|
+
}
|
|
794
924
|
}
|
|
795
|
-
|
|
925
|
+
return null;
|
|
926
|
+
}
|
|
927
|
+
/**
|
|
928
|
+
* Unwrap AwaitExpression to get the inner expression
|
|
929
|
+
*/
|
|
930
|
+
function unwrapAwait(node) {
|
|
931
|
+
if (node?.type === "AwaitExpression") return node.argument;
|
|
932
|
+
return node;
|
|
933
|
+
}
|
|
934
|
+
/**
|
|
935
|
+
* Collect all import bindings for createWorkflowJob from @tailor-platform/sdk
|
|
936
|
+
* Returns a Set of local names that refer to createWorkflowJob
|
|
937
|
+
*/
|
|
938
|
+
function collectCreateWorkflowJobBindings(program) {
|
|
939
|
+
const bindings = /* @__PURE__ */ new Set();
|
|
940
|
+
function walk(node) {
|
|
941
|
+
if (!node || typeof node !== "object") return;
|
|
942
|
+
const nodeType = node.type;
|
|
943
|
+
if (nodeType === "ImportDeclaration") {
|
|
944
|
+
const importDecl = node;
|
|
945
|
+
const source = importDecl.source?.value;
|
|
946
|
+
if (typeof source === "string" && isTailorSdkSource(source)) {
|
|
947
|
+
for (const specifier of importDecl.specifiers || []) if (specifier.type === "ImportSpecifier") {
|
|
948
|
+
const importSpec = specifier;
|
|
949
|
+
const imported = importSpec.imported.type === "Identifier" ? importSpec.imported.name : importSpec.imported.value;
|
|
950
|
+
if (imported === "createWorkflowJob") bindings.add(importSpec.local?.name || imported);
|
|
951
|
+
} else if (specifier.type === "ImportDefaultSpecifier" || specifier.type === "ImportNamespaceSpecifier") {
|
|
952
|
+
const spec = specifier;
|
|
953
|
+
bindings.add(`__namespace__:${spec.local?.name}`);
|
|
954
|
+
}
|
|
955
|
+
}
|
|
956
|
+
}
|
|
957
|
+
if (nodeType === "VariableDeclaration") {
|
|
958
|
+
const varDecl = node;
|
|
959
|
+
for (const decl of varDecl.declarations || []) {
|
|
960
|
+
const init = unwrapAwait(decl.init);
|
|
961
|
+
const source = getImportSource(init);
|
|
962
|
+
if (source && isTailorSdkSource(source)) {
|
|
963
|
+
const id = decl.id;
|
|
964
|
+
if (id?.type === "Identifier") bindings.add(`__namespace__:${id.name}`);
|
|
965
|
+
else if (id?.type === "ObjectPattern") {
|
|
966
|
+
const objPattern = id;
|
|
967
|
+
for (const prop of objPattern.properties || []) if (prop.type === "Property") {
|
|
968
|
+
const bindingProp = prop;
|
|
969
|
+
const keyName = bindingProp.key.type === "Identifier" ? bindingProp.key.name : bindingProp.key.value;
|
|
970
|
+
if (keyName === "createWorkflowJob") {
|
|
971
|
+
const localName = bindingProp.value.type === "Identifier" ? bindingProp.value.name : keyName;
|
|
972
|
+
bindings.add(localName ?? "");
|
|
973
|
+
}
|
|
974
|
+
}
|
|
975
|
+
}
|
|
976
|
+
}
|
|
977
|
+
}
|
|
978
|
+
}
|
|
979
|
+
for (const key of Object.keys(node)) {
|
|
980
|
+
const child = node[key];
|
|
981
|
+
if (Array.isArray(child)) child.forEach((c) => walk(c));
|
|
982
|
+
else if (child && typeof child === "object") walk(child);
|
|
983
|
+
}
|
|
984
|
+
}
|
|
985
|
+
walk(program);
|
|
986
|
+
return bindings;
|
|
987
|
+
}
|
|
988
|
+
/**
|
|
989
|
+
* Check if a CallExpression is a createWorkflowJob call
|
|
990
|
+
*/
|
|
991
|
+
function isCreateWorkflowJobCall(node, bindings) {
|
|
992
|
+
if (node.type !== "CallExpression") return false;
|
|
993
|
+
const callee = node.callee;
|
|
994
|
+
if (callee.type === "Identifier") {
|
|
995
|
+
const identifier = callee;
|
|
996
|
+
return bindings.has(identifier.name);
|
|
997
|
+
}
|
|
998
|
+
if (callee.type === "MemberExpression") {
|
|
999
|
+
const memberExpr = callee;
|
|
1000
|
+
if (!memberExpr.computed) {
|
|
1001
|
+
const object = memberExpr.object;
|
|
1002
|
+
const property = memberExpr.property;
|
|
1003
|
+
if (object.type === "Identifier" && bindings.has(`__namespace__:${object.name}`) && property.name === "createWorkflowJob") return true;
|
|
1004
|
+
}
|
|
1005
|
+
}
|
|
1006
|
+
return false;
|
|
1007
|
+
}
|
|
1008
|
+
/**
|
|
1009
|
+
* Check if a node is a string literal
|
|
1010
|
+
*/
|
|
1011
|
+
function isStringLiteral(node) {
|
|
1012
|
+
return node?.type === "Literal" && typeof node.value === "string";
|
|
1013
|
+
}
|
|
1014
|
+
/**
|
|
1015
|
+
* Check if a node is a function expression (arrow or regular)
|
|
1016
|
+
*/
|
|
1017
|
+
function isFunctionExpression(node) {
|
|
1018
|
+
return node?.type === "ArrowFunctionExpression" || node?.type === "FunctionExpression";
|
|
1019
|
+
}
|
|
1020
|
+
/**
|
|
1021
|
+
* Find a property in an object expression
|
|
1022
|
+
*/
|
|
1023
|
+
function findProperty(properties, name) {
|
|
1024
|
+
for (const prop of properties) if (prop.type === "Property") {
|
|
1025
|
+
const objProp = prop;
|
|
1026
|
+
if ((objProp.key.type === "Identifier" ? objProp.key.name : objProp.key.type === "Literal" ? objProp.key.value : null) === name) return {
|
|
1027
|
+
key: objProp.key,
|
|
1028
|
+
value: objProp.value,
|
|
1029
|
+
start: objProp.start,
|
|
1030
|
+
end: objProp.end
|
|
1031
|
+
};
|
|
1032
|
+
}
|
|
1033
|
+
return null;
|
|
1034
|
+
}
|
|
1035
|
+
/**
|
|
1036
|
+
* Find all workflow jobs by detecting createWorkflowJob calls from @tailor-platform/sdk
|
|
1037
|
+
*/
|
|
1038
|
+
function findAllJobs(program, _sourceText) {
|
|
1039
|
+
const jobs = [];
|
|
1040
|
+
const bindings = collectCreateWorkflowJobBindings(program);
|
|
1041
|
+
function walk(node, parents = []) {
|
|
1042
|
+
if (!node || typeof node !== "object") return;
|
|
1043
|
+
if (isCreateWorkflowJobCall(node, bindings)) {
|
|
1044
|
+
const args = node.arguments;
|
|
1045
|
+
if (args?.length >= 1 && args[0]?.type === "ObjectExpression") {
|
|
1046
|
+
const configObj = args[0];
|
|
1047
|
+
const nameProp = findProperty(configObj.properties, "name");
|
|
1048
|
+
const bodyProp = findProperty(configObj.properties, "body");
|
|
1049
|
+
const depsProp = findProperty(configObj.properties, "deps");
|
|
1050
|
+
if (nameProp && isStringLiteral(nameProp.value) && bodyProp && isFunctionExpression(bodyProp.value)) {
|
|
1051
|
+
let statementRange;
|
|
1052
|
+
for (let i = 0; i < parents.length; i++) {
|
|
1053
|
+
const parent = parents[i];
|
|
1054
|
+
if (parent.type === "ExportNamedDeclaration" || parent.type === "VariableDeclaration") {
|
|
1055
|
+
statementRange = {
|
|
1056
|
+
start: parent.start,
|
|
1057
|
+
end: parent.end
|
|
1058
|
+
};
|
|
1059
|
+
break;
|
|
1060
|
+
}
|
|
1061
|
+
}
|
|
1062
|
+
jobs.push({
|
|
1063
|
+
name: nameProp.value.value,
|
|
1064
|
+
nameRange: {
|
|
1065
|
+
start: nameProp.start,
|
|
1066
|
+
end: nameProp.end
|
|
1067
|
+
},
|
|
1068
|
+
depsRange: depsProp ? {
|
|
1069
|
+
start: depsProp.start,
|
|
1070
|
+
end: depsProp.end
|
|
1071
|
+
} : void 0,
|
|
1072
|
+
bodyValueRange: {
|
|
1073
|
+
start: bodyProp.value.start,
|
|
1074
|
+
end: bodyProp.value.end
|
|
1075
|
+
},
|
|
1076
|
+
statementRange
|
|
1077
|
+
});
|
|
1078
|
+
}
|
|
1079
|
+
}
|
|
1080
|
+
}
|
|
1081
|
+
const newParents = [...parents, node];
|
|
1082
|
+
for (const key of Object.keys(node)) {
|
|
1083
|
+
const child = node[key];
|
|
1084
|
+
if (Array.isArray(child)) child.forEach((c) => walk(c, newParents));
|
|
1085
|
+
else if (child && typeof child === "object") walk(child, newParents);
|
|
1086
|
+
}
|
|
1087
|
+
}
|
|
1088
|
+
walk(program);
|
|
1089
|
+
return jobs;
|
|
1090
|
+
}
|
|
1091
|
+
/**
|
|
1092
|
+
* Apply string replacements to source code
|
|
1093
|
+
* Replacements are applied from end to start to maintain positions
|
|
1094
|
+
*/
|
|
1095
|
+
function applyReplacements(source, replacements) {
|
|
1096
|
+
const sorted = [...replacements].sort((a, b) => b.start - a.start);
|
|
1097
|
+
let result = source;
|
|
1098
|
+
for (const r of sorted) result = result.slice(0, r.start) + r.text + result.slice(r.end);
|
|
1099
|
+
return result;
|
|
1100
|
+
}
|
|
1101
|
+
/**
|
|
1102
|
+
* Find the end position including trailing comma
|
|
1103
|
+
*/
|
|
1104
|
+
function findTrailingCommaEnd(source, position) {
|
|
1105
|
+
let i = position;
|
|
1106
|
+
while (i < source.length) {
|
|
1107
|
+
const char = source[i];
|
|
1108
|
+
if (char === ",") return i + 1;
|
|
1109
|
+
if (!/\s/.test(char)) break;
|
|
1110
|
+
i++;
|
|
1111
|
+
}
|
|
1112
|
+
return position;
|
|
1113
|
+
}
|
|
1114
|
+
/**
|
|
1115
|
+
* Find the end of a statement including any trailing newline
|
|
1116
|
+
*/
|
|
1117
|
+
function findStatementEnd(source, position) {
|
|
1118
|
+
let i = position;
|
|
1119
|
+
while (i < source.length && (source[i] === ";" || source[i] === " " || source[i] === " ")) i++;
|
|
1120
|
+
if (i < source.length && source[i] === "\n") i++;
|
|
1121
|
+
return i;
|
|
1122
|
+
}
|
|
1123
|
+
/**
 * Find variable declarations by export name.
 *
 * Recursively walks an ESTree-style program and records, for every declared
 * identifier, the source range of its declaration statement:
 * - `export const x = ...` records the whole export statement's range and
 *   overwrites any previously recorded entry for the same name;
 * - a plain `const x = ...` records the declaration's range only if the name
 *   has not been seen yet.
 *
 * @param program - Parsed AST root node
 * @returns Map of identifier name -> { start, end } statement range
 */
function findVariableDeclarationsByName(program) {
	const declarations = new Map();
	// Record every Identifier declarator of `varDecl` under `range`.
	// `overwrite` controls whether an existing entry may be replaced.
	const recordDeclarators = (varDecl, range, overwrite) => {
		for (const declarator of varDecl.declarations || []) {
			const id = declarator.id;
			if (id?.type !== "Identifier" || !id.name) continue;
			if (overwrite || !declarations.has(id.name)) declarations.set(id.name, { start: range.start, end: range.end });
		}
	};
	const visit = (node) => {
		if (!node || typeof node !== "object") return;
		if (node.type === "VariableDeclaration") recordDeclarators(node, node, false);
		if (node.type === "ExportNamedDeclaration" && node.declaration?.type === "VariableDeclaration")
			// Use the export statement's own range so removal strips `export` too.
			recordDeclarators(node.declaration, node, true);
		for (const key of Object.keys(node)) {
			const child = node[key];
			if (Array.isArray(child)) for (const item of child) visit(item);
			else if (child && typeof child === "object") visit(child);
		}
	};
	visit(program);
	return declarations;
}
|
|
1161
|
+
/**
 * Transform workflow source code so a single job can be bundled standalone.
 * - Target job: remove its `deps` property (plus any trailing comma) so the
 *   bundler does not pull sibling jobs into this bundle.
 * - Other jobs: remove their entire variable declaration statement, or — when
 *   no statement range was detected — stub the job body out as `() => {}`.
 * - Additionally removes declarations matching `otherJobExportNames`, which
 *   catches jobs re-exported under names the AST job scan did not attribute.
 *
 * @param source - The source code to transform
 * @param targetJobName - The name of the target job (from job config)
 * @param targetJobExportName - The export name of the target job (optional, for enhanced detection)
 * @param otherJobExportNames - Export names of other jobs to remove (optional, for enhanced detection)
 */
function transformWorkflowSource(source, targetJobName, targetJobExportName, otherJobExportNames) {
	// Parse as TS regardless of actual extension; the filename only selects the grammar.
	const { program } = parseSync("input.ts", source);
	const detectedJobs = findAllJobs(program, source);
	const allDeclarations = findVariableDeclarationsByName(program);
	const replacements = [];
	// Tracks "start-end" keys of statements already scheduled for removal so the
	// job scan and the export-name scan don't emit overlapping deletions.
	const removedRanges = /* @__PURE__ */ new Set();
	const markRemoved = (start, end) => {
		removedRanges.add(`${start}-${end}`);
	};
	const isRemoved = (start, end) => {
		return removedRanges.has(`${start}-${end}`);
	};
	for (const job of detectedJobs) if (job.name === targetJobName) {
		// Target job: strip only its deps property (and its trailing comma).
		if (job.depsRange) replacements.push({
			start: job.depsRange.start,
			end: findTrailingCommaEnd(source, job.depsRange.end),
			text: ""
		});
	} else if (job.statementRange && !isRemoved(job.statementRange.start, job.statementRange.end)) {
		// Other job declared as a statement: delete the whole statement.
		replacements.push({
			start: job.statementRange.start,
			end: findStatementEnd(source, job.statementRange.end),
			text: ""
		});
		markRemoved(job.statementRange.start, job.statementRange.end);
	} else if (!job.statementRange) replacements.push({
		// No removable statement (e.g. inline expression): neutralize the body instead.
		start: job.bodyValueRange.start,
		end: job.bodyValueRange.end,
		text: "() => {}"
	});
	// Second pass: remove declarations for known other-job export names,
	// skipping the target's own export and anything already removed above.
	if (otherJobExportNames) for (const exportName of otherJobExportNames) {
		if (exportName === targetJobExportName) continue;
		const declRange = allDeclarations.get(exportName);
		if (declRange && !isRemoved(declRange.start, declRange.end)) {
			replacements.push({
				start: declRange.start,
				end: findStatementEnd(source, declRange.end),
				text: ""
			});
			markRemoved(declRange.start, declRange.end);
		}
	}
	return applyReplacements(source, replacements);
}
|
|
1215
|
+
|
|
1216
|
+
//#endregion
|
|
1217
|
+
//#region src/cli/bundler/workflow/workflow-bundler.ts
|
|
1218
|
+
/**
 * Bundle workflow jobs.
 *
 * This function:
 * 1. Uses a transform plugin to remove deps during bundling (preserves module resolution)
 * 2. Creates entry files
 * 3. Bundles each job in parallel, in a single step with tree-shaking
 *
 * @param allJobs - Collected workflow job descriptors to bundle
 */
async function bundleWorkflowJobs(allJobs) {
	const jobCount = allJobs.length;
	if (jobCount === 0) {
		console.log(styleText("dim", "No workflow jobs to bundle"));
		return;
	}
	console.log("");
	console.log("Bundling", styleText("cyanBright", String(jobCount)), "files for", styleText("cyan", "\"workflow-job\""));
	const jobsOutDir = path.resolve(getDistDir(), "workflow-jobs");
	fs.mkdirSync(jobsOutDir, { recursive: true });
	// Best-effort tsconfig resolution: bundling proceeds without one on failure.
	let tsconfig;
	try {
		tsconfig = await resolveTSConfig();
	} catch {
		tsconfig = void 0;
	}
	await Promise.all(allJobs.map((job) => bundleSingleJob(job, allJobs, jobsOutDir, tsconfig)));
	console.log(styleText("green", "Bundled"), styleText("cyan", "\"workflow-job\""));
}
|
|
1244
|
+
/**
 * Bundle one workflow job into `<outputDir>/<job.name>.js`.
 *
 * Writes a synthetic entry module that imports the job's export, builds a
 * `jobs` lookup for the job's declared dependencies (each entry triggers the
 * dependency remotely via `tailor.workflow.triggerJobFunction`), and exposes
 * `globalThis.main` as the job runtime entry point. Then bundles that entry
 * with rolldown, using a transform plugin that strips other jobs / the
 * target's deps from any module mentioning `createWorkflowJob`.
 *
 * @param job - Job descriptor ({ name, exportName, sourceFile, deps })
 * @param allJobs - All collected jobs (used to resolve deps and sibling export names)
 * @param outputDir - Directory for the entry file and bundled output
 * @param tsconfig - Resolved tsconfig path/object, or undefined
 */
async function bundleSingleJob(job, allJobs, outputDir, tsconfig) {
	const depsJobNames = findJobDeps(job.name, allJobs);
	const jobsObject = generateJobsObject(depsJobNames);
	const entryPath = path.join(outputDir, `${job.name}.entry.js`);
	// Normalize to forward slashes so the generated import works on Windows.
	const absoluteSourcePath = path.resolve(job.sourceFile).replace(/\\/g, "/");
	// NOTE(review): `ml` (multiline-ts) presumably strips the template's common
	// leading indentation — confirm the emitted entry file is flush-left.
	const entryContent = ml`
		import { ${job.exportName} } from "${absoluteSourcePath}";

		const jobs = {
			${jobsObject}
		};

		globalThis.main = async (input) => {
			return await ${job.exportName}.body(input, jobs);
		};
	`;
	fs.writeFileSync(entryPath, entryContent);
	const outputPath = path.join(outputDir, `${job.name}.js`);
	// Export names of every other job; the transform plugin removes their declarations.
	const otherJobExportNames = allJobs.filter((j) => j.name !== job.name).map((j) => j.exportName);
	await rolldown.build(rolldown.defineConfig({
		input: entryPath,
		output: {
			file: outputPath,
			format: "esm",
			sourcemap: true,
			minify: true,
			inlineDynamicImports: true
		},
		tsconfig,
		plugins: [{
			name: "workflow-transform",
			transform: {
				filter: { id: { include: [/\.ts$/, /\.js$/] } },
				handler(code) {
					// Fast path: only parse/transform modules that define workflow jobs.
					if (!code.includes("createWorkflowJob")) return null;
					return { code: transformWorkflowSource(code, job.name, job.exportName, otherJobExportNames) };
				}
			}
		}],
		// Aggressive tree-shaking so removed sibling jobs drop their imports too.
		treeshake: {
			moduleSideEffects: false,
			annotations: true,
			unknownGlobalSideEffects: false
		},
		logLevel: "silent"
	}));
}
|
|
1291
|
+
/**
 * Find the dependencies of a specific job.
 *
 * @param {string} targetJobName - Name of the job to look up
 * @param {Array} allJobs - All collected job descriptors
 * @returns The job's `deps` array, or `[]` when the job is unknown or has none
 */
function findJobDeps(targetJobName, allJobs) {
	const match = allJobs.find((candidate) => candidate.name === targetJobName);
	return match?.deps ?? [];
}
|
|
1297
|
+
/**
 * Render the body of the `jobs` object for a generated entry file.
 *
 * Each dependency becomes a property whose key is the job name with `-` and
 * whitespace mapped to `_`, and whose value triggers the dependency remotely
 * via `tailor.workflow.triggerJobFunction` (using the ORIGINAL job name).
 *
 * @param {string[]} jobNames - Dependency job names
 * @returns {string} Comma/newline-joined property list, or "" when empty
 */
function generateJobsObject(jobNames) {
	if (jobNames.length === 0) return "";
	const entries = jobNames.map((jobName) => {
		const key = jobName.replace(/[-\s]/g, "_");
		return `"${key}": (args) => tailor.workflow.triggerJobFunction("${jobName}", args)`;
	});
	return entries.join(",\n ");
}
|
|
796
1303
|
|
|
797
1304
|
//#endregion
|
|
798
1305
|
//#region src/parser/generator-config/index.ts
|
|
@@ -4314,7 +4821,7 @@ async function planResolvers(client, workspaceId, pipelines, executors, deletedS
|
|
|
4314
4821
|
return changeSet;
|
|
4315
4822
|
}
|
|
4316
4823
|
function processResolver(resolver, executorUsedResolvers, env) {
|
|
4317
|
-
const functionPath = path.join(getDistDir(), "
|
|
4824
|
+
const functionPath = path.join(getDistDir(), "resolvers", `${resolver.name}.js`);
|
|
4318
4825
|
let functionCode = "";
|
|
4319
4826
|
try {
|
|
4320
4827
|
functionCode = fs.readFileSync(functionPath, "utf-8");
|
|
@@ -5025,6 +5532,134 @@ function protoGqlOperand(operand) {
|
|
|
5025
5532
|
} };
|
|
5026
5533
|
}
|
|
5027
5534
|
|
|
5535
|
+
//#endregion
|
|
5536
|
+
//#region src/cli/apply/services/workflow.ts
|
|
5537
|
+
/**
 * Apply a planned workflow change set against the platform API.
 *
 * "create-update" phase: for each create/update entry, upload every job
 * script one at a time (collecting returned job-function versions), then
 * create/update the workflow pointing at its main job. Entries themselves
 * are processed in parallel; all creates complete before updates start.
 * "delete" phase: delete every workflow in `changeSet.deletes` in parallel.
 * Any other phase value is a no-op.
 *
 * @param client - Operator API client
 * @param changeSet - Planned creates/updates/deletes (see planWorkflow)
 * @param phase - "create-update" (default) or "delete"
 */
async function applyWorkflow(client, changeSet, phase = "create-update") {
	// Upload each script of one change-set entry sequentially and return the
	// jobName -> uploaded job-function version map for the workflow call.
	const uploadScripts = async (entry, upload) => {
		const jobFunctions = {};
		for (const [jobName, script] of entry.scripts.entries()) {
			const response = await upload({
				workspaceId: entry.workspaceId,
				jobFunctionName: jobName,
				script
			});
			if (response.jobFunction) jobFunctions[jobName] = response.jobFunction.version;
		}
		return jobFunctions;
	};
	if (phase === "create-update") {
		await Promise.all(changeSet.creates.map(async (create) => {
			const jobFunctions = await uploadScripts(create, (req) => client.createWorkflowJobFunction(req));
			await client.createWorkflow({
				workspaceId: create.workspaceId,
				workflowName: create.workflow.name,
				mainJobFunctionName: create.workflow.mainJob.name,
				jobFunctions
			});
		}));
		await Promise.all(changeSet.updates.map(async (update) => {
			const jobFunctions = await uploadScripts(update, (req) => client.updateWorkflowJobFunction(req));
			await client.updateWorkflow({
				workspaceId: update.workspaceId,
				workflowName: update.workflow.name,
				mainJobFunctionName: update.workflow.mainJob.name,
				jobFunctions
			});
		}));
	} else if (phase === "delete") {
		await Promise.all(changeSet.deletes.map((del) => client.deleteWorkflow({
			workspaceId: del.workspaceId,
			workflowId: del.workflowId
		})));
	}
}
|
|
5578
|
+
/**
 * Recursively collect all job names from a workflow's mainJob and its dependencies.
 *
 * Traverses deps depth-first (preorder), so insertion order of the returned
 * Set matches the original recursive walk. Already-seen names act as a cycle
 * guard, so circular dep graphs terminate.
 *
 * @param workflow - Workflow whose `mainJob` (and transitive `deps`) to walk
 * @returns {Set<string>} All reachable job names
 */
function collectJobNamesFromWorkflow(workflow) {
	const jobNames = new Set();
	const pending = [workflow.mainJob];
	while (pending.length > 0) {
		const job = pending.shift();
		if (!job || jobNames.has(job.name)) continue;
		jobNames.add(job.name);
		// Prepend deps to keep depth-first preorder traversal.
		if (Array.isArray(job.deps)) pending.unshift(...job.deps);
	}
	return jobNames;
}
|
|
5591
|
+
/**
 * Plan workflow changes: diff the configured workflows against the workflows
 * that already exist in the workspace.
 *
 * Configured workflows that match an existing name become updates; the rest
 * become creates. Any remaining existing workflow (not in the config) is
 * scheduled for deletion. Each create/update carries the bundled job scripts
 * it needs, loaded from dist/workflow-jobs.
 *
 * @param client - Operator API client
 * @param workspaceId - Target workspace id
 * @param workflows - Configured workflows, keyed by name
 * @returns The populated (and printed) ChangeSet
 */
async function planWorkflow(client, workspaceId, workflows) {
	const changeSet = new ChangeSet("Workflows");
	// Page through all existing workflows in the workspace.
	const existingWorkflows = await fetchAll(async (pageToken) => {
		const response = await client.listWorkflows({
			workspaceId,
			pageToken,
			pageSize: 100,
			pageDirection: 0
		});
		return [response.workflows.map((w) => ({
			id: w.id,
			name: w.name
		})), response.nextPageToken];
	});
	// Index existing workflows by name; entries are removed as they're matched,
	// so whatever remains afterwards must be deleted.
	const existingWorkflowMap = /* @__PURE__ */ new Map();
	existingWorkflows.forEach((workflow) => {
		existingWorkflowMap.set(workflow.name, {
			id: workflow.id,
			name: workflow.name
		});
	});
	const allScripts = await loadWorkflowScripts();
	for (const workflow of Object.values(workflows)) {
		// Gather the bundled scripts for every job reachable from mainJob.
		const requiredJobNames = collectJobNamesFromWorkflow(workflow);
		const scripts = /* @__PURE__ */ new Map();
		for (const jobName of requiredJobNames) {
			const script = allScripts.get(jobName);
			if (script) scripts.set(jobName, script);
			else console.warn(`Warning: Script for job "${jobName}" not found in workflow "${workflow.name}"`);
		}
		if (existingWorkflowMap.get(workflow.name)) {
			changeSet.updates.push({
				name: workflow.name,
				workspaceId,
				workflow,
				scripts
			});
			existingWorkflowMap.delete(workflow.name);
		} else changeSet.creates.push({
			name: workflow.name,
			workspaceId,
			workflow,
			scripts
		});
	}
	// Anything still in the map exists remotely but is no longer configured.
	existingWorkflowMap.forEach((existing) => {
		changeSet.deletes.push({
			name: existing.name,
			workspaceId,
			workflowId: existing.id
		});
	});
	changeSet.print();
	return changeSet;
}
|
|
5646
|
+
/**
 * Load every bundled workflow-job script from dist/workflow-jobs.
 *
 * Intermediate build artifacts (`*.base.js`, `*.transformed.js`) and source
 * maps are skipped; each remaining `<jobName>.js` file is read as UTF-8.
 * Warns and returns an empty map when the directory does not exist.
 *
 * @returns {Promise<Map<string, string>>} jobName -> script contents
 */
async function loadWorkflowScripts() {
	const scripts = new Map();
	const jobsDir = path.join(getDistDir(), "workflow-jobs");
	if (!fs.existsSync(jobsDir)) {
		console.warn(`Warning: workflow-jobs directory not found at ${jobsDir}`);
		return scripts;
	}
	const isBundledScript = (file) =>
		file.endsWith(".js") &&
		!file.endsWith(".base.js") &&
		!file.endsWith(".transformed.js") &&
		!file.endsWith(".map");
	for (const file of fs.readdirSync(jobsDir)) {
		if (!isBundledScript(file)) continue;
		const jobName = file.slice(0, -".js".length);
		scripts.set(jobName, fs.readFileSync(path.join(jobsDir, file), "utf-8"));
	}
	return scripts;
}
|
|
5662
|
+
|
|
5028
5663
|
//#endregion
|
|
5029
5664
|
//#region src/cli/apply/index.ts
|
|
5030
5665
|
async function apply(options) {
|
|
@@ -5035,8 +5670,11 @@ async function apply(options) {
|
|
|
5035
5670
|
const buildOnly = options?.buildOnly ?? false;
|
|
5036
5671
|
await generateUserTypes(config, configPath);
|
|
5037
5672
|
const application = defineApplication(config);
|
|
5673
|
+
let workflowResult;
|
|
5674
|
+
if (application.workflowConfig) workflowResult = await loadAndCollectJobs(application.workflowConfig);
|
|
5038
5675
|
for (const app$1 of application.applications) for (const pipeline$1 of app$1.resolverServices) await buildPipeline(pipeline$1.namespace, pipeline$1.config);
|
|
5039
5676
|
if (application.executorService) await buildExecutor(application.executorService.config);
|
|
5677
|
+
if (workflowResult && workflowResult.jobs.length > 0) await buildWorkflow(workflowResult.jobs);
|
|
5040
5678
|
if (buildOnly) return;
|
|
5041
5679
|
const accessToken = await loadAccessToken({
|
|
5042
5680
|
useProfile: true,
|
|
@@ -5050,6 +5688,7 @@ async function apply(options) {
|
|
|
5050
5688
|
for (const tailordb of application.tailorDBServices) await tailordb.loadTypes();
|
|
5051
5689
|
for (const pipeline$1 of application.resolverServices) await pipeline$1.loadResolvers();
|
|
5052
5690
|
if (application.executorService) await application.executorService.loadExecutors();
|
|
5691
|
+
if (workflowResult) printLoadedWorkflows(workflowResult);
|
|
5053
5692
|
console.log("");
|
|
5054
5693
|
const ctx = {
|
|
5055
5694
|
client,
|
|
@@ -5063,6 +5702,7 @@ async function apply(options) {
|
|
|
5063
5702
|
const pipeline = await planPipeline(ctx);
|
|
5064
5703
|
const app = await planApplication(ctx);
|
|
5065
5704
|
const executor = await planExecutor(ctx);
|
|
5705
|
+
const workflow = await planWorkflow(client, workspaceId, workflowResult?.workflows ?? {});
|
|
5066
5706
|
const allConflicts = [
|
|
5067
5707
|
...tailorDB.conflicts,
|
|
5068
5708
|
...staticWebsite.conflicts,
|
|
@@ -5118,6 +5758,8 @@ async function apply(options) {
|
|
|
5118
5758
|
await applyPipeline(client, pipeline, "create-update");
|
|
5119
5759
|
await applyApplication(client, app, "create-update");
|
|
5120
5760
|
await applyExecutor(client, executor, "create-update");
|
|
5761
|
+
await applyWorkflow(client, workflow, "create-update");
|
|
5762
|
+
await applyWorkflow(client, workflow, "delete");
|
|
5121
5763
|
await applyExecutor(client, executor, "delete");
|
|
5122
5764
|
await applyApplication(client, app, "delete");
|
|
5123
5765
|
await applyPipeline(client, pipeline, "delete");
|
|
@@ -5128,31 +5770,13 @@ async function apply(options) {
|
|
|
5128
5770
|
console.log("Successfully applied changes.");
|
|
5129
5771
|
}
|
|
5130
5772
|
async function buildPipeline(namespace, config) {
|
|
5131
|
-
|
|
5132
|
-
namespace,
|
|
5133
|
-
serviceConfig: config,
|
|
5134
|
-
loader: new ResolverLoader(),
|
|
5135
|
-
transformer: new CodeTransformer(),
|
|
5136
|
-
outputDirs: {
|
|
5137
|
-
preBundle: "resolvers",
|
|
5138
|
-
postBundle: "functions"
|
|
5139
|
-
}
|
|
5140
|
-
};
|
|
5141
|
-
await new Bundler(bundlerConfig).bundle();
|
|
5773
|
+
await bundleResolvers(namespace, config);
|
|
5142
5774
|
}
|
|
5143
5775
|
async function buildExecutor(config) {
|
|
5144
|
-
|
|
5145
|
-
|
|
5146
|
-
|
|
5147
|
-
|
|
5148
|
-
transformer: new ExecutorTransformer(),
|
|
5149
|
-
outputDirs: {
|
|
5150
|
-
preBundle: "executors",
|
|
5151
|
-
postBundle: "executors"
|
|
5152
|
-
},
|
|
5153
|
-
shouldProcess: (executor) => ["function", "jobFunction"].includes(executor.operation.kind)
|
|
5154
|
-
};
|
|
5155
|
-
await new Bundler(bundlerConfig).bundle();
|
|
5776
|
+
await bundleExecutors(config);
|
|
5777
|
+
}
|
|
5778
|
+
/** Build step for workflow jobs: bundle every collected job into dist/workflow-jobs. */
async function buildWorkflow(collectedJobs) {
	await bundleWorkflowJobs(collectedJobs);
}
|
|
5157
5781
|
const applyCommand = defineCommand({
|
|
5158
5782
|
meta: {
|
|
@@ -6318,4 +6942,5 @@ const tokenCommand = defineCommand({
|
|
|
6318
6942
|
});
|
|
6319
6943
|
|
|
6320
6944
|
//#endregion
|
|
6321
|
-
export { PATScope, apply, applyCommand, commonArgs, createCommand, deleteCommand, fetchAll, fetchLatestToken, fetchUserInfo, formatArgs, generate, generateCommand, generateUserTypes, initOAuth2Client, initOperatorClient, listCommand, listCommand$1, loadAccessToken, loadConfig, loadConfigPath, loadWorkspaceId, machineUserList, machineUserToken, parseFormat, printWithFormat, readPackageJson, readPlatformConfig, show, showCommand, tokenCommand, withCommonArgs, workspaceCreate, workspaceDelete, workspaceList, writePlatformConfig };
|
|
6945
|
+
export { PATScope, apply, applyCommand, commonArgs, createCommand, deleteCommand, fetchAll, fetchLatestToken, fetchUserInfo, formatArgs, generate, generateCommand, generateUserTypes, initOAuth2Client, initOperatorClient, listCommand, listCommand$1, loadAccessToken, loadConfig, loadConfigPath, loadWorkspaceId, machineUserList, machineUserToken, parseFormat, printWithFormat, readPackageJson, readPlatformConfig, show, showCommand, tokenCommand, withCommonArgs, workspaceCreate, workspaceDelete, workspaceList, writePlatformConfig };
|
|
6946
|
+
//# sourceMappingURL=token-43KGC4QJ.mjs.map
|