@tailor-platform/sdk 0.12.4 → 0.13.0

@@ -1,4 +1,4 @@
- import { WORKFLOW_JOB_BRAND, getDistDir, tailorUserMap } from "./job-wYkb6yMl.mjs";
+ import { WORKFLOW_JOB_BRAND, getDistDir, tailorUserMap } from "./job-vYIg6hFf.mjs";
  import { createRequire } from "node:module";
  import { defineCommand } from "citty";
  import * as path from "node:path";
@@ -587,9 +587,7 @@ function defineApplication(config) {
  //#region src/parser/service/workflow/schema.ts
  const WorkflowJobSchema = z.object({
  name: z.string(),
- get deps() {
- return z.array(WorkflowJobSchema).optional();
- },
+ trigger: functionSchema,
  body: functionSchema
  });
  const WorkflowSchema = z.object({
@@ -601,17 +599,16 @@ const WorkflowSchema = z.object({
  //#region src/cli/application/workflow/service.ts
  /**
  * Load workflow files and collect all jobs in a single pass.
+ * Dependencies are detected at bundle time via AST analysis.
  */
  async function loadAndCollectJobs(config) {
  const workflows = {};
  const workflowSources = [];
  const collectedJobs = [];
- let unusedJobs = [];
  if (!config.files || config.files.length === 0) return {
  workflows,
  workflowSources,
  jobs: collectedJobs,
- unusedJobs,
  fileCount: 0
  };
  const workflowFiles = loadFilesWithIgnores(config);
@@ -630,27 +627,13 @@ async function loadAndCollectJobs(config) {
  const existing = allJobsMap.get(job.name);
  if (existing) throw new Error(`Duplicate job name "${job.name}" found:\n - ${existing.sourceFile} (export: ${existing.exportName})\n - ${job.sourceFile} (export: ${job.exportName})\nEach job must have a unique name.`);
  allJobsMap.set(job.name, job);
+ collectedJobs.push(job);
  }
  }
- const tracedJobs = /* @__PURE__ */ new Map();
- for (const { workflow } of workflowSources) traceJobDependencies(workflow.mainJob, tracedJobs);
- const notExportedJobs = [];
- for (const jobName of tracedJobs.keys()) if (!allJobsMap.has(jobName)) notExportedJobs.push(jobName);
- if (notExportedJobs.length > 0) throw new Error(`The following workflow jobs are used but not exported:\n` + notExportedJobs.map((name) => ` - "${name}"`).join("\n") + "\n\nAll workflow jobs must be named exports. Example:\n export const myJob = createWorkflowJob({ name: \"my-job\", ... });\n\nAlso ensure that files containing job exports are included in the workflow.files glob pattern.");
- unusedJobs = Array.from(allJobsMap.keys()).filter((jobName) => !tracedJobs.has(jobName));
- for (const [jobName, job] of tracedJobs) {
- const exportedMetadata = allJobsMap.get(jobName);
- const depNames = job.deps?.map((dep) => dep.name);
- collectedJobs.push({
- ...exportedMetadata,
- deps: depNames
- });
- }
  return {
  workflows,
  workflowSources,
  jobs: collectedJobs,
- unusedJobs,
  fileCount
  };
  }
@@ -665,7 +648,6 @@ function printLoadedWorkflows(result) {
  const relativePath = path.relative(process.cwd(), sourceFile);
  console.log("Workflow:", styleText("greenBright", `"${workflow.name}"`), "loaded from", styleText("cyan", relativePath));
  }
- if (result.unusedJobs.length > 0) console.warn(`⚠️ Warning: Unused workflow jobs found: ${result.unusedJobs.join(", ")}`);
  }
  /**
  * Load a single file and extract jobs and workflow
@@ -707,14 +689,6 @@ async function loadFileContent(filePath) {
  function isWorkflowJob(value) {
  return value != null && typeof value === "object" && WORKFLOW_JOB_BRAND in value && value[WORKFLOW_JOB_BRAND] === true;
  }
- /**
- * Recursively trace all job dependencies
- */
- function traceJobDependencies(job, visited) {
- if (visited.has(job.name)) return;
- visited.set(job.name, job);
- if (job.deps && Array.isArray(job.deps)) for (const dep of job.deps) traceJobDependencies(dep, visited);
- }

  //#endregion
  //#region src/cli/bundler/executor/loader.ts
@@ -1048,29 +1022,27 @@ function findAllJobs(program, _sourceText) {
  const configObj = args[0];
  const nameProp = findProperty(configObj.properties, "name");
  const bodyProp = findProperty(configObj.properties, "body");
- const depsProp = findProperty(configObj.properties, "deps");
  if (nameProp && isStringLiteral(nameProp.value) && bodyProp && isFunctionExpression(bodyProp.value)) {
  let statementRange;
- for (let i = 0; i < parents.length; i++) {
+ let exportName;
+ for (let i = parents.length - 1; i >= 0; i--) {
  const parent = parents[i];
- if (parent.type === "ExportNamedDeclaration" || parent.type === "VariableDeclaration") {
- statementRange = {
- start: parent.start,
- end: parent.end
- };
- break;
+ if (parent.type === "VariableDeclarator") {
+ const declarator = parent;
+ if (declarator.id?.type === "Identifier") exportName = declarator.id.name;
  }
+ if (parent.type === "ExportNamedDeclaration" || parent.type === "VariableDeclaration") statementRange = {
+ start: parent.start,
+ end: parent.end
+ };
  }
  jobs.push({
  name: nameProp.value.value,
+ exportName,
  nameRange: {
  start: nameProp.start,
  end: nameProp.end
  },
- depsRange: depsProp ? {
- start: depsProp.start,
- end: depsProp.end
- } : void 0,
  bodyValueRange: {
  start: bodyProp.value.start,
  end: bodyProp.value.end
@@ -1101,19 +1073,6 @@ function applyReplacements(source, replacements) {
  return result;
  }
  /**
- * Find the end position including trailing comma
- */
- function findTrailingCommaEnd(source, position) {
- let i = position;
- while (i < source.length) {
- const char = source[i];
- if (char === ",") return i + 1;
- if (!/\s/.test(char)) break;
- i++;
- }
- return position;
- }
- /**
  * Find the end of a statement including any trailing newline
  */
  function findStatementEnd(source, position) {
@@ -1123,6 +1082,55 @@ function findStatementEnd(source, position) {
  return i;
  }
  /**
+ * Detect all .trigger() calls in the source code
+ * Returns information about each trigger call for transformation
+ */
+ function detectTriggerCalls(program, sourceText) {
+ const calls = [];
+ function walk(node) {
+ if (!node || typeof node !== "object") return;
+ if (node.type === "CallExpression") {
+ const callExpr = node;
+ const callee = callExpr.callee;
+ if (callee.type === "MemberExpression") {
+ const memberExpr = callee;
+ if (!memberExpr.computed && memberExpr.object.type === "Identifier" && memberExpr.property.name === "trigger") {
+ const identifierName = memberExpr.object.name;
+ let argsText = "";
+ if (callExpr.arguments.length > 0) {
+ const firstArg = callExpr.arguments[0];
+ const lastArg = callExpr.arguments[callExpr.arguments.length - 1];
+ if (firstArg && lastArg && "start" in firstArg && "end" in lastArg) argsText = sourceText.slice(firstArg.start, lastArg.end);
+ }
+ calls.push({
+ identifierName,
+ callRange: {
+ start: callExpr.start,
+ end: callExpr.end
+ },
+ argsText
+ });
+ }
+ }
+ }
+ for (const key of Object.keys(node)) {
+ const child = node[key];
+ if (Array.isArray(child)) child.forEach((c) => walk(c));
+ else if (child && typeof child === "object") walk(child);
+ }
+ }
+ walk(program);
+ return calls;
+ }
+ /**
+ * Build a map from export name to job name from detected jobs
+ */
+ function buildJobNameMap(jobs) {
+ const map = /* @__PURE__ */ new Map();
+ for (const job of jobs) if (job.exportName) map.set(job.exportName, job.name);
+ return map;
+ }
+ /**
  * Find variable declarations by export names
  * Returns a map of export name to statement range
  */
@@ -1162,54 +1170,74 @@ function findVariableDeclarationsByName(program) {
  }
  /**
  * Transform workflow source code
- * - Target job: remove deps
+ * - Transform .trigger() calls to tailor.workflow.triggerJobFunction()
  * - Other jobs: remove entire variable declaration
  *
  * @param source - The source code to transform
  * @param targetJobName - The name of the target job (from job config)
  * @param targetJobExportName - The export name of the target job (optional, for enhanced detection)
  * @param otherJobExportNames - Export names of other jobs to remove (optional, for enhanced detection)
+ * @param allJobsMap - Map from export name to job name for trigger transformation (optional)
  */
- function transformWorkflowSource(source, targetJobName, targetJobExportName, otherJobExportNames) {
+ function transformWorkflowSource(source, targetJobName, targetJobExportName, otherJobExportNames, allJobsMap) {
  const { program } = parseSync("input.ts", source);
  const detectedJobs = findAllJobs(program, source);
+ const jobNameMap = allJobsMap ?? buildJobNameMap(detectedJobs);
  const allDeclarations = findVariableDeclarationsByName(program);
+ const triggerCalls = detectTriggerCalls(program, source);
  const replacements = [];
- const removedRanges = /* @__PURE__ */ new Set();
- const markRemoved = (start, end) => {
- removedRanges.add(`${start}-${end}`);
+ const removedRanges = [];
+ const isInsideRemovedRange = (pos) => {
+ return removedRanges.some((r) => pos >= r.start && pos < r.end);
  };
- const isRemoved = (start, end) => {
- return removedRanges.has(`${start}-${end}`);
+ const isAlreadyMarkedForRemoval = (start) => {
+ return removedRanges.some((r) => r.start === start);
  };
- for (const job of detectedJobs) if (job.name === targetJobName) {
- if (job.depsRange) replacements.push({
- start: job.depsRange.start,
- end: findTrailingCommaEnd(source, job.depsRange.end),
- text: ""
- });
- } else if (job.statementRange && !isRemoved(job.statementRange.start, job.statementRange.end)) {
- replacements.push({
- start: job.statementRange.start,
- end: findStatementEnd(source, job.statementRange.end),
- text: ""
+ for (const job of detectedJobs) {
+ if (job.name === targetJobName) continue;
+ if (job.statementRange && !isAlreadyMarkedForRemoval(job.statementRange.start)) {
+ const endPos = findStatementEnd(source, job.statementRange.end);
+ removedRanges.push({
+ start: job.statementRange.start,
+ end: endPos
+ });
+ replacements.push({
+ start: job.statementRange.start,
+ end: endPos,
+ text: ""
+ });
+ } else if (!job.statementRange) replacements.push({
+ start: job.bodyValueRange.start,
+ end: job.bodyValueRange.end,
+ text: "() => {}"
  });
- markRemoved(job.statementRange.start, job.statementRange.end);
- } else if (!job.statementRange) replacements.push({
- start: job.bodyValueRange.start,
- end: job.bodyValueRange.end,
- text: "() => {}"
- });
+ }
  if (otherJobExportNames) for (const exportName of otherJobExportNames) {
  if (exportName === targetJobExportName) continue;
  const declRange = allDeclarations.get(exportName);
- if (declRange && !isRemoved(declRange.start, declRange.end)) {
+ if (declRange && !isAlreadyMarkedForRemoval(declRange.start)) {
+ const endPos = findStatementEnd(source, declRange.end);
+ removedRanges.push({
+ start: declRange.start,
+ end: endPos
+ });
  replacements.push({
  start: declRange.start,
- end: findStatementEnd(source, declRange.end),
+ end: endPos,
  text: ""
  });
- markRemoved(declRange.start, declRange.end);
+ }
+ }
+ for (const call of triggerCalls) {
+ if (isInsideRemovedRange(call.callRange.start)) continue;
+ const jobName = jobNameMap.get(call.identifierName);
+ if (jobName) {
+ const transformedCall = `tailor.workflow.triggerJobFunction("${jobName}", ${call.argsText || "undefined"})`;
+ replacements.push({
+ start: call.callRange.start,
+ end: call.callRange.end,
+ text: transformedCall
+ });
  }
  }
  return applyReplacements(source, replacements);
@@ -1221,18 +1249,20 @@ function transformWorkflowSource(source, targetJobName, targetJobExportName, oth
  * Bundle workflow jobs
  *
  * This function:
- * 1. Uses a transform plugin to remove deps during bundling (preserves module resolution)
- * 2. Creates entry file
- * 3. Bundles in a single step with tree-shaking
+ * 1. Detects which jobs are actually used (mainJobs + their dependencies)
+ * 2. Uses a transform plugin to transform trigger calls during bundling
+ * 3. Creates entry file and bundles with tree-shaking
  */
- async function bundleWorkflowJobs(allJobs) {
+ async function bundleWorkflowJobs(allJobs, mainJobNames, env = {}) {
  if (allJobs.length === 0) {
  console.log(styleText("dim", "No workflow jobs to bundle"));
  return;
  }
+ const usedJobs = await filterUsedJobs(allJobs, mainJobNames);
  console.log("");
- console.log("Bundling", styleText("cyanBright", allJobs.length.toString()), "files for", styleText("cyan", "\"workflow-job\""));
+ console.log("Bundling", styleText("cyanBright", usedJobs.length.toString()), "files for", styleText("cyan", "\"workflow-job\""));
  const outputDir = path.resolve(getDistDir(), "workflow-jobs");
+ if (fs.existsSync(outputDir)) fs.rmSync(outputDir, { recursive: true });
  fs.mkdirSync(outputDir, { recursive: true });
  let tsconfig;
  try {
@@ -1240,28 +1270,81 @@ async function bundleWorkflowJobs(allJobs) {
  } catch {
  tsconfig = void 0;
  }
- await Promise.all(allJobs.map((job) => bundleSingleJob(job, allJobs, outputDir, tsconfig)));
+ await Promise.all(usedJobs.map((job) => bundleSingleJob(job, usedJobs, outputDir, tsconfig, env)));
  console.log(styleText("green", "Bundled"), styleText("cyan", "\"workflow-job\""));
  }
- async function bundleSingleJob(job, allJobs, outputDir, tsconfig) {
- const depsJobNames = findJobDeps(job.name, allJobs);
- const jobsObject = generateJobsObject(depsJobNames);
+ /**
+ * Filter jobs to only include those that are actually used.
+ * A job is "used" if:
+ * - It's a mainJob of a workflow
+ * - It's called via .trigger() from another used job (transitively)
+ */
+ async function filterUsedJobs(allJobs, mainJobNames) {
+ if (allJobs.length === 0 || mainJobNames.length === 0) return [];
+ const jobsBySourceFile = /* @__PURE__ */ new Map();
+ for (const job of allJobs) {
+ const existing = jobsBySourceFile.get(job.sourceFile) || [];
+ existing.push(job);
+ jobsBySourceFile.set(job.sourceFile, existing);
+ }
+ const exportNameToJobName = /* @__PURE__ */ new Map();
+ for (const job of allJobs) exportNameToJobName.set(job.exportName, job.name);
+ const dependencies = /* @__PURE__ */ new Map();
+ const fileResults = await Promise.all(Array.from(jobsBySourceFile.entries()).map(async ([sourceFile, jobs]) => {
+ try {
+ const source = await fs.promises.readFile(sourceFile, "utf-8");
+ const { program } = parseSync("input.ts", source);
+ const detectedJobs = findAllJobs(program, source);
+ const localExportNameToJobName = /* @__PURE__ */ new Map();
+ for (const detected of detectedJobs) if (detected.exportName) localExportNameToJobName.set(detected.exportName, detected.name);
+ const triggerCalls = detectTriggerCalls(program, source);
+ const jobDependencies = [];
+ for (const job of jobs) {
+ const detectedJob = detectedJobs.find((d) => d.name === job.name);
+ if (!detectedJob) continue;
+ const jobDeps = /* @__PURE__ */ new Set();
+ for (const call of triggerCalls) if (detectedJob.bodyValueRange && call.callRange.start >= detectedJob.bodyValueRange.start && call.callRange.end <= detectedJob.bodyValueRange.end) {
+ const triggeredJobName = localExportNameToJobName.get(call.identifierName) || exportNameToJobName.get(call.identifierName);
+ if (triggeredJobName) jobDeps.add(triggeredJobName);
+ }
+ if (jobDeps.size > 0) jobDependencies.push({
+ jobName: job.name,
+ deps: jobDeps
+ });
+ }
+ return jobDependencies;
+ } catch {
+ return [];
+ }
+ }));
+ for (const jobDependencies of fileResults) for (const { jobName, deps } of jobDependencies) dependencies.set(jobName, deps);
+ const usedJobNames = /* @__PURE__ */ new Set();
+ function markUsed(jobName) {
+ if (usedJobNames.has(jobName)) return;
+ usedJobNames.add(jobName);
+ const deps = dependencies.get(jobName);
+ if (deps) for (const dep of deps) markUsed(dep);
+ }
+ for (const mainJobName of mainJobNames) markUsed(mainJobName);
+ return allJobs.filter((job) => usedJobNames.has(job.name));
+ }
+ async function bundleSingleJob(job, allJobs, outputDir, tsconfig, env) {
  const entryPath = path.join(outputDir, `${job.name}.entry.js`);
  const absoluteSourcePath = path.resolve(job.sourceFile).replace(/\\/g, "/");
  const entryContent = ml`
  import { ${job.exportName} } from "${absoluteSourcePath}";

- const jobs = {
- ${jobsObject}
- };
+ const env = ${JSON.stringify(env)};

  globalThis.main = async (input) => {
- return await ${job.exportName}.body(input, jobs);
+ return await ${job.exportName}.body(input, { env });
  };
  `;
  fs.writeFileSync(entryPath, entryContent);
  const outputPath = path.join(outputDir, `${job.name}.js`);
  const otherJobExportNames = allJobs.filter((j) => j.name !== job.name).map((j) => j.exportName);
+ const allJobsMap = /* @__PURE__ */ new Map();
+ for (const j of allJobs) allJobsMap.set(j.exportName, j.name);
  await rolldown.build(rolldown.defineConfig({
  input: entryPath,
  output: {
@@ -1277,8 +1360,8 @@ async function bundleSingleJob(job, allJobs, outputDir, tsconfig) {
  transform: {
  filter: { id: { include: [/\.ts$/, /\.js$/] } },
  handler(code) {
- if (!code.includes("createWorkflowJob")) return null;
- return { code: transformWorkflowSource(code, job.name, job.exportName, otherJobExportNames) };
+ if (!code.includes("createWorkflowJob") && !code.includes(".trigger(")) return null;
+ return { code: transformWorkflowSource(code, job.name, job.exportName, otherJobExportNames, allJobsMap) };
  }
  }
  }],
@@ -1290,18 +1373,6 @@ async function bundleSingleJob(job, allJobs, outputDir, tsconfig) {
  logLevel: "silent"
  }));
  }
- /**
- * Find the dependencies of a specific job
- */
- function findJobDeps(targetJobName, allJobs) {
- return allJobs.find((j) => j.name === targetJobName)?.deps ?? [];
- }
- function generateJobsObject(jobNames) {
- if (jobNames.length === 0) return "";
- return jobNames.map((jobName) => {
- return `"${jobName.replace(/[-\s]/g, "_")}": (args) => tailor.workflow.triggerJobFunction("${jobName}", args)`;
- }).join(",\n ");
- }

  //#endregion
  //#region src/parser/generator-config/index.ts
@@ -5685,59 +5756,56 @@ function protoGqlOperand(operand) {
  async function applyWorkflow(client, result, phase = "create-update") {
  const { changeSet } = result;
  if (phase === "create-update") {
- await Promise.all(changeSet.creates.map(async (create) => {
- const jobFunctions = {};
- for (const [jobName, script] of create.scripts.entries()) {
- const response = await client.createWorkflowJobFunction({
- workspaceId: create.workspaceId,
- jobFunctionName: jobName,
- script
- });
- if (response.jobFunction) jobFunctions[jobName] = response.jobFunction.version;
- }
+ const jobFunctionVersions = await registerJobFunctions(client, changeSet);
+ await Promise.all([...changeSet.creates.map(async (create) => {
  await client.createWorkflow({
  workspaceId: create.workspaceId,
  workflowName: create.workflow.name,
  mainJobFunctionName: create.workflow.mainJob.name,
- jobFunctions
+ jobFunctions: jobFunctionVersions
  });
  await client.setMetadata(create.metaRequest);
- }));
- await Promise.all(changeSet.updates.map(async (update) => {
- const jobFunctions = {};
- for (const [jobName, script] of update.scripts.entries()) {
- const response = await client.updateWorkflowJobFunction({
- workspaceId: update.workspaceId,
- jobFunctionName: jobName,
- script
- });
- if (response.jobFunction) jobFunctions[jobName] = response.jobFunction.version;
- }
+ }), ...changeSet.updates.map(async (update) => {
  await client.updateWorkflow({
  workspaceId: update.workspaceId,
  workflowName: update.workflow.name,
  mainJobFunctionName: update.workflow.mainJob.name,
- jobFunctions
+ jobFunctions: jobFunctionVersions
  });
  await client.setMetadata(update.metaRequest);
- }));
+ })]);
  } else if (phase === "delete") await Promise.all(changeSet.deletes.map((del) => client.deleteWorkflow({
  workspaceId: del.workspaceId,
  workflowId: del.workflowId
  })));
  }
  /**
- * Recursively collect all job names from a workflow's mainJob and its dependencies
+ * Register all job functions once, returns a map of job name to version.
+ * Uses update for existing workflows, create for new workflows.
  */
- function collectJobNamesFromWorkflow(workflow) {
- const jobNames = /* @__PURE__ */ new Set();
- const collectFromJob = (job) => {
- if (!job || jobNames.has(job.name)) return;
- jobNames.add(job.name);
- if (job.deps && Array.isArray(job.deps)) for (const dep of job.deps) collectFromJob(dep);
- };
- collectFromJob(workflow.mainJob);
- return jobNames;
+ async function registerJobFunctions(client, changeSet) {
+ const jobFunctionVersions = {};
+ const firstWorkflow = changeSet.creates[0] || changeSet.updates[0];
+ if (!firstWorkflow) return jobFunctionVersions;
+ const { workspaceId, scripts } = firstWorkflow;
+ const hasExistingWorkflows = changeSet.updates.length > 0;
+ const results = await Promise.all(Array.from(scripts.entries()).map(async ([jobName, script]) => {
+ const response = hasExistingWorkflows ? await client.updateWorkflowJobFunction({
+ workspaceId,
+ jobFunctionName: jobName,
+ script
+ }) : await client.createWorkflowJobFunction({
+ workspaceId,
+ jobFunctionName: jobName,
+ script
+ });
+ return {
+ jobName,
+ version: response.jobFunction?.version
+ };
+ }));
+ for (const { jobName, version } of results) if (version) jobFunctionVersions[jobName] = version;
+ return jobFunctionVersions;
  }
  function trn(workspaceId, name) {
  return `trn:v1:workspace:${workspaceId}:workflow:${name}`;
@@ -5767,13 +5835,6 @@ async function planWorkflow(client, workspaceId, appName, workflows) {
  }));
  const allScripts = await loadWorkflowScripts();
  for (const workflow of Object.values(workflows)) {
- const requiredJobNames = collectJobNamesFromWorkflow(workflow);
- const scripts = /* @__PURE__ */ new Map();
- for (const jobName of requiredJobNames) {
- const script = allScripts.get(jobName);
- if (script) scripts.set(jobName, script);
- else console.warn(`Warning: Script for job "${jobName}" not found in workflow "${workflow.name}"`);
- }
  const existing = existingWorkflows[workflow.name];
  const metaRequest = await buildMetaRequest(trn(workspaceId, workflow.name), appName);
  if (existing) {
@@ -5790,7 +5851,7 @@ async function planWorkflow(client, workspaceId, appName, workflows) {
  name: workflow.name,
  workspaceId,
  workflow,
- scripts,
+ scripts: allScripts,
  metaRequest
  });
  delete existingWorkflows[workflow.name];
@@ -5798,7 +5859,7 @@ async function planWorkflow(client, workspaceId, appName, workflows) {
  name: workflow.name,
  workspaceId,
  workflow,
- scripts,
+ scripts: allScripts,
  metaRequest
  });
  }
@@ -5824,7 +5885,7 @@ async function loadWorkflowScripts() {
  const jobsDir = path.join(getDistDir(), "workflow-jobs");
  if (!fs.existsSync(jobsDir)) return scripts;
  const files = fs.readdirSync(jobsDir);
- for (const file of files) if (file.endsWith(".js") && !file.endsWith(".base.js") && !file.endsWith(".transformed.js") && !file.endsWith(".map")) {
+ for (const file of files) if (/^[^.]+\.js$/.test(file)) {
  const jobName = file.replace(/\.js$/, "");
  const scriptPath = path.join(jobsDir, file);
  const script = fs.readFileSync(scriptPath, "utf-8");
@@ -5846,7 +5907,10 @@ async function apply(options) {
  if (application.workflowConfig) workflowResult = await loadAndCollectJobs(application.workflowConfig);
  for (const app$1 of application.applications) for (const pipeline$1 of app$1.resolverServices) await buildPipeline(pipeline$1.namespace, pipeline$1.config);
  if (application.executorService) await buildExecutor(application.executorService.config);
- if (workflowResult && workflowResult.jobs.length > 0) await buildWorkflow(workflowResult.jobs);
+ if (workflowResult && workflowResult.jobs.length > 0) {
+ const mainJobNames = workflowResult.workflowSources.map((ws) => ws.workflow.mainJob.name);
+ await buildWorkflow(workflowResult.jobs, mainJobNames, application.env);
+ }
  if (buildOnly) return;
  const accessToken = await loadAccessToken({
  useProfile: true,
@@ -5951,8 +6015,8 @@ async function buildPipeline(namespace, config) {
  async function buildExecutor(config) {
  await bundleExecutors(config);
  }
- async function buildWorkflow(collectedJobs) {
- await bundleWorkflowJobs(collectedJobs);
+ async function buildWorkflow(collectedJobs, mainJobNames, env) {
+ await bundleWorkflowJobs(collectedJobs, mainJobNames, env);
  }
  const applyCommand = defineCommand({
  meta: {
@@ -7384,4 +7448,4 @@ const listCommand = defineCommand({

  //#endregion
  export { PATScope, apply, applyCommand, commonArgs, createCommand, deleteCommand, fetchAll, fetchLatestToken, fetchUserInfo, formatArgs, generate, generateCommand, generateUserTypes, getCommand, initOAuth2Client, initOperatorClient, listCommand, listCommand$1, listCommand$2, loadAccessToken, loadConfig, loadWorkspaceId, machineUserList, machineUserToken, oauth2ClientGet, oauth2ClientList, parseFormat, printWithFormat, readPackageJson, readPlatformConfig, remove, removeCommand, show, showCommand, tokenCommand, withCommonArgs, workspaceCreate, workspaceDelete, workspaceList, writePlatformConfig };
- //# sourceMappingURL=list-4T6XN_zi.mjs.map
+ //# sourceMappingURL=list-D-R1mEOM.mjs.map
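
What this release implies for user-side workflow code, as a minimal hypothetical sketch inferred only from the diff above: the `deps` getter on `createWorkflowJob` is gone, a job now calls another exported job's `.trigger()` inside its `body`, the bundler detects those calls via AST analysis, rewrites them to `tailor.workflow.triggerJobFunction("<job name>", args)`, and bundles only jobs reachable from a workflow's `mainJob`. The import path, the exact option shapes, and the `{ env }` second body argument below are assumptions read off the generated entry file and error messages, not documented API.

// Hypothetical 0.13.0-style job definitions (import path is an assumption).
import { createWorkflowJob } from "@tailor-platform/sdk";

// Jobs must be named exports so the bundler's AST pass can find them,
// and their files must match the workflow.files glob pattern.
export const sendReport = createWorkflowJob({
  name: "send-report",
  body: async (input: { to: string }) => {
    console.log(`sending report to ${input.to}`);
  },
});

export const mainJob = createWorkflowJob({
  name: "main",
  // 0.12.x would have declared `deps: [sendReport]` here; 0.13.0 infers the
  // dependency from the .trigger() call below at bundle time.
  body: async (_input: unknown, { env }: { env: Record<string, string> }) => {
    // Rewritten during bundling to:
    //   tailor.workflow.triggerJobFunction("send-report", { to: env.REPORT_RECIPIENT })
    await sendReport.trigger({ to: env.REPORT_RECIPIENT });
  },
});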