windmill-cli 1.618.2 → 1.620.0

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
@@ -32,7 +32,7 @@ export const OpenAPI = {
  PASSWORD: undefined,
  TOKEN: getEnv("WM_TOKEN"),
  USERNAME: undefined,
- VERSION: '1.618.2',
+ VERSION: '1.620.0',
  WITH_CREDENTIALS: true,
  interceptors: {
  request: new Interceptors(),
@@ -5633,22 +5633,20 @@ export const existsRawApp = (data) => {
  });
  };
  /**
- * get app by path
+ * get raw app data by
  * @param data The data for the request.
  * @param data.workspace
- * @param data.version
- * @param data.path
+ * @param data.secretWithExtension
  * @returns string app details
  * @throws ApiError
  */
  export const getRawAppData = (data) => {
  return __request(OpenAPI, {
  method: 'GET',
- url: '/w/{workspace}/apps/get_data/{version}/{path}',
+ url: '/w/{workspace}/apps/get_data/v/{secretWithExtension}',
  path: {
  workspace: data.workspace,
- version: data.version,
- path: data.path
+ secretWithExtension: data.secretWithExtension
  }
  });
  };
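For reference, a minimal usage sketch of the regenerated client function; the workspace name and the secretWithExtension value are hypothetical placeholders, since the endpoint now takes an opaque secret instead of the former version/path pair.

// Hypothetical sketch, not part of the published diff:
const appData = await getRawAppData({
  workspace: "my_workspace",                 // placeholder workspace id
  secretWithExtension: "<opaque-secret>.js", // placeholder value issued by the server
});
console.log(appData); // per the docstring above, resolves to the raw app payload as a string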
@@ -149,6 +149,57 @@ async function run(opts, path) {
  });
  log.info(JSON.stringify(jobInfo.result ?? {}, null, 2));
  }
+ async function preview(opts, flowPath) {
+ const workspace = await resolveWorkspace(opts);
+ await requireLogin(opts);
+ // Normalize path - ensure it's a directory path to a .flow folder
+ if (!flowPath.endsWith(".flow") && !flowPath.endsWith(".flow" + SEP)) {
+ // Check if it's a flow.yaml file
+ if (flowPath.endsWith("flow.yaml") || flowPath.endsWith("flow.json")) {
+ flowPath = flowPath.substring(0, flowPath.lastIndexOf(SEP));
+ }
+ else {
+ throw new Error("Flow path must be a .flow directory or a flow.yaml file");
+ }
+ }
+ if (!flowPath.endsWith(SEP)) {
+ flowPath += SEP;
+ }
+ // Read and parse the flow definition
+ const localFlow = (await yamlParseFile(flowPath + "flow.yaml"));
+ // Replace inline scripts with their actual content
+ await replaceInlineScripts(localFlow.value.modules, async (path) => await dntShim.Deno.readTextFile(flowPath + path), log, flowPath, SEP);
+ const input = opts.data ? await resolve(opts.data) : {};
+ if (!opts.silent) {
+ log.info(colors.yellow(`Running flow preview for ${flowPath}...`));
+ }
+ log.debug(`Flow value: ${JSON.stringify(localFlow.value, null, 2)}`);
+ // Run the flow preview
+ let result;
+ try {
+ result = await wmill.runFlowPreviewAndWaitResult({
+ workspace: workspace.workspaceId,
+ requestBody: {
+ value: localFlow.value,
+ path: flowPath.substring(0, flowPath.indexOf(".flow")).replaceAll(SEP, "/"),
+ args: input,
+ },
+ });
+ }
+ catch (e) {
+ if (e.body) {
+ log.error(`Flow preview failed: ${JSON.stringify(e.body)}`);
+ }
+ throw e;
+ }
+ if (opts.silent) {
+ console.log(JSON.stringify(result, null, 2));
+ }
+ else {
+ log.info(colors.bold.underline.green("Flow preview completed"));
+ log.info(JSON.stringify(result, null, 2));
+ }
+ }
  async function generateLocks(opts, folder) {
  const workspace = await resolveWorkspace(opts);
  await requireLogin(opts);
@@ -220,6 +271,11 @@ const command = new Command()
  .option("-d --data <data:string>", "Inputs specified as a JSON string or a file using @<filename> or stdin using @-.")
  .option("-s --silent", "Do not ouput anything other then the final output. Useful for scripting.")
  .action(run)
+ .command("preview", "preview a local flow without deploying it. Runs the flow definition from local files.")
+ .arguments("<flow_path:string>")
+ .option("-d --data <data:string>", "Inputs specified as a JSON string or a file using @<filename> or stdin using @-.")
+ .option("-s --silent", "Do not output anything other then the final output. Useful for scripting.")
+ .action(preview)
  .command("generate-locks", "re-generate the lock files of all inline scripts of all updated flows")
  .arguments("[flow:file]")
  .option("--yes", "Skip confirmation prompt")
@@ -785,6 +785,178 @@ async function generateMetadata(opts, scriptPath) {
  }
  }
  }
+ async function preview(opts, filePath) {
+ opts = await mergeConfigWithConfigFile(opts);
+ const workspace = await resolveWorkspace(opts);
+ await requireLogin(opts);
+ if (!validatePath(filePath)) {
+ return;
+ }
+ const fstat = await dntShim.Deno.stat(filePath);
+ if (!fstat.isFile) {
+ throw new Error("file path must refer to a file.");
+ }
+ if (filePath.endsWith(".script.json") || filePath.endsWith(".script.yaml")) {
+ throw Error("Cannot preview a script metadata file, point to the script content file instead (.py, .ts, .go, .sh)");
+ }
+ const codebases = await listSyncCodebases(opts);
+ const language = inferContentTypeFromFilePath(filePath, opts?.defaultTs);
+ const content = await dntShim.Deno.readTextFile(filePath);
+ const input = opts.data ? await resolve(opts.data) : {};
+ // Check if this is a codebase script
+ const codebase = language == "bun" ? findCodebase(filePath, codebases) : undefined;
+ let bundledContent = undefined;
+ let isTar = false;
+ if (codebase) {
+ if (codebase.customBundler) {
+ if (!opts.silent) {
+ log.info(`Using custom bundler ${codebase.customBundler} for preview`);
+ }
+ bundledContent = execSync(codebase.customBundler + " " + filePath, {
+ maxBuffer: 1024 * 1024 * 50,
+ }).toString();
+ }
+ else {
+ const esbuild = await import("esbuild");
+ if (!opts.silent) {
+ log.info(`Bundling ${filePath} for preview...`);
+ }
+ const startTime = performance.now();
+ const format = codebase.format ?? "cjs";
+ const out = await esbuild.build({
+ entryPoints: [filePath],
+ format: format,
+ bundle: true,
+ write: false,
+ external: codebase.external,
+ inject: codebase.inject,
+ define: codebase.define,
+ loader: codebase.loader ?? { ".node": "file" },
+ outdir: "/",
+ platform: "node",
+ packages: "bundle",
+ target: format == "cjs" ? "node20.15.1" : "esnext",
+ banner: codebase.banner,
+ });
+ const endTime = performance.now();
+ bundledContent = out.outputFiles[0].text;
+ // Handle multiple output files (create tarball)
+ if (out.outputFiles.length > 1) {
+ const archiveNpm = await import("@ayonli/jsext/archive");
+ if (!opts.silent) {
+ log.info(`Creating tarball for multiple output files...`);
+ }
+ const tarball = new archiveNpm.Tarball();
+ const mainPath = filePath.split(SEP).pop()?.split(".")[0] + ".js";
+ const mainContent = out.outputFiles.find((file) => file.path == "/" + mainPath)?.text ?? "";
+ tarball.append(new File([mainContent], "main.js", { type: "text/plain" }));
+ for (const file of out.outputFiles) {
+ if (file.path == "/" + mainPath)
+ continue;
+ // deno-lint-ignore no-explicit-any
+ const fil = new File([file.contents], file.path.substring(1));
+ tarball.append(fil);
+ }
+ const blob = await streamToBlob(tarball.stream());
+ bundledContent = btoa(await blob.text());
+ isTar = true;
+ }
+ else if (Array.isArray(codebase.assets) && codebase.assets.length > 0) {
+ // Handle assets
+ const archiveNpm = await import("@ayonli/jsext/archive");
+ if (!opts.silent) {
+ log.info(`Adding assets to tarball...`);
+ }
+ const tarball = new archiveNpm.Tarball();
+ tarball.append(new File([bundledContent], "main.js", { type: "text/plain" }));
+ for (const asset of codebase.assets) {
+ const data = fs.readFileSync(asset.from);
+ const blob = new Blob([data], { type: "text/plain" });
+ const file = new File([blob], asset.to);
+ tarball.append(file);
+ }
+ const blob = await streamToBlob(tarball.stream());
+ bundledContent = btoa(await blob.text());
+ isTar = true;
+ }
+ if (!opts.silent) {
+ log.info(`Bundled ${filePath}: ${(bundledContent.length / 1024).toFixed(0)}kB (${(endTime - startTime).toFixed(0)}ms)`);
+ }
+ }
+ }
+ if (!opts.silent) {
+ log.info(colors.yellow(`Running preview for ${filePath}...`));
+ }
+ // For codebase scripts with bundles, we need to use a multipart form upload
+ if (bundledContent) {
+ const form = new FormData();
+ const previewPayload = {
+ content: content, // Pass the original content (frontend does this too)
+ path: filePath.substring(0, filePath.indexOf(".")).replaceAll(SEP, "/"),
+ args: input,
+ language: language,
+ kind: isTar ? "tarbundle" : "bundle",
+ format: codebase?.format ?? "cjs",
+ };
+ form.append("preview", JSON.stringify(previewPayload));
+ form.append("file", new Blob([bundledContent], { type: "application/javascript" }));
+ const url = workspace.remote +
+ "api/w/" +
+ workspace.workspaceId +
+ "/jobs/run/preview_bundle";
+ const response = await fetch(url, {
+ method: "POST",
+ headers: { Authorization: `Bearer ${workspace.token}` },
+ body: form,
+ });
+ if (!response.ok) {
+ throw new Error(`Preview failed: ${response.status} - ${response.statusText} - ${await response.text()}`);
+ }
+ const jobId = await response.text();
+ if (!opts.silent) {
+ await track_job(workspace.workspaceId, jobId);
+ }
+ // Wait for the job to complete and get the result
+ while (true) {
+ try {
+ const completedJob = await wmill.getCompletedJob({
+ workspace: workspace.workspaceId,
+ id: jobId,
+ });
+ const result = completedJob.result ?? {};
+ if (opts.silent) {
+ console.log(JSON.stringify(result, null, 2));
+ }
+ else {
+ log.info(JSON.stringify(result, null, 2));
+ }
+ break;
+ }
+ catch {
+ await new Promise((resolve) => setTimeout(resolve, 100));
+ }
+ }
+ }
+ else {
+ // For regular scripts, use the standard preview API
+ const result = await wmill.runScriptPreviewAndWaitResult({
+ workspace: workspace.workspaceId,
+ requestBody: {
+ content,
+ path: filePath.substring(0, filePath.indexOf(".")).replaceAll(SEP, "/"),
+ args: input,
+ language: language,
+ },
+ });
+ if (opts.silent) {
+ console.log(JSON.stringify(result, null, 2));
+ }
+ else {
+ log.info(colors.bold.underline.green("Preview completed"));
+ log.info(JSON.stringify(result, null, 2));
+ }
+ }
+ }
  const command = new Command()
  .description("script related commands")
  .option("--show-archived", "Enable archived scripts in output")
@@ -800,6 +972,11 @@ const command = new Command()
  .option("-d --data <data:file>", "Inputs specified as a JSON string or a file using @<filename> or stdin using @-.")
  .option("-s --silent", "Do not output anything other then the final output. Useful for scripting.")
  .action(run)
+ .command("preview", "preview a local script without deploying it. Supports both regular and codebase scripts.")
+ .arguments("<path:file>")
+ .option("-d --data <data:file>", "Inputs specified as a JSON string or a file using @<filename> or stdin using @-.")
+ .option("-s --silent", "Do not output anything other than the final output. Useful for scripting.")
+ .action(preview)
  .command("bootstrap", "create a new script")
  .arguments("<path:file> <language:string>")
  .option("--summary <summary:string>", "script summary")
@@ -35,6 +35,16 @@ export function findCodebase(path, codebases) {
  return;
  }
  for (const c of codebases) {
+ // First check if the path is within this codebase's relative_path
+ const codebasePath = c.relative_path.replaceAll("\\", "/");
+ const normalizedPath = path.replaceAll("\\", "/");
+ if (!normalizedPath.startsWith(codebasePath + "/") && normalizedPath !== codebasePath) {
+ continue;
+ }
+ // Get the path relative to the codebase root for pattern matching
+ const relativePath = normalizedPath.startsWith(codebasePath + "/")
+ ? normalizedPath.substring(codebasePath.length + 1)
+ : normalizedPath;
  let included = false;
  let excluded = false;
  if (c.includes == undefined || c.includes == null) {
@@ -47,7 +57,7 @@ export function findCodebase(path, codebases) {
  if (included) {
  break;
  }
- if (minimatch(path, r)) {
+ if (minimatch(relativePath, r)) {
  included = true;
  }
  }
@@ -55,7 +65,7 @@ export function findCodebase(path, codebases) {
  c.excludes = [c.excludes];
  }
  for (const r of c.excludes ?? []) {
- if (minimatch(path, r)) {
+ if (minimatch(relativePath, r)) {
  excluded = true;
  }
  }
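A minimal sketch of the matching change above, assuming a codebase whose relative_path is "backend" and whose includes contain a glob; the file names are illustrative, and the named minimatch import assumes a recent minimatch release.

import { minimatch } from "minimatch";

const codebasePath = "backend";                   // hypothetical codebase root from wmill.yaml
const normalizedPath = "backend/src/jobs/etl.ts"; // hypothetical file being matched

if (normalizedPath.startsWith(codebasePath + "/")) {
  const relativePath = normalizedPath.substring(codebasePath.length + 1); // "src/jobs/etl.ts"
  console.log(minimatch(relativePath, "src/**/*.ts"));   // true: patterns now match relative to the codebase root
  console.log(minimatch(normalizedPath, "src/**/*.ts")); // false: matching the full path would miss the include
}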
@@ -1047,6 +1057,8 @@ export async function* readDirRecursiveWithIgnore(ignore, root) {
  export async function elementsToMap(els, ignore, json, skips, specificItems, branchOverride) {
  const map = {};
  const processedBasePaths = new Set();
+ // Cache git branch at the start to avoid repeated execSync calls per file
+ const cachedBranch = branchOverride ?? getCurrentGitBranch() ?? undefined;
  for await (const entry of readDirRecursiveWithIgnore(ignore, els)) {
  // console.log("FOO", entry.path, entry.ignored, entry.isDirectory)
  if (entry.isDirectory || entry.ignored) {
@@ -1146,7 +1158,7 @@ export async function elementsToMap(els, ignore, json, skips, specificItems, bra
  }
  // Handle branch-specific files - skip files for other branches
  if (specificItems && isBranchSpecificFile(path)) {
- if (!isCurrentBranchFile(path, branchOverride)) {
+ if (!isCurrentBranchFile(path, cachedBranch)) {
  // Skip branch-specific files for other branches
  continue;
  }
@@ -1182,9 +1194,9 @@ export async function elementsToMap(els, ignore, json, skips, specificItems, bra
  }
  }
  // Handle branch-specific path mapping after all filtering
- if (isCurrentBranchFile(path, branchOverride)) {
+ if (cachedBranch && isCurrentBranchFile(path, cachedBranch)) {
  // This is a branch-specific file for current branch
- const currentBranch = branchOverride || getCurrentGitBranch();
+ const currentBranch = cachedBranch;
  const basePath = fromBranchSpecificPath(path, currentBranch);
  // Only use branch-specific files if the item type IS configured as branch-specific
  // AND matches the pattern. Otherwise, skip and use base file instead.
@@ -2064,6 +2076,8 @@ export async function push(opts) {
  // Create a pool of workers that processes items as they become available
  const pool = new Set();
  const queue = [...groupedChangesArray];
+ // Cache git branch at the start to avoid repeated execSync calls per change
+ const cachedBranchForPush = opts.branch || (isGitRepository() ? getCurrentGitBranch() : null);
  while (queue.length > 0 || pool.size > 0) {
  // Fill the pool until we reach parallelizationFactor
  while (pool.size < parallelizationFactor && queue.length > 0) {
@@ -2117,7 +2131,7 @@ export async function push(opts) {
  // For branch-specific resources, push to the base path on the workspace server
  // This ensures branch-specific files are stored with their base names in the workspace
  let serverPath = resourceFilePath;
- const currentBranch = opts.branch || (isGitRepository() ? getCurrentGitBranch() : null);
+ const currentBranch = cachedBranchForPush;
  if (currentBranch && isBranchSpecificFile(resourceFilePath)) {
  serverPath = fromBranchSpecificPath(resourceFilePath, currentBranch);
  }
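Both hunks above apply the same pattern: getCurrentGitBranch shells out via execSync, so the branch is resolved once and reused inside the loops. A minimal sketch of the pattern, with a hypothetical loop body:

// Resolve the branch once before the loop (a single subprocess call)...
const cachedBranch = branchOverride ?? getCurrentGitBranch() ?? undefined;
for (const path of paths) { // hypothetical iteration over the files being processed
  // ...instead of re-invoking git for every file or change.
  if (cachedBranch && isCurrentBranchFile(path, cachedBranch)) {
    // branch-specific handling
  }
}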
package/esm/src/main.js CHANGED
@@ -3,6 +3,12 @@ import "../_dnt.polyfills.js";
  import "../_dnt.polyfills.js";
  import * as dntShim from "../_dnt.shims.js";
  import { Command, CompletionsCommand, UpgradeCommand, esMain, log, } from "../deps.js";
+ // Node.js-specific imports for symlink resolution in isMain()
+ // These are only used in Node.js, not Deno
+ // dnt-shim-ignore
+ import { realpathSync } from "node:fs";
+ // dnt-shim-ignore
+ import { fileURLToPath } from "node:url";
  import flow from "./commands/flow/flow.js";
  import app from "./commands/app/app.js";
  import script from "./commands/script/script.js";
@@ -40,7 +46,7 @@ export { flow, app, script, workspace, resource, resourceType, user, variable, h
  // console.error(JSON.stringify(event.error, null, 4));
  // }
  // });
- export const VERSION = "1.618.2";
+ export const VERSION = "1.620.0";
  // Re-exported from constants.ts to maintain backwards compatibility
  export { WM_FORK_PREFIX } from "./core/constants.js";
  const command = new Command()
@@ -170,8 +176,23 @@ function isMain() {
  return isMain;
  }
  else {
- //@ts-ignore
- return esMain.default(globalThis[Symbol.for("import-meta-ponyfill-esmodule")](import.meta));
+ // For Node.js, we need to handle symlinks properly.
+ // The dnt polyfill doesn't resolve symlinks when comparing process.argv[1]
+ // with import.meta.url, so `wmill` symlink doesn't match the real file path.
+ // We resolve symlinks manually to get accurate comparison.
+ try {
+ const scriptPath = process.argv[1];
+ if (!scriptPath)
+ return false;
+ const realScriptPath = realpathSync(scriptPath);
+ const modulePath = fileURLToPath(globalThis[Symbol.for("import-meta-ponyfill-esmodule")](import.meta).url);
+ return realScriptPath === modulePath;
+ }
+ catch {
+ // Fallback to esMain if something fails
+ //@ts-ignore
+ return esMain.default(globalThis[Symbol.for("import-meta-ponyfill-esmodule")](import.meta));
+ }
  }
  }
  if (isMain()) {
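The motivation for the isMain change above: when the CLI is installed globally, process.argv[1] is typically a `wmill` symlink into the package's bin directory, so a plain string comparison against import.meta.url never matches. A standalone sketch of the symlink-aware check (the paths in the comments are hypothetical):

import { realpathSync } from "node:fs";
import { fileURLToPath } from "node:url";

export function isRunAsEntrypoint(metaUrl) {
  const scriptPath = process.argv[1];              // e.g. /usr/local/bin/wmill (a symlink)
  if (!scriptPath) return false;
  const realScriptPath = realpathSync(scriptPath); // e.g. .../windmill-cli/esm/src/main.js
  return realScriptPath === fileURLToPath(metaUrl);
}

// isRunAsEntrypoint(import.meta.url) is true only when this module is the executed script.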
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "windmill-cli",
- "version": "1.618.2",
+ "version": "1.620.0",
  "description": "CLI for Windmill",
  "repository": {
  "type": "git",
@@ -2809,11 +2809,10 @@ export declare const listRawApps: (data: ListRawAppsData) => CancelablePromise<L
  */
  export declare const existsRawApp: (data: ExistsRawAppData) => CancelablePromise<ExistsRawAppResponse>;
  /**
- * get app by path
+ * get raw app data by
  * @param data The data for the request.
  * @param data.workspace
- * @param data.version
- * @param data.path
+ * @param data.secretWithExtension
  * @returns string app details
  * @throws ApiError
  */