windmill-cli 1.654.0 → 1.657.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/esm/main.js +1723 -1321
  2. package/package.json +1 -1
package/esm/main.js CHANGED
@@ -11785,7 +11785,7 @@ var init_OpenAPI = __esm(() => {
11785
11785
  PASSWORD: undefined,
11786
11786
  TOKEN: getEnv2("WM_TOKEN"),
11787
11787
  USERNAME: undefined,
11788
- VERSION: "1.654.0",
11788
+ VERSION: "1.657.0",
11789
11789
  WITH_CREDENTIALS: true,
11790
11790
  interceptors: {
11791
11791
  request: new Interceptors,
@@ -12347,6 +12347,7 @@ __export(exports_services_gen, {
12347
12347
  globalUserRename: () => globalUserRename,
12348
12348
  globalUserDelete: () => globalUserDelete,
12349
12349
  gitRepoViewerFileUpload: () => gitRepoViewerFileUpload,
12350
+ ghesInstallationCallback: () => ghesInstallationCallback,
12350
12351
  getWorkspaceUsage: () => getWorkspaceUsage,
12351
12352
  getWorkspaceSlackOauthConfig: () => getWorkspaceSlackOauthConfig,
12352
12353
  getWorkspaceName: () => getWorkspaceName,
@@ -12451,6 +12452,7 @@ __export(exports_services_gen, {
12451
12452
  getGlobal: () => getGlobal,
12452
12453
  getGithubAppToken: () => getGithubAppToken,
12453
12454
  getGitCommitHash: () => getGitCommitHash,
12455
+ getGhesConfig: () => getGhesConfig,
12454
12456
  getGcpTrigger: () => getGcpTrigger,
12455
12457
  getFolderUsage: () => getFolderUsage,
12456
12458
  getFolderPermissionHistory: () => getFolderPermissionHistory,
@@ -12660,6 +12662,7 @@ __export(exports_services_gen, {
12660
12662
  connectClientCredentials: () => connectClientCredentials,
12661
12663
  connectCallback: () => connectCallback,
12662
12664
  compareWorkspaces: () => compareWorkspaces,
12665
+ commitKafkaOffsets: () => commitKafkaOffsets,
12663
12666
  clearIndex: () => clearIndex,
12664
12667
  checkS3FolderExists: () => checkS3FolderExists,
12665
12668
  checkInstanceSharingAvailable: () => checkInstanceSharingAvailable,
@@ -13310,6 +13313,21 @@ var backendVersion = () => {
13310
13313
  body: data2.requestBody,
13311
13314
  mediaType: "application/json"
13312
13315
  });
13316
+ }, ghesInstallationCallback = (data2) => {
13317
+ return request(OpenAPI, {
13318
+ method: "POST",
13319
+ url: "/w/{workspace}/github_app/ghes_installation_callback",
13320
+ path: {
13321
+ workspace: data2.workspace
13322
+ },
13323
+ body: data2.requestBody,
13324
+ mediaType: "application/json"
13325
+ });
13326
+ }, getGhesConfig = () => {
13327
+ return request(OpenAPI, {
13328
+ method: "GET",
13329
+ url: "/github_app/ghes_config"
13330
+ });
13313
13331
  }, acceptInvite = (data2) => {
13314
13332
  return request(OpenAPI, {
13315
13333
  method: "POST",
@@ -17229,6 +17247,17 @@ var backendVersion = () => {
17229
17247
  path: data2.path
17230
17248
  }
17231
17249
  });
17250
+ }, commitKafkaOffsets = (data2) => {
17251
+ return request(OpenAPI, {
17252
+ method: "POST",
17253
+ url: "/w/{workspace}/kafka_triggers/commit_offsets/{path}",
17254
+ path: {
17255
+ workspace: data2.workspace,
17256
+ path: data2.path
17257
+ },
17258
+ body: data2.requestBody,
17259
+ mediaType: "application/json"
17260
+ });
17232
17261
  }, createNatsTrigger = (data2) => {
17233
17262
  return request(OpenAPI, {
17234
17263
  method: "POST",
@@ -59333,6 +59362,7 @@ async function bootstrap(opts, scriptPath, language) {
59333
59362
  });
59334
59363
  }
59335
59364
  async function generateMetadata(opts, scriptPath) {
59365
+ warn(colors.yellow('This command is deprecated. Use "wmill generate-metadata" instead.'));
59336
59366
  info("This command only works for workspace scripts, for flows inline scripts use `wmill flow generate-locks`");
59337
59367
  if (scriptPath == "") {
59338
59368
  scriptPath = undefined;
@@ -59350,7 +59380,7 @@ async function generateMetadata(opts, scriptPath) {
59350
59380
  } else {
59351
59381
  const ignore = await ignoreF(opts);
59352
59382
  const elems = await elementsToMap(await FSFSElement(process.cwd(), codebases, false), (p, isD) => {
59353
- return !isD && !exts.some((ext2) => p.endsWith(ext2)) || ignore(p, isD) || isFlowPath(p) || isAppPath(p);
59383
+ return !isD && !exts.some((ext2) => p.endsWith(ext2)) || ignore(p, isD) || isFlowPath(p) || isAppPath(p) || isRawAppPath(p);
59354
59384
  }, false, {});
59355
59385
  let hasAny = false;
59356
59386
  info("Generating metadata for all stale scripts:");
@@ -60551,11 +60581,11 @@ async function generateFlowLockInternal(folder, dryRun, workspace, opts, justUpd
60551
60581
  } else if (dryRun) {
60552
60582
  return remote_path;
60553
60583
  }
60554
- if (Object.keys(filteredDeps).length > 0) {
60584
+ if (Object.keys(filteredDeps).length > 0 && !noStaleMessage) {
60555
60585
  info((await blueColor())(`Found workspace dependencies (${workspaceDependenciesLanguages.map((l) => l.filename).join("/")}) for ${folder}, using them`));
60556
60586
  }
60587
+ let changedScripts = [];
60557
60588
  if (!justUpdateMetadataLock) {
60558
- const changedScripts = [];
60559
60589
  for (const [path7, hash2] of Object.entries(hashes)) {
60560
60590
  if (path7 == TOP_HASH) {
60561
60591
  continue;
@@ -60564,10 +60594,26 @@ async function generateFlowLockInternal(folder, dryRun, workspace, opts, justUpd
60564
60594
  changedScripts.push(path7);
60565
60595
  }
60566
60596
  }
60567
- info(`Recomputing locks of ${changedScripts.join(", ")} in ${folder}`);
60568
- await replaceInlineScripts(flowValue.value.modules, async (path7) => await readFile7(folder + SEP7 + path7, "utf-8"), exports_log, folder + SEP7, SEP7, changedScripts);
60597
+ if (!noStaleMessage) {
60598
+ info(`Recomputing locks of ${changedScripts.join(", ")} in ${folder}`);
60599
+ }
60600
+ const fileReader = async (path7) => await readFile7(folder + SEP7 + path7, "utf-8");
60601
+ await replaceInlineScripts(flowValue.value.modules, fileReader, exports_log, folder + SEP7, SEP7, changedScripts);
60602
+ if (flowValue.value.failure_module) {
60603
+ await replaceInlineScripts([flowValue.value.failure_module], fileReader, exports_log, folder + SEP7, SEP7, changedScripts);
60604
+ }
60605
+ if (flowValue.value.preprocessor_module) {
60606
+ await replaceInlineScripts([flowValue.value.preprocessor_module], fileReader, exports_log, folder + SEP7, SEP7, changedScripts);
60607
+ }
60569
60608
  flowValue.value = await updateFlow2(workspace, flowValue.value, remote_path, filteredDeps);
60570
- const inlineScripts = extractInlineScripts(flowValue.value.modules, {}, SEP7, opts.defaultTs);
60609
+ const lockAssigner = newPathAssigner(opts.defaultTs ?? "bun");
60610
+ const inlineScripts = extractInlineScripts(flowValue.value.modules, {}, SEP7, opts.defaultTs, lockAssigner);
60611
+ if (flowValue.value.failure_module) {
60612
+ inlineScripts.push(...extractInlineScripts([flowValue.value.failure_module], {}, SEP7, opts.defaultTs, lockAssigner));
60613
+ }
60614
+ if (flowValue.value.preprocessor_module) {
60615
+ inlineScripts.push(...extractInlineScripts([flowValue.value.preprocessor_module], {}, SEP7, opts.defaultTs, lockAssigner));
60616
+ }
60571
60617
  inlineScripts.forEach((s) => {
60572
60618
  writeIfChanged(process.cwd() + SEP7 + folder + SEP7 + s.path, s.content);
60573
60619
  });
@@ -60578,10 +60624,25 @@ async function generateFlowLockInternal(folder, dryRun, workspace, opts, justUpd
60578
60624
  for (const [path7, hash2] of Object.entries(hashes)) {
60579
60625
  await updateMetadataGlobalLock(folder, hash2, path7);
60580
60626
  }
60581
- info(colors.green(`Flow ${remote_path} lockfiles updated`));
60627
+ if (!noStaleMessage) {
60628
+ info(colors.green(`Flow ${remote_path} lockfiles updated`));
60629
+ }
60630
+ const updatedScripts = changedScripts.map((p) => {
60631
+ const parts = p.split(SEP7);
60632
+ return parts[parts.length - 1].replace(/\.[^.]+$/, "");
60633
+ });
60634
+ return { path: remote_path, updatedScripts };
60582
60635
  }
60583
60636
  async function filterWorkspaceDependenciesForFlow(flowValue, rawWorkspaceDependencies, folder) {
60584
- const inlineScripts = extractInlineScripts(structuredClone(flowValue.modules), {}, SEP7, undefined);
60637
+ const clonedValue = structuredClone(flowValue);
60638
+ const depAssigner = newPathAssigner("bun");
60639
+ const inlineScripts = extractInlineScripts(clonedValue.modules, {}, SEP7, undefined, depAssigner);
60640
+ if (clonedValue.failure_module) {
60641
+ inlineScripts.push(...extractInlineScripts([clonedValue.failure_module], {}, SEP7, undefined, depAssigner));
60642
+ }
60643
+ if (clonedValue.preprocessor_module) {
60644
+ inlineScripts.push(...extractInlineScripts([clonedValue.preprocessor_module], {}, SEP7, undefined, depAssigner));
60645
+ }
60585
60646
  const scripts = inlineScripts.filter((s) => !s.is_lock).map((s) => ({ content: s.content, language: s.language }));
60586
60647
  return await filterWorkspaceDependenciesForScripts(scripts, rawWorkspaceDependencies, folder, SEP7);
60587
60648
  }
@@ -60645,6 +60706,7 @@ var init_flow_metadata = __esm(async () => {
60645
60706
  init_log();
60646
60707
  init_yaml();
60647
60708
  init_extractor();
60709
+ init_path_assigner();
60648
60710
  init_script_common();
60649
60711
  init_resource_folders();
60650
60712
  await __promiseAll([
@@ -61038,7 +61100,14 @@ function ZipFSElement(zip, useYaml, defaultTs, resourceTypeToFormatExtension, re
61038
61100
  }
61039
61101
  let inlineScripts;
61040
61102
  try {
61041
- inlineScripts = extractInlineScripts(flow.value.modules, {}, SEP8, defaultTs, undefined, { skipInlineScriptSuffix: getNonDottedPaths() });
61103
+ const assigner = newPathAssigner(defaultTs, { skipInlineScriptSuffix: getNonDottedPaths() });
61104
+ inlineScripts = extractInlineScripts(flow.value.modules, {}, SEP8, defaultTs, assigner, { skipInlineScriptSuffix: getNonDottedPaths() });
61105
+ if (flow.value.failure_module) {
61106
+ inlineScripts.push(...extractInlineScripts([flow.value.failure_module], {}, SEP8, defaultTs, assigner, { skipInlineScriptSuffix: getNonDottedPaths() }));
61107
+ }
61108
+ if (flow.value.preprocessor_module) {
61109
+ inlineScripts.push(...extractInlineScripts([flow.value.preprocessor_module], {}, SEP8, defaultTs, assigner, { skipInlineScriptSuffix: getNonDottedPaths() }));
61110
+ }
61042
61111
  } catch (error2) {
61043
61112
  error(`Failed to extract inline scripts for flow at path: ${p}`);
61044
61113
  throw error2;
@@ -62920,7 +62989,6 @@ __export(exports_metadata, {
62920
62989
  getRawWorkspaceDependencies: () => getRawWorkspaceDependencies,
62921
62990
  generateScriptMetadataInternal: () => generateScriptMetadataInternal,
62922
62991
  generateScriptHash: () => generateScriptHash,
62923
- generateAllMetadata: () => generateAllMetadata,
62924
62992
  filterWorkspaceDependenciesForScripts: () => filterWorkspaceDependenciesForScripts,
62925
62993
  filterWorkspaceDependencies: () => filterWorkspaceDependencies,
62926
62994
  extractWorkspaceDepsAnnotation: () => extractWorkspaceDepsAnnotation,
@@ -62941,14 +63009,13 @@ function loadParser(pkgName) {
62941
63009
  p = (async () => {
62942
63010
  const mod = await import(pkgName);
62943
63011
  const wasmPath = _require.resolve(`${pkgName}/windmill_parser_wasm_bg.wasm`);
62944
- await mod.default(readFileSync3(wasmPath));
63012
+ await mod.default({ module_or_path: readFileSync3(wasmPath) });
62945
63013
  return mod;
62946
63014
  })();
62947
63015
  _parserCache.set(pkgName, p);
62948
63016
  }
62949
63017
  return p;
62950
63018
  }
62951
- async function generateAllMetadata() {}
62952
63019
  async function getRawWorkspaceDependencies() {
62953
63020
  const rawWorkspaceDeps = {};
62954
63021
  try {
@@ -63035,7 +63102,7 @@ async function generateScriptMetadataInternal(scriptPath, workspace, opts, dryRu
63035
63102
  } else if (dryRun) {
63036
63103
  return `${remotePath} (${language})`;
63037
63104
  }
63038
- if (!justUpdateMetadataLock) {
63105
+ if (!justUpdateMetadataLock && !noStaleMessage) {
63039
63106
  info(colors.gray(`Generating metadata for ${scriptPath}`));
63040
63107
  }
63041
63108
  const metadataParsedContent = metadataWithType?.payload;
@@ -63343,6 +63410,9 @@ async function inferSchema(language, content, currentSchema, path8) {
63343
63410
  }
63344
63411
  currentSchema.properties[arg.name] = sortObject(currentSchema.properties[arg.name]);
63345
63412
  argSigToJsonSchemaType(arg.typ, currentSchema.properties[arg.name]);
63413
+ if (arg.otyp && arg.otyp.includes("[") && arg.otyp.includes("|")) {
63414
+ currentSchema.properties[arg.name].originalType = arg.otyp;
63415
+ }
63346
63416
  currentSchema.properties[arg.name].default = arg.default;
63347
63417
  if (!arg.has_default && !currentSchema.required.includes(arg.name)) {
63348
63418
  currentSchema.required.push(arg.name);
@@ -63828,6 +63898,7 @@ var init_raw_apps = __esm(async () => {
63828
63898
  var exports_app_metadata = {};
63829
63899
  __export(exports_app_metadata, {
63830
63900
  inferRunnableSchemaFromFile: () => inferRunnableSchemaFromFile,
63901
+ getAppFolders: () => getAppFolders,
63831
63902
  generateLocksCommand: () => generateLocksCommand,
63832
63903
  generateAppLocksInternal: () => generateAppLocksInternal,
63833
63904
  filterWorkspaceDependenciesForApp: () => filterWorkspaceDependenciesForApp,
@@ -63883,9 +63954,10 @@ async function generateAppLocksInternal(appFolder, rawApp, dryRun, workspace, op
63883
63954
  } else if (dryRun) {
63884
63955
  return remote_path;
63885
63956
  }
63886
- if (Object.keys(filteredDeps).length > 0) {
63957
+ if (Object.keys(filteredDeps).length > 0 && !noStaleMessage) {
63887
63958
  info((await blueColor())(`Found workspace dependencies (${workspaceDependenciesLanguages.map((l) => l.filename).join("/")}) for ${appFolder}, using them`));
63888
63959
  }
63960
+ let updatedScripts = [];
63889
63961
  if (!justUpdateMetadataLock) {
63890
63962
  const changedScripts = [];
63891
63963
  for (const [scriptPath, hash2] of Object.entries(hashes)) {
@@ -63897,7 +63969,9 @@ async function generateAppLocksInternal(appFolder, rawApp, dryRun, workspace, op
63897
63969
  }
63898
63970
  }
63899
63971
  if (changedScripts.length > 0) {
63900
- info(`Recomputing locks of ${changedScripts.join(", ")} in ${appFolder}`);
63972
+ if (!noStaleMessage) {
63973
+ info(`Recomputing locks of ${changedScripts.join(", ")} in ${appFolder}`);
63974
+ }
63901
63975
  if (rawApp) {
63902
63976
  const runnablesPath = path9.join(appFolder, APP_BACKEND_FOLDER);
63903
63977
  const rawAppFile = appFile;
@@ -63906,14 +63980,16 @@ async function generateAppLocksInternal(appFolder, rawApp, dryRun, workspace, op
63906
63980
  runnables = rawAppFile.runnables;
63907
63981
  }
63908
63982
  replaceInlineScripts2(runnables, runnablesPath + SEP11, false);
63909
- await updateRawAppRunnables(workspace, runnables, remote_path, appFolder, filteredDeps, opts.defaultTs);
63983
+ updatedScripts = await updateRawAppRunnables(workspace, runnables, remote_path, appFolder, filteredDeps, opts.defaultTs, noStaleMessage);
63910
63984
  } else {
63911
63985
  const normalAppFile = appFile;
63912
63986
  replaceInlineScripts2(normalAppFile.value, appFolder + SEP11, false);
63913
- normalAppFile.value = await updateAppInlineScripts(workspace, normalAppFile.value, remote_path, appFolder, filteredDeps, opts.defaultTs);
63987
+ const result = await updateAppInlineScripts(workspace, normalAppFile.value, remote_path, appFolder, filteredDeps, opts.defaultTs, noStaleMessage);
63988
+ normalAppFile.value = result.value;
63989
+ updatedScripts = result.updatedScripts;
63914
63990
  writeIfChanged(appFilePath, import_yaml17.stringify(appFile, yamlOptions));
63915
63991
  }
63916
- } else {
63992
+ } else if (!noStaleMessage) {
63917
63993
  info(colors.gray(`No scripts changed in ${appFolder}`));
63918
63994
  }
63919
63995
  }
@@ -63922,7 +63998,10 @@ async function generateAppLocksInternal(appFolder, rawApp, dryRun, workspace, op
63922
63998
  for (const [scriptPath, hash2] of Object.entries(hashes)) {
63923
63999
  await updateMetadataGlobalLock(appFolder, hash2, scriptPath);
63924
64000
  }
63925
- info(colors.green(`App ${remote_path} lockfiles updated`));
64001
+ if (!noStaleMessage) {
64002
+ info(colors.green(`App ${remote_path} lockfiles updated`));
64003
+ }
64004
+ return { path: remote_path, updatedScripts };
63926
64005
  }
63927
64006
  async function filterWorkspaceDependenciesForApp(appValue, rawWorkspaceDependencies, folder) {
63928
64007
  const scripts = [];
@@ -63964,7 +64043,8 @@ async function traverseAndProcessInlineScripts(obj, processor, currentPath = [])
63964
64043
  }
63965
64044
  return result;
63966
64045
  }
63967
- async function updateRawAppRunnables(workspace, runnables, remotePath, appFolder, rawDeps, defaultTs = "bun") {
64046
+ async function updateRawAppRunnables(workspace, runnables, remotePath, appFolder, rawDeps, defaultTs = "bun", noStaleMessage) {
64047
+ const updatedRunnables = [];
63968
64048
  const runnablesFolder = path9.join(appFolder, APP_BACKEND_FOLDER);
63969
64049
  try {
63970
64050
  await mkdir4(runnablesFolder, { recursive: true });
@@ -64001,8 +64081,9 @@ async function updateRawAppRunnables(workspace, runnables, remotePath, appFolder
64001
64081
  writeRunnableToBackend(runnablesFolder, runnableId, simplifiedRunnable);
64002
64082
  continue;
64003
64083
  }
64004
- info(colors.gray(`Generating lock for runnable ${runnableId} (${language})
64005
- }`));
64084
+ if (!noStaleMessage) {
64085
+ info(colors.gray(`Generating lock for runnable ${runnableId} (${language})`));
64086
+ }
64006
64087
  try {
64007
64088
  const lock = await generateInlineScriptLock(workspace, content, language, `${remotePath}/${runnableId}`, rawDeps);
64008
64089
  const [basePathO, ext2] = pathAssigner.assignPath(runnable.name ?? runnableId, language);
@@ -64020,15 +64101,20 @@ async function updateRawAppRunnables(workspace, runnables, remotePath, appFolder
64020
64101
  }
64021
64102
  }
64022
64103
  writeRunnableToBackend(runnablesFolder, runnableId, simplifiedRunnable);
64023
- info(colors.gray(` Written ${runnableId}.yaml, ${basePath}${ext2}${lock ? ` and ${basePath}lock` : ""}`));
64104
+ updatedRunnables.push(runnableId);
64105
+ if (!noStaleMessage) {
64106
+ info(colors.gray(` Written ${runnableId}.yaml, ${basePath}${ext2}${lock ? ` and ${basePath}lock` : ""}`));
64107
+ }
64024
64108
  } catch (error2) {
64025
64109
  error(colors.red(`Failed to generate lock for runnable ${runnableId}: ${error2.message}`));
64026
64110
  writeRunnableToBackend(runnablesFolder, runnableId, runnable);
64027
64111
  }
64028
64112
  }
64113
+ return updatedRunnables;
64029
64114
  }
64030
- async function updateAppInlineScripts(workspace, appValue, remotePath, appFolder, rawDeps, defaultTs = "bun") {
64115
+ async function updateAppInlineScripts(workspace, appValue, remotePath, appFolder, rawDeps, defaultTs = "bun", noStaleMessage) {
64031
64116
  const pathAssigner = newPathAssigner(defaultTs, { skipInlineScriptSuffix: getNonDottedPaths() });
64117
+ const updatedScripts = [];
64032
64118
  const processor = async (inlineScript, context) => {
64033
64119
  const language = inlineScript.language;
64034
64120
  const content = inlineScript.content;
@@ -64044,7 +64130,9 @@ async function updateAppInlineScripts(workspace, appValue, remotePath, appFolder
64044
64130
  try {
64045
64131
  let lock;
64046
64132
  if (language !== "frontend") {
64047
- info(colors.gray(`Generating lock for inline script "${scriptName}" at ${context.path.join(".")} (${language})`));
64133
+ if (!noStaleMessage) {
64134
+ info(colors.gray(`Generating lock for inline script "${scriptName}" at ${context.path.join(".")} (${language})`));
64135
+ }
64048
64136
  lock = await generateInlineScriptLock(workspace, content, language, scriptPath, rawDeps);
64049
64137
  }
64050
64138
  const [basePathO, ext2] = pathAssigner.assignPath(scriptName, language);
@@ -64057,7 +64145,12 @@ async function updateAppInlineScripts(workspace, appValue, remotePath, appFolder
64057
64145
  }
64058
64146
  const inlineContentRef = `!inline ${basePath}${ext2}`;
64059
64147
  const inlineLockRef = lock && lock !== "" ? `!inline ${basePath}lock` : "";
64060
- info(colors.gray(` Written ${basePath}${ext2}${lock ? ` and ${basePath}lock` : ""}`));
64148
+ if (!noStaleMessage) {
64149
+ info(colors.gray(` Written ${basePath}${ext2}${lock ? ` and ${basePath}lock` : ""}`));
64150
+ }
64151
+ if (language !== "frontend") {
64152
+ updatedScripts.push(scriptName);
64153
+ }
64061
64154
  return {
64062
64155
  ...inlineScript,
64063
64156
  content: inlineContentRef,
@@ -64068,7 +64161,8 @@ async function updateAppInlineScripts(workspace, appValue, remotePath, appFolder
64068
64161
  return inlineScript;
64069
64162
  }
64070
64163
  };
64071
- return await traverseAndProcessInlineScripts(appValue, processor);
64164
+ const updatedValue = await traverseAndProcessInlineScripts(appValue, processor);
64165
+ return { value: updatedValue, updatedScripts };
64072
64166
  }
64073
64167
  async function generateInlineScriptLock(workspace, content, language, scriptPath, rawWorkspaceDependencies) {
64074
64168
  const filteredDeps = rawWorkspaceDependencies ? filterWorkspaceDependencies(rawWorkspaceDependencies, content, language) : undefined;
@@ -66377,6 +66471,7 @@ var init_app = __esm(async () => {
66377
66471
  ]);
66378
66472
  alreadySynced2 = [];
66379
66473
  command11 = new Command().description("app related commands").option("--json", "Output as JSON (for piping to jq)").action(list4).command("list", "list all apps").option("--json", "Output as JSON (for piping to jq)").action(list4).command("get", "get an app's details").arguments("<path:string>").option("--json", "Output as JSON (for piping to jq)").action(get3).command("push", "push a local app ").arguments("<file_path:string> <remote_path:string>").action(push4).command("dev", dev_default).command("lint", lint_default2).command("new", new_default).command("generate-agents", generate_agents_default).command("generate-locks", "re-generate the lockfiles for app runnables inline scripts that have changed").arguments("[app_folder:string]").option("--yes", "Skip confirmation prompt").option("--dry-run", "Perform a dry run without making changes").option("--default-ts <runtime:string>", "Default TypeScript runtime (bun or deno)").action(async (opts, appFolder) => {
66474
+ warn(colors.yellow('This command is deprecated. Use "wmill generate-metadata" instead.'));
66380
66475
  const { generateLocksCommand: generateLocksCommand2 } = await init_app_metadata().then(() => exports_app_metadata);
66381
66476
  await generateLocksCommand2(opts, appFolder);
66382
66477
  });
@@ -67195,6 +67290,8 @@ function migrateToGroupedFormat(settings) {
67195
67290
  result.color = settings.color;
67196
67291
  if (settings.operator_settings !== undefined)
67197
67292
  result.operator_settings = settings.operator_settings;
67293
+ if (settings.datatable !== undefined)
67294
+ result.datatable = settings.datatable;
67198
67295
  if (settings.slack_team_id !== undefined)
67199
67296
  result.slack_team_id = settings.slack_team_id;
67200
67297
  if (settings.slack_name !== undefined)
@@ -67263,6 +67360,7 @@ async function pushWorkspaceSettings(workspace, _path, settings, localSettings)
67263
67360
  mute_critical_alerts: remoteSettings.mute_critical_alerts,
67264
67361
  color: remoteSettings.color,
67265
67362
  operator_settings: remoteSettings.operator_settings,
67363
+ datatable: remoteSettings.datatable,
67266
67364
  slack_team_id: remoteSettings.slack_team_id,
67267
67365
  slack_name: remoteSettings.slack_name,
67268
67366
  slack_command_script: remoteSettings.slack_command_script
@@ -67421,6 +67519,13 @@ async function pushWorkspaceSettings(workspace, _path, settings, localSettings)
67421
67519
  requestBody: localSettings.operator_settings
67422
67520
  });
67423
67521
  }
67522
+ if (!deepEqual(localSettings.datatable, settings.datatable)) {
67523
+ debug(`Updating datatable config...`);
67524
+ await editDataTableConfig({
67525
+ workspace,
67526
+ requestBody: { settings: localSettings.datatable ?? { datatables: {} } }
67527
+ });
67528
+ }
67424
67529
  if (localSettings.slack_command_script != settings.slack_command_script) {
67425
67530
  debug(`Updating slack command script...`);
67426
67531
  await editSlackCommand({
@@ -69294,7 +69399,14 @@ async function pushFlow(workspace, remotePath, localPath, message) {
69294
69399
  localPath += SEP19;
69295
69400
  }
69296
69401
  const localFlow = await yamlParseFile(localPath + "flow.yaml");
69297
- await replaceInlineScripts(localFlow.value.modules, async (path17) => await readFile14(localPath + path17, "utf-8"), exports_log, localPath, SEP19);
69402
+ const fileReader = async (path17) => await readFile14(localPath + path17, "utf-8");
69403
+ await replaceInlineScripts(localFlow.value.modules, fileReader, exports_log, localPath, SEP19);
69404
+ if (localFlow.value.failure_module) {
69405
+ await replaceInlineScripts([localFlow.value.failure_module], fileReader, exports_log, localPath, SEP19);
69406
+ }
69407
+ if (localFlow.value.preprocessor_module) {
69408
+ await replaceInlineScripts([localFlow.value.preprocessor_module], fileReader, exports_log, localPath, SEP19);
69409
+ }
69298
69410
  if (flow) {
69299
69411
  if (isSuperset(localFlow, flow)) {
69300
69412
  info(colors.green(`Flow ${remotePath} is up to date`));
@@ -69436,7 +69548,14 @@ async function preview2(opts, flowPath) {
69436
69548
  flowPath += SEP19;
69437
69549
  }
69438
69550
  const localFlow = await yamlParseFile(flowPath + "flow.yaml");
69439
- await replaceInlineScripts(localFlow.value.modules, async (path17) => await readFile14(flowPath + path17, "utf-8"), exports_log, flowPath, SEP19);
69551
+ const fileReader = async (path17) => await readFile14(flowPath + path17, "utf-8");
69552
+ await replaceInlineScripts(localFlow.value.modules, fileReader, exports_log, flowPath, SEP19);
69553
+ if (localFlow.value.failure_module) {
69554
+ await replaceInlineScripts([localFlow.value.failure_module], fileReader, exports_log, flowPath, SEP19);
69555
+ }
69556
+ if (localFlow.value.preprocessor_module) {
69557
+ await replaceInlineScripts([localFlow.value.preprocessor_module], fileReader, exports_log, flowPath, SEP19);
69558
+ }
69440
69559
  const input = opts.data ? await resolve6(opts.data) : {};
69441
69560
  if (!opts.silent) {
69442
69561
  info(colors.yellow(`Running flow preview for ${flowPath}...`));
@@ -69466,6 +69585,7 @@ async function preview2(opts, flowPath) {
69466
69585
  }
69467
69586
  }
69468
69587
  async function generateLocks(opts, folder) {
69588
+ warn(colors.yellow('This command is deprecated. Use "wmill generate-metadata" instead.'));
69469
69589
  const workspace = await resolveWorkspace(opts);
69470
69590
  await requireLogin(opts);
69471
69591
  opts = await mergeConfigWithConfigFile(opts);
@@ -72174,25 +72294,25 @@ import { stat as stat15, writeFile as writeFile19, rm as rm4, mkdir as mkdir8 }
72174
72294
 
72175
72295
  // src/guidance/skills.ts
72176
72296
  var SKILLS = [
72177
- { name: "write-script-go", description: "MUST use when writing Go scripts.", languageKey: "go" },
72178
- { name: "write-script-java", description: "MUST use when writing Java scripts.", languageKey: "java" },
72179
- { name: "write-script-graphql", description: "MUST use when writing GraphQL queries.", languageKey: "graphql" },
72180
- { name: "write-script-rust", description: "MUST use when writing Rust scripts.", languageKey: "rust" },
72181
- { name: "write-script-bunnative", description: "MUST use when writing Bun Native scripts.", languageKey: "bunnative" },
72182
- { name: "write-script-postgresql", description: "MUST use when writing PostgreSQL queries.", languageKey: "postgresql" },
72183
- { name: "write-script-php", description: "MUST use when writing PHP scripts.", languageKey: "php" },
72297
+ { name: "write-script-bash", description: "MUST use when writing Bash scripts.", languageKey: "bash" },
72184
72298
  { name: "write-script-bigquery", description: "MUST use when writing BigQuery queries.", languageKey: "bigquery" },
72185
72299
  { name: "write-script-bun", description: "MUST use when writing Bun/TypeScript scripts.", languageKey: "bun" },
72300
+ { name: "write-script-bunnative", description: "MUST use when writing Bun Native scripts.", languageKey: "bunnative" },
72186
72301
  { name: "write-script-csharp", description: "MUST use when writing C# scripts.", languageKey: "csharp" },
72187
- { name: "write-script-mssql", description: "MUST use when writing MS SQL Server queries.", languageKey: "mssql" },
72188
72302
  { name: "write-script-deno", description: "MUST use when writing Deno/TypeScript scripts.", languageKey: "deno" },
72303
+ { name: "write-script-duckdb", description: "MUST use when writing DuckDB queries.", languageKey: "duckdb" },
72304
+ { name: "write-script-go", description: "MUST use when writing Go scripts.", languageKey: "go" },
72305
+ { name: "write-script-graphql", description: "MUST use when writing GraphQL queries.", languageKey: "graphql" },
72306
+ { name: "write-script-java", description: "MUST use when writing Java scripts.", languageKey: "java" },
72307
+ { name: "write-script-mssql", description: "MUST use when writing MS SQL Server queries.", languageKey: "mssql" },
72189
72308
  { name: "write-script-mysql", description: "MUST use when writing MySQL queries.", languageKey: "mysql" },
72309
+ { name: "write-script-nativets", description: "MUST use when writing Native TypeScript scripts.", languageKey: "nativets" },
72310
+ { name: "write-script-php", description: "MUST use when writing PHP scripts.", languageKey: "php" },
72311
+ { name: "write-script-postgresql", description: "MUST use when writing PostgreSQL queries.", languageKey: "postgresql" },
72190
72312
  { name: "write-script-powershell", description: "MUST use when writing PowerShell scripts.", languageKey: "powershell" },
72191
- { name: "write-script-snowflake", description: "MUST use when writing Snowflake queries.", languageKey: "snowflake" },
72192
72313
  { name: "write-script-python3", description: "MUST use when writing Python scripts.", languageKey: "python3" },
72193
- { name: "write-script-duckdb", description: "MUST use when writing DuckDB queries.", languageKey: "duckdb" },
72194
- { name: "write-script-bash", description: "MUST use when writing Bash scripts.", languageKey: "bash" },
72195
- { name: "write-script-nativets", description: "MUST use when writing Native TypeScript scripts.", languageKey: "nativets" },
72314
+ { name: "write-script-rust", description: "MUST use when writing Rust scripts.", languageKey: "rust" },
72315
+ { name: "write-script-snowflake", description: "MUST use when writing Snowflake queries.", languageKey: "snowflake" },
72196
72316
  { name: "write-flow", description: "MUST use when creating flows." },
72197
72317
  { name: "raw-app", description: "MUST use when creating raw apps." },
72198
72318
  { name: "triggers", description: "MUST use when configuring triggers." },
@@ -72201,294 +72321,116 @@ var SKILLS = [
72201
72321
  { name: "cli-commands", description: "MUST use when using the CLI." }
72202
72322
  ];
72203
72323
  var SKILL_CONTENT = {
72204
- "write-script-go": `---
72205
- name: write-script-go
72206
- description: MUST use when writing Go scripts.
72324
+ "write-script-bash": `---
72325
+ name: write-script-bash
72326
+ description: MUST use when writing Bash scripts.
72207
72327
  ---
72208
72328
 
72209
72329
  ## CLI Commands
72210
72330
 
72211
- Place scripts in a folder. After writing, run:
72331
+ Place scripts in a folder. After writing, tell the user they can run:
72212
72332
  - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
72213
72333
  - \`wmill sync push\` - Deploy to Windmill
72214
72334
 
72335
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
72336
+
72215
72337
  Use \`wmill resource-type list --schema\` to discover available resource types.
72216
72338
 
72217
- # Go
72339
+ # Bash
72218
72340
 
72219
72341
  ## Structure
72220
72342
 
72221
- The file package must be \`inner\` and export a function called \`main\`:
72343
+ Do not include \`#!/bin/bash\`. Arguments are obtained as positional parameters:
72222
72344
 
72223
- \`\`\`go
72224
- package inner
72345
+ \`\`\`bash
72346
+ # Get arguments
72347
+ var1="$1"
72348
+ var2="$2"
72225
72349
 
72226
- func main(param1 string, param2 int) (map[string]interface{}, error) {
72227
- return map[string]interface{}{
72228
- "result": param1,
72229
- "count": param2,
72230
- }, nil
72231
- }
72350
+ echo "Processing $var1 and $var2"
72351
+
72352
+ # Return JSON by echoing to stdout
72353
+ echo "{\\"result\\": \\"$var1\\", \\"count\\": $var2}"
72232
72354
  \`\`\`
72233
72355
 
72234
72356
  **Important:**
72235
- - Package must be \`inner\`
72236
- - Return type must be \`({return_type}, error)\`
72237
- - Function name is \`main\` (lowercase)
72238
-
72239
- ## Return Types
72357
+ - Do not include shebang (\`#!/bin/bash\`)
72358
+ - Arguments are always strings
72359
+ - Access with \`$1\`, \`$2\`, etc.
72240
72360
 
72241
- The return type can be any Go type that can be serialized to JSON:
72361
+ ## Output
72242
72362
 
72243
- \`\`\`go
72244
- package inner
72363
+ The script output is captured as the result. For structured data, output valid JSON:
72245
72364
 
72246
- type Result struct {
72247
- Name string \`json:"name"\`
72248
- Count int \`json:"count"\`
72249
- }
72365
+ \`\`\`bash
72366
+ name="$1"
72367
+ count="$2"
72250
72368
 
72251
- func main(name string, count int) (Result, error) {
72252
- return Result{
72253
- Name: name,
72254
- Count: count,
72255
- }, nil
72369
+ # Output JSON result
72370
+ cat << EOF
72371
+ {
72372
+ "name": "$name",
72373
+ "count": $count,
72374
+ "timestamp": "$(date -Iseconds)"
72256
72375
  }
72376
+ EOF
72257
72377
  \`\`\`
72258
72378
 
72259
- ## Error Handling
72260
-
72261
- Return errors as the second return value:
72262
-
72263
- \`\`\`go
72264
- package inner
72379
+ ## Environment Variables
72265
72380
 
72266
- import "errors"
72381
+ Environment variables set in Windmill are available:
72267
72382
 
72268
- func main(value int) (string, error) {
72269
- if value < 0 {
72270
- return "", errors.New("value must be positive")
72271
- }
72272
- return "success", nil
72273
- }
72383
+ \`\`\`bash
72384
+ # Access environment variable
72385
+ echo "Workspace: $WM_WORKSPACE"
72386
+ echo "Job ID: $WM_JOB_ID"
72274
72387
  \`\`\`
72275
72388
  `,
72276
- "write-script-java": `---
72277
- name: write-script-java
72278
- description: MUST use when writing Java scripts.
72389
+ "write-script-bigquery": `---
72390
+ name: write-script-bigquery
72391
+ description: MUST use when writing BigQuery queries.
72279
72392
  ---
72280
72393
 
72281
72394
  ## CLI Commands
72282
72395
 
72283
- Place scripts in a folder. After writing, run:
72396
+ Place scripts in a folder. After writing, tell the user they can run:
72284
72397
  - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
72285
72398
  - \`wmill sync push\` - Deploy to Windmill
72286
72399
 
72287
- Use \`wmill resource-type list --schema\` to discover available resource types.
72288
-
72289
- # Java
72290
-
72291
- The script must contain a Main public class with a \`public static main()\` method:
72292
-
72293
- \`\`\`java
72294
- public class Main {
72295
- public static Object main(String name, int count) {
72296
- java.util.Map<String, Object> result = new java.util.HashMap<>();
72297
- result.put("name", name);
72298
- result.put("count", count);
72299
- return result;
72300
- }
72301
- }
72302
- \`\`\`
72303
-
72304
- **Important:**
72305
- - Class must be named \`Main\`
72306
- - Method must be \`public static Object main(...)\`
72307
- - Return type is \`Object\` or \`void\`
72308
-
72309
- ## Maven Dependencies
72310
-
72311
- Add dependencies using comments at the top:
72312
-
72313
- \`\`\`java
72314
- //requirements:
72315
- //com.google.code.gson:gson:2.10.1
72316
- //org.apache.httpcomponents:httpclient:4.5.14
72317
-
72318
- import com.google.gson.Gson;
72319
-
72320
- public class Main {
72321
- public static Object main(String input) {
72322
- Gson gson = new Gson();
72323
- return gson.fromJson(input, Object.class);
72324
- }
72325
- }
72326
- \`\`\`
72327
- `,
72328
- "write-script-graphql": `---
72329
- name: write-script-graphql
72330
- description: MUST use when writing GraphQL queries.
72331
- ---
72332
-
72333
- ## CLI Commands
72334
-
72335
- Place scripts in a folder. After writing, run:
72336
- - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
72337
- - \`wmill sync push\` - Deploy to Windmill
72400
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
72338
72401
 
72339
72402
  Use \`wmill resource-type list --schema\` to discover available resource types.
72340
72403
 
72341
- # GraphQL
72342
-
72343
- ## Structure
72344
-
72345
- Write GraphQL queries or mutations. Arguments can be added as query parameters:
72346
-
72347
- \`\`\`graphql
72348
- query GetUser($id: ID!) {
72349
- user(id: $id) {
72350
- id
72351
- name
72352
- email
72353
- }
72354
- }
72355
- \`\`\`
72356
-
72357
- ## Variables
72358
-
72359
- Variables are passed as script arguments and automatically bound to the query:
72404
+ # BigQuery
72360
72405
 
72361
- \`\`\`graphql
72362
- query SearchProducts($query: String!, $limit: Int = 10) {
72363
- products(search: $query, first: $limit) {
72364
- edges {
72365
- node {
72366
- id
72367
- name
72368
- price
72369
- }
72370
- }
72371
- }
72372
- }
72373
- \`\`\`
72406
+ Arguments use \`@name\` syntax.
72374
72407
 
72375
- ## Mutations
72408
+ Name the parameters by adding comments before the statement:
72376
72409
 
72377
- \`\`\`graphql
72378
- mutation CreateUser($input: CreateUserInput!) {
72379
- createUser(input: $input) {
72380
- id
72381
- name
72382
- createdAt
72383
- }
72384
- }
72410
+ \`\`\`sql
72411
+ -- @name1 (string)
72412
+ -- @name2 (int64) = 0
72413
+ SELECT * FROM users WHERE name = @name1 AND age > @name2;
72385
72414
  \`\`\`
72386
72415
  `,
72387
- "write-script-rust": `---
72388
- name: write-script-rust
72389
- description: MUST use when writing Rust scripts.
72416
+ "write-script-bun": `---
72417
+ name: write-script-bun
72418
+ description: MUST use when writing Bun/TypeScript scripts.
72390
72419
  ---
72391
72420
 
72392
72421
  ## CLI Commands
72393
72422
 
72394
- Place scripts in a folder. After writing, run:
72423
+ Place scripts in a folder. After writing, tell the user they can run:
72395
72424
  - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
72396
72425
  - \`wmill sync push\` - Deploy to Windmill
72397
72426
 
72398
- Use \`wmill resource-type list --schema\` to discover available resource types.
72399
-
72400
- # Rust
72401
-
72402
- ## Structure
72403
-
72404
- The script must contain a function called \`main\` with proper return type:
72405
-
72406
- \`\`\`rust
72407
- use anyhow::anyhow;
72408
- use serde::Serialize;
72409
-
72410
- #[derive(Serialize, Debug)]
72411
- struct ReturnType {
72412
- result: String,
72413
- count: i32,
72414
- }
72415
-
72416
- fn main(param1: String, param2: i32) -> anyhow::Result<ReturnType> {
72417
- Ok(ReturnType {
72418
- result: param1,
72419
- count: param2,
72420
- })
72421
- }
72422
- \`\`\`
72423
-
72424
- **Important:**
72425
- - Arguments should be owned types
72426
- - Return type must be serializable (\`#[derive(Serialize)]\`)
72427
- - Return type is \`anyhow::Result<T>\`
72428
-
72429
- ## Dependencies
72430
-
72431
- Packages must be specified with a partial cargo.toml at the beginning of the script:
72432
-
72433
- \`\`\`rust
72434
- //! \`\`\`cargo
72435
- //! [dependencies]
72436
- //! anyhow = "1.0.86"
72437
- //! reqwest = { version = "0.11", features = ["json"] }
72438
- //! tokio = { version = "1", features = ["full"] }
72439
- //! \`\`\`
72440
-
72441
- use anyhow::anyhow;
72442
- // ... rest of the code
72443
- \`\`\`
72444
-
72445
- **Note:** Serde is already included, no need to add it again.
72446
-
72447
- ## Async Functions
72448
-
72449
- If you need to handle async functions (e.g., using tokio), keep the main function sync and create the runtime inside:
72450
-
72451
- \`\`\`rust
72452
- //! \`\`\`cargo
72453
- //! [dependencies]
72454
- //! anyhow = "1.0.86"
72455
- //! tokio = { version = "1", features = ["full"] }
72456
- //! reqwest = { version = "0.11", features = ["json"] }
72457
- //! \`\`\`
72458
-
72459
- use anyhow::anyhow;
72460
- use serde::Serialize;
72461
-
72462
- #[derive(Serialize, Debug)]
72463
- struct Response {
72464
- data: String,
72465
- }
72466
-
72467
- fn main(url: String) -> anyhow::Result<Response> {
72468
- let rt = tokio::runtime::Runtime::new()?;
72469
- rt.block_on(async {
72470
- let resp = reqwest::get(&url).await?.text().await?;
72471
- Ok(Response { data: resp })
72472
- })
72473
- }
72474
- \`\`\`
72475
- `,
72476
- "write-script-bunnative": `---
72477
- name: write-script-bunnative
72478
- description: MUST use when writing Bun Native scripts.
72479
- ---
72480
-
72481
- ## CLI Commands
72482
-
72483
- Place scripts in a folder. After writing, run:
72484
- - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
72485
- - \`wmill sync push\` - Deploy to Windmill
72427
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
72486
72428
 
72487
72429
  Use \`wmill resource-type list --schema\` to discover available resource types.
72488
72430
 
72489
- # TypeScript (Bun Native)
72431
+ # TypeScript (Bun)
72490
72432
 
72491
- Native TypeScript execution with fetch only - no external imports allowed.
72433
+ Bun runtime with full npm ecosystem and fastest execution.
72492
72434
 
72493
72435
  ## Structure
72494
72436
 
@@ -72501,7 +72443,7 @@ export async function main(param1: string, param2: number) {
72501
72443
  }
72502
72444
  \`\`\`
72503
72445
 
72504
- Do not call the main function.
72446
+ Do not call the main function. Libraries are installed automatically.
72505
72447
 
72506
72448
  ## Resource Types
72507
72449
 
@@ -72521,18 +72463,20 @@ Before using a resource type, check the \`rt.d.ts\` file in the project root to
72521
72463
 
72522
72464
  ## Imports
72523
72465
 
72524
- **No imports allowed.** Use the globally available \`fetch\` function:
72525
-
72526
72466
  \`\`\`typescript
72527
- export async function main(url: string) {
72528
- const response = await fetch(url);
72529
- return await response.json();
72530
- }
72467
+ import Stripe from "stripe";
72468
+ import { someFunction } from "some-package";
72531
72469
  \`\`\`
72532
72470
 
72533
72471
  ## Windmill Client
72534
72472
 
72535
- The windmill client is not available in native TypeScript mode. Use fetch to call APIs directly.
72473
+ Import the windmill client for platform interactions:
72474
+
72475
+ \`\`\`typescript
72476
+ import * as wmill from "windmill-client";
72477
+ \`\`\`
72478
+
72479
+ See the SDK documentation for available methods.
72536
72480
 
72537
72481
  ## Preprocessor Scripts
72538
72482
 
@@ -72602,36 +72546,6 @@ const result: S3Object = await wmill.writeS3File(
72602
72546
 
72603
72547
  Import: import * as wmill from 'windmill-client'
72604
72548
 
72605
- /**
72606
- * Create a SQL template function for PostgreSQL/datatable queries
72607
- * @param name - Database/datatable name (default: "main")
72608
- * @returns SQL template function for building parameterized queries
72609
- * @example
72610
- * let sql = wmill.datatable()
72611
- * let name = 'Robin'
72612
- * let age = 21
72613
- * await sql\`
72614
- * SELECT * FROM friends
72615
- * WHERE name = \${name} AND age = \${age}::int
72616
- * \`.fetch()
72617
- */
72618
- datatable(name: string = "main"): DatatableSqlTemplateFunction
72619
-
72620
- /**
72621
- * Create a SQL template function for DuckDB/ducklake queries
72622
- * @param name - DuckDB database name (default: "main")
72623
- * @returns SQL template function for building parameterized queries
72624
- * @example
72625
- * let sql = wmill.ducklake()
72626
- * let name = 'Robin'
72627
- * let age = 21
72628
- * await sql\`
72629
- * SELECT * FROM friends
72630
- * WHERE name = \${name} AND age = \${age}
72631
- * \`.fetch()
72632
- */
72633
- ducklake(name: string = "main"): SqlTemplateFunction
72634
-
72635
72549
  /**
72636
72550
  * Initialize the Windmill client with authentication token and base URL
72637
72551
  * @param token - Authentication token (defaults to WM_TOKEN env variable)
@@ -73124,144 +73038,64 @@ waitForApproval(options?: { timeout?: number; form?: object; }): PromiseLike<{ v
73124
73038
  * const results = await parallel(items, process, { concurrency: 5 });
73125
73039
  */
73126
73040
  async parallel<T, R>(items: T[], fn: (item: T) => PromiseLike<R> | R, options?: { concurrency?: number },): Promise<R[]>
73127
- `,
73128
- "write-script-postgresql": `---
73129
- name: write-script-postgresql
73130
- description: MUST use when writing PostgreSQL queries.
73131
- ---
73132
-
73133
- ## CLI Commands
73134
73041
 
73135
- Place scripts in a folder. After writing, run:
73136
- - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
73137
- - \`wmill sync push\` - Deploy to Windmill
73138
-
73139
- Use \`wmill resource-type list --schema\` to discover available resource types.
73140
-
73141
- # PostgreSQL
73142
-
73143
- Arguments are obtained directly in the statement with \`$1::{type}\`, \`$2::{type}\`, etc.
73144
-
73145
- Name the parameters by adding comments at the beginning of the script (without specifying the type):
73146
-
73147
- \`\`\`sql
73148
- -- $1 name1
73149
- -- $2 name2 = default_value
73150
- SELECT * FROM users WHERE name = $1::TEXT AND age > $2::INT;
73151
- \`\`\`
73152
- `,
73153
- "write-script-php": `---
73154
- name: write-script-php
73155
- description: MUST use when writing PHP scripts.
73156
- ---
73157
-
73158
- ## CLI Commands
73159
-
73160
- Place scripts in a folder. After writing, run:
73161
- - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
73162
- - \`wmill sync push\` - Deploy to Windmill
73163
-
73164
- Use \`wmill resource-type list --schema\` to discover available resource types.
73165
-
73166
- # PHP
73167
-
73168
- ## Structure
73169
-
73170
- The script must start with \`<?php\` and contain at least one function called \`main\`:
73171
-
73172
- \`\`\`php
73173
- <?php
73174
-
73175
- function main(string $param1, int $param2) {
73176
- return ["result" => $param1, "count" => $param2];
73177
- }
73178
- \`\`\`
73179
-
73180
- ## Resource Types
73181
-
73182
- On Windmill, credentials and configuration are stored in resources and passed as parameters to main.
73183
-
73184
- You need to **redefine** the type of the resources that are needed before the main function. Always check if the class already exists using \`class_exists\`:
73185
-
73186
- \`\`\`php
73187
- <?php
73188
-
73189
- if (!class_exists('Postgresql')) {
73190
- class Postgresql {
73191
- public string $host;
73192
- public int $port;
73193
- public string $user;
73194
- public string $password;
73195
- public string $dbname;
73196
- }
73197
- }
73198
-
73199
- function main(Postgresql $db) {
73200
- // $db contains the database connection details
73201
- }
73202
- \`\`\`
73203
-
73204
- The resource type name has to be exactly as specified.
73205
-
73206
- ## Library Dependencies
73207
-
73208
- Specify library dependencies as comments before the main function:
73209
-
73210
- \`\`\`php
73211
- <?php
73212
-
73213
- // require:
73214
- // guzzlehttp/guzzle
73215
- // stripe/stripe-php@^10.0
73042
+ /**
73043
+ * Commit Kafka offsets for a trigger with auto_commit disabled.
73044
+ * @param triggerPath - Path to the Kafka trigger (from event.wm_trigger.trigger_path)
73045
+ * @param topic - Kafka topic name (from event.topic)
73046
+ * @param partition - Partition number (from event.partition)
73047
+ * @param offset - Message offset to commit (from event.offset)
73048
+ */
73049
+ async commitKafkaOffsets(triggerPath: string, topic: string, partition: number, offset: number,): Promise<void>
73216
73050
 
73217
- function main() {
73218
- // Libraries are available
73219
- }
73220
- \`\`\`
73051
+ /**
73052
+ * Create a SQL template function for PostgreSQL/datatable queries
73053
+ * @param name - Database/datatable name (default: "main")
73054
+ * @returns SQL template function for building parameterized queries
73055
+ * @example
73056
+ * let sql = wmill.datatable()
73057
+ * let name = 'Robin'
73058
+ * let age = 21
73059
+ * await sql\`
73060
+ * SELECT * FROM friends
73061
+ * WHERE name = \${name} AND age = \${age}::int
73062
+ * \`.fetch()
73063
+ */
73064
+ datatable(name: string = "main"): DatatableSqlTemplateFunction
73221
73065
 
73222
- One dependency per line. No need to require autoload, it is already done.
73066
+ /**
73067
+ * Create a SQL template function for DuckDB/ducklake queries
73068
+ * @param name - DuckDB database name (default: "main")
73069
+ * @returns SQL template function for building parameterized queries
73070
+ * @example
73071
+ * let sql = wmill.ducklake()
73072
+ * let name = 'Robin'
73073
+ * let age = 21
73074
+ * await sql\`
73075
+ * SELECT * FROM friends
73076
+ * WHERE name = \${name} AND age = \${age}
73077
+ * \`.fetch()
73078
+ */
73079
+ ducklake(name: string = "main"): SqlTemplateFunction
73223
73080
  `,
73224
- "write-script-bigquery": `---
73225
- name: write-script-bigquery
73226
- description: MUST use when writing BigQuery queries.
73081
+ "write-script-bunnative": `---
73082
+ name: write-script-bunnative
73083
+ description: MUST use when writing Bun Native scripts.
73227
73084
  ---
73228
73085
 
73229
73086
  ## CLI Commands
73230
73087
 
73231
- Place scripts in a folder. After writing, run:
73088
+ Place scripts in a folder. After writing, tell the user they can run:
73232
73089
  - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
73233
73090
  - \`wmill sync push\` - Deploy to Windmill
73234
73091
 
73235
- Use \`wmill resource-type list --schema\` to discover available resource types.
73236
-
73237
- # BigQuery
73238
-
73239
- Arguments use \`@name\` syntax.
73240
-
73241
- Name the parameters by adding comments before the statement:
73242
-
73243
- \`\`\`sql
73244
- -- @name1 (string)
73245
- -- @name2 (int64) = 0
73246
- SELECT * FROM users WHERE name = @name1 AND age > @name2;
73247
- \`\`\`
73248
- `,
73249
- "write-script-bun": `---
73250
- name: write-script-bun
73251
- description: MUST use when writing Bun/TypeScript scripts.
73252
- ---
73253
-
73254
- ## CLI Commands
73255
-
73256
- Place scripts in a folder. After writing, run:
73257
- - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
73258
- - \`wmill sync push\` - Deploy to Windmill
73092
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
73259
73093
 
73260
73094
  Use \`wmill resource-type list --schema\` to discover available resource types.
73261
73095
 
73262
- # TypeScript (Bun)
73096
+ # TypeScript (Bun Native)
73263
73097
 
73264
- Bun runtime with full npm ecosystem and fastest execution.
73098
+ Native TypeScript execution with fetch only - no external imports allowed.
73265
73099
 
73266
73100
  ## Structure
73267
73101
 
@@ -73274,7 +73108,7 @@ export async function main(param1: string, param2: number) {
73274
73108
  }
73275
73109
  \`\`\`
73276
73110
 
73277
- Do not call the main function. Libraries are installed automatically.
73111
+ Do not call the main function.
73278
73112
 
73279
73113
  ## Resource Types
73280
73114
 
@@ -73294,20 +73128,18 @@ Before using a resource type, check the \`rt.d.ts\` file in the project root to
73294
73128
 
73295
73129
  ## Imports
73296
73130
 
73131
+ **No imports allowed.** Use the globally available \`fetch\` function:
73132
+
73297
73133
  \`\`\`typescript
73298
- import Stripe from "stripe";
73299
- import { someFunction } from "some-package";
73134
+ export async function main(url: string) {
73135
+ const response = await fetch(url);
73136
+ return await response.json();
73137
+ }
73300
73138
  \`\`\`
73301
73139
 
73302
73140
  ## Windmill Client
73303
73141
 
73304
- Import the windmill client for platform interactions:
73305
-
73306
- \`\`\`typescript
73307
- import * as wmill from "windmill-client";
73308
- \`\`\`
73309
-
73310
- See the SDK documentation for available methods.
73142
+ The windmill client is not available in native TypeScript mode. Use fetch to call APIs directly.
73311
73143
 
73312
73144
  ## Preprocessor Scripts
73313
73145
 
@@ -73377,36 +73209,6 @@ const result: S3Object = await wmill.writeS3File(
73377
73209
 
73378
73210
  Import: import * as wmill from 'windmill-client'
73379
73211
 
73380
- /**
73381
- * Create a SQL template function for PostgreSQL/datatable queries
73382
- * @param name - Database/datatable name (default: "main")
73383
- * @returns SQL template function for building parameterized queries
73384
- * @example
73385
- * let sql = wmill.datatable()
73386
- * let name = 'Robin'
73387
- * let age = 21
73388
- * await sql\`
73389
- * SELECT * FROM friends
73390
- * WHERE name = \${name} AND age = \${age}::int
73391
- * \`.fetch()
73392
- */
73393
- datatable(name: string = "main"): DatatableSqlTemplateFunction
73394
-
73395
- /**
73396
- * Create a SQL template function for DuckDB/ducklake queries
73397
- * @param name - DuckDB database name (default: "main")
73398
- * @returns SQL template function for building parameterized queries
73399
- * @example
73400
- * let sql = wmill.ducklake()
73401
- * let name = 'Robin'
73402
- * let age = 21
73403
- * await sql\`
73404
- * SELECT * FROM friends
73405
- * WHERE name = \${name} AND age = \${age}
73406
- * \`.fetch()
73407
- */
73408
- ducklake(name: string = "main"): SqlTemplateFunction
73409
-
73410
73212
  /**
73411
73213
  * Initialize the Windmill client with authentication token and base URL
73412
73214
  * @param token - Authentication token (defaults to WM_TOKEN env variable)
@@ -73899,6 +73701,45 @@ waitForApproval(options?: { timeout?: number; form?: object; }): PromiseLike<{ v
73899
73701
  * const results = await parallel(items, process, { concurrency: 5 });
73900
73702
  */
73901
73703
  async parallel<T, R>(items: T[], fn: (item: T) => PromiseLike<R> | R, options?: { concurrency?: number },): Promise<R[]>
73704
+
73705
+ /**
73706
+ * Commit Kafka offsets for a trigger with auto_commit disabled.
73707
+ * @param triggerPath - Path to the Kafka trigger (from event.wm_trigger.trigger_path)
73708
+ * @param topic - Kafka topic name (from event.topic)
73709
+ * @param partition - Partition number (from event.partition)
73710
+ * @param offset - Message offset to commit (from event.offset)
73711
+ */
73712
+ async commitKafkaOffsets(triggerPath: string, topic: string, partition: number, offset: number,): Promise<void>
73713
+
73714
+ /**
73715
+ * Create a SQL template function for PostgreSQL/datatable queries
73716
+ * @param name - Database/datatable name (default: "main")
73717
+ * @returns SQL template function for building parameterized queries
73718
+ * @example
73719
+ * let sql = wmill.datatable()
73720
+ * let name = 'Robin'
73721
+ * let age = 21
73722
+ * await sql\`
73723
+ * SELECT * FROM friends
73724
+ * WHERE name = \${name} AND age = \${age}::int
73725
+ * \`.fetch()
73726
+ */
73727
+ datatable(name: string = "main"): DatatableSqlTemplateFunction
73728
+
73729
+ /**
73730
+ * Create a SQL template function for DuckDB/ducklake queries
73731
+ * @param name - DuckDB database name (default: "main")
73732
+ * @returns SQL template function for building parameterized queries
73733
+ * @example
73734
+ * let sql = wmill.ducklake()
73735
+ * let name = 'Robin'
73736
+ * let age = 21
73737
+ * await sql\`
73738
+ * SELECT * FROM friends
73739
+ * WHERE name = \${name} AND age = \${age}
73740
+ * \`.fetch()
73741
+ */
73742
+ ducklake(name: string = "main"): SqlTemplateFunction
73902
73743
  `,
73903
73744
  "write-script-csharp": `---
73904
73745
  name: write-script-csharp
@@ -73907,10 +73748,12 @@ description: MUST use when writing C# scripts.
73907
73748
 
73908
73749
  ## CLI Commands
73909
73750
 
73910
- Place scripts in a folder. After writing, run:
73751
+ Place scripts in a folder. After writing, tell the user they can run:
73911
73752
  - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
73912
73753
  - \`wmill sync push\` - Deploy to Windmill
73913
73754
 
73755
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
73756
+
73914
73757
  Use \`wmill resource-type list --schema\` to discover available resource types.
73915
73758
 
73916
73759
  # C#
@@ -73954,31 +73797,6 @@ public class Script
73954
73797
  }
73955
73798
  }
73956
73799
  \`\`\`
73957
- `,
73958
- "write-script-mssql": `---
73959
- name: write-script-mssql
73960
- description: MUST use when writing MS SQL Server queries.
73961
- ---
73962
-
73963
- ## CLI Commands
73964
-
73965
- Place scripts in a folder. After writing, run:
73966
- - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
73967
- - \`wmill sync push\` - Deploy to Windmill
73968
-
73969
- Use \`wmill resource-type list --schema\` to discover available resource types.
73970
-
73971
- # Microsoft SQL Server (MSSQL)
73972
-
73973
- Arguments use \`@P1\`, \`@P2\`, etc.
73974
-
73975
- Name the parameters by adding comments before the statement:
73976
-
73977
- \`\`\`sql
73978
- -- @P1 name1 (varchar)
73979
- -- @P2 name2 (int) = 0
73980
- SELECT * FROM users WHERE name = @P1 AND age > @P2;
73981
- \`\`\`
73982
73800
  `,
73983
73801
  "write-script-deno": `---
73984
73802
  name: write-script-deno
@@ -73987,10 +73805,12 @@ description: MUST use when writing Deno/TypeScript scripts.
73987
73805
 
73988
73806
  ## CLI Commands
73989
73807
 
73990
- Place scripts in a folder. After writing, run:
73808
+ Place scripts in a folder. After writing, tell the user they can run:
73991
73809
  - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
73992
73810
  - \`wmill sync push\` - Deploy to Windmill
73993
73811
 
73812
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
73813
+
73994
73814
  Use \`wmill resource-type list --schema\` to discover available resource types.
73995
73815
 
73996
73816
  # TypeScript (Deno)
@@ -74115,6 +73935,508 @@ const result: S3Object = await wmill.writeS3File(
74115
73935
 
74116
73936
  Import: import * as wmill from 'windmill-client'
74117
73937
 
73938
+ /**
73939
+ * Initialize the Windmill client with authentication token and base URL
73940
+ * @param token - Authentication token (defaults to WM_TOKEN env variable)
73941
+ * @param baseUrl - API base URL (defaults to BASE_INTERNAL_URL or BASE_URL env variable)
73942
+ */
73943
+ setClient(token?: string, baseUrl?: string): void
73944
+
73945
+ /**
73946
+ * Create a client configuration from env variables
73947
+ * @returns client configuration
73948
+ */
73949
+ getWorkspace(): string
73950
+
73951
+ /**
73952
+ * Get a resource value by path
73953
+ * @param path path of the resource, default to internal state path
73954
+ * @param undefinedIfEmpty if the resource does not exist, return undefined instead of throwing an error
73955
+ * @returns resource value
73956
+ */
73957
+ async getResource(path?: string, undefinedIfEmpty?: boolean): Promise<any>
73958
+
73959
+ /**
73960
+ * Get the true root job id
73961
+ * @param jobId job id to get the root job id from (default to current job)
73962
+ * @returns root job id
73963
+ */
73964
+ async getRootJobId(jobId?: string): Promise<string>
73965
+
73966
+ /**
73967
+ * @deprecated Use runScriptByPath or runScriptByHash instead
73968
+ */
73969
+ async runScript(path: string | null = null, hash_: string | null = null, args: Record<string, any> | null = null, verbose: boolean = false): Promise<any>
73970
+
73971
+ /**
73972
+ * Run a script synchronously by its path and wait for the result
73973
+ * @param path - Script path in Windmill
73974
+ * @param args - Arguments to pass to the script
73975
+ * @param verbose - Enable verbose logging
73976
+ * @returns Script execution result
73977
+ */
73978
+ async runScriptByPath(path: string, args: Record<string, any> | null = null, verbose: boolean = false): Promise<any>
73979
+
73980
+ /**
73981
+ * Run a script synchronously by its hash and wait for the result
73982
+ * @param hash_ - Script hash in Windmill
73983
+ * @param args - Arguments to pass to the script
73984
+ * @param verbose - Enable verbose logging
73985
+ * @returns Script execution result
73986
+ */
73987
+ async runScriptByHash(hash_: string, args: Record<string, any> | null = null, verbose: boolean = false): Promise<any>
73988
+
73989
+ /**
73990
+ * Append a text to the result stream
73991
+ * @param text text to append to the result stream
73992
+ */
73993
+ appendToResultStream(text: string): void
73994
+
73995
+ /**
73996
+ * Stream to the result stream
73997
+ * @param stream stream to stream to the result stream
73998
+ */
73999
+ async streamResult(stream: AsyncIterable<string>): Promise<void>
74000
+
74001
+ /**
74002
+ * Run a flow synchronously by its path and wait for the result
74003
+ * @param path - Flow path in Windmill
74004
+ * @param args - Arguments to pass to the flow
74005
+ * @param verbose - Enable verbose logging
74006
+ * @returns Flow execution result
74007
+ */
74008
+ async runFlow(path: string | null = null, args: Record<string, any> | null = null, verbose: boolean = false): Promise<any>
74009
+
74010
+ /**
74011
+ * Wait for a job to complete and return its result
74012
+ * @param jobId - ID of the job to wait for
74013
+ * @param verbose - Enable verbose logging
74014
+ * @returns Job result when completed
74015
+ */
74016
+ async waitJob(jobId: string, verbose: boolean = false): Promise<any>
74017
+
74018
+ /**
74019
+ * Get the result of a completed job
74020
+ * @param jobId - ID of the completed job
74021
+ * @returns Job result
74022
+ */
74023
+ async getResult(jobId: string): Promise<any>
74024
+
74025
+ /**
74026
+ * Get the result of a job if completed, or its current status
74027
+ * @param jobId - ID of the job
74028
+ * @returns Object with started, completed, success, and result properties
74029
+ */
74030
+ async getResultMaybe(jobId: string): Promise<any>
74031
+
74032
+ /**
74033
+ * @deprecated Use runScriptByPathAsync or runScriptByHashAsync instead
74034
+ */
74035
+ async runScriptAsync(path: string | null, hash_: string | null, args: Record<string, any> | null, scheduledInSeconds: number | null = null): Promise<string>
74036
+
74037
+ /**
74038
+ * Run a script asynchronously by its path
74039
+ * @param path - Script path in Windmill
74040
+ * @param args - Arguments to pass to the script
74041
+ * @param scheduledInSeconds - Schedule execution for a future time (in seconds)
74042
+ * @returns Job ID of the created job
74043
+ */
74044
+ async runScriptByPathAsync(path: string, args: Record<string, any> | null = null, scheduledInSeconds: number | null = null): Promise<string>
74045
+
74046
+ /**
74047
+ * Run a script asynchronously by its hash
74048
+ * @param hash_ - Script hash in Windmill
74049
+ * @param args - Arguments to pass to the script
74050
+ * @param scheduledInSeconds - Schedule execution for a future time (in seconds)
74051
+ * @returns Job ID of the created job
74052
+ */
74053
+ async runScriptByHashAsync(hash_: string, args: Record<string, any> | null = null, scheduledInSeconds: number | null = null): Promise<string>
74054
+
74055
+ /**
74056
+ * Run a flow asynchronously by its path
74057
+ * @param path - Flow path in Windmill
74058
+ * @param args - Arguments to pass to the flow
74059
+ * @param scheduledInSeconds - Schedule execution for a future time (in seconds)
74060
+ * @param doNotTrackInParent - If false, tracks state in parent job (only use when fully awaiting the job)
74061
+ * @returns Job ID of the created job
74062
+ */
74063
+ async runFlowAsync(path: string | null, args: Record<string, any> | null, scheduledInSeconds: number | null = null, // can only be set to false if this the job will be fully await and not concurrent with any other job // as otherwise the child flow and its own child will store their state in the parent job which will // lead to incorrectness and failures doNotTrackInParent: boolean = true): Promise<string>
74064
+
74065
+ /**
74066
+ * Resolve a resource value in case the default value was picked because the input payload was undefined
74067
+ * @param obj resource value or path of the resource under the format \`$res:path\`
74068
+ * @returns resource value
74069
+ */
74070
+ async resolveDefaultResource(obj: any): Promise<any>
74071
+
74072
+ /**
74073
+ * Get the state file path from environment variables
74074
+ * @returns State path string
74075
+ */
74076
+ getStatePath(): string
74077
+
74078
+ /**
74079
+ * Set a resource value by path
74080
+ * @param path path of the resource to set, default to state path
74081
+ * @param value new value of the resource to set
74082
+ * @param initializeToTypeIfNotExist if the resource does not exist, initialize it with this type
74083
+ */
74084
+ async setResource(value: any, path?: string, initializeToTypeIfNotExist?: string): Promise<void>
74085
+
74086
+ /**
74087
+ * Set the state
74088
+ * @param state state to set
74089
+ * @deprecated use setState instead
74090
+ */
74091
+ async setInternalState(state: any): Promise<void>
74092
+
74093
+ /**
74094
+ * Set the state
74095
+ * @param state state to set
74096
+ * @param path Optional state resource path override. Defaults to \`getStatePath()\`.
74097
+ */
74098
+ async setState(state: any, path?: string): Promise<void>
74099
+
74100
+ /**
74101
+ * Set the progress
74102
+ * Progress cannot go back and limited to 0% to 99% range
74103
+ * @param percent Progress to set in %
74104
+ * @param jobId? Job to set progress for
74105
+ */
74106
+ async setProgress(percent: number, jobId?: any): Promise<void>
74107
+
74108
+ /**
74109
+ * Get the progress
74110
+ * @param jobId? Job to get progress from
74111
+ * @returns Optional clamped between 0 and 100 progress value
74112
+ */
74113
+ async getProgress(jobId?: any): Promise<number | null>
74114
+
74115
+ /**
74116
+ * Set a flow user state
74117
+ * @param key key of the state
74118
+ * @param value value of the state
74119
+ */
74120
+ async setFlowUserState(key: string, value: any, errorIfNotPossible?: boolean): Promise<void>
74121
+
74122
+ /**
74123
+ * Get a flow user state
74124
+ * @param path path of the variable
74125
+ */
74126
+ async getFlowUserState(key: string, errorIfNotPossible?: boolean): Promise<any>
74127
+
74128
+ /**
74129
+ * Get the internal state
74130
+ * @deprecated use getState instead
74131
+ */
74132
+ async getInternalState(): Promise<any>
74133
+
74134
+ /**
74135
+ * Get the state shared across executions
74136
+ * @param path Optional state resource path override. Defaults to \`getStatePath()\`.
74137
+ */
74138
+ async getState(path?: string): Promise<any>
74139
+
74140
+ /**
74141
+ * Get a variable by path
74142
+ * @param path path of the variable
74143
+ * @returns variable value
74144
+ */
74145
+ async getVariable(path: string): Promise<string>
74146
+
74147
+ /**
74148
+ * Set a variable by path, create if not exist
74149
+ * @param path path of the variable
74150
+ * @param value value of the variable
74151
+ * @param isSecretIfNotExist if the variable does not exist, create it as secret or not (default: false)
74152
+ * @param descriptionIfNotExist if the variable does not exist, create it with this description (default: "")
74153
+ */
74154
+ async setVariable(path: string, value: string, isSecretIfNotExist?: boolean, descriptionIfNotExist?: string): Promise<void>
74155
+
74156
+ /**
74157
+ * Build a PostgreSQL connection URL from a database resource
74158
+ * @param path - Path to the database resource
74159
+ * @returns PostgreSQL connection URL string
74160
+ */
74161
+ async databaseUrlFromResource(path: string): Promise<string>
74162
+
74163
+ async polarsConnectionSettings(s3_resource_path: string | undefined): Promise<any>
74164
+
74165
+ async duckdbConnectionSettings(s3_resource_path: string | undefined): Promise<any>
74166
+
74167
+ /**
74168
+ * Get S3 client settings from a resource or workspace default
74169
+ * @param s3_resource_path - Path to S3 resource (uses workspace default if undefined)
74170
+ * @returns S3 client configuration settings
74171
+ */
74172
+ async denoS3LightClientSettings(s3_resource_path: string | undefined): Promise<DenoS3LightClientSettings>
74173
+
74174
+ /**
74175
+ * Load the content of a file stored in S3. If the s3ResourcePath is undefined, it will default to the workspace S3 resource.
74176
+ *
74177
+ * \`\`\`typescript
74178
+ * let fileContent = await wmill.loadS3FileContent(inputFile)
74179
+ * // if the file is a raw text file, it can be decoded and printed directly:
74180
+ * const text = new TextDecoder().decode(fileContentStream)
74181
+ * console.log(text);
74182
+ * \`\`\`
74183
+ */
74184
+ async loadS3File(s3object: S3Object, s3ResourcePath: string | undefined = undefined): Promise<Uint8Array | undefined>
74185
+
74186
+ /**
74187
+ * Load the content of a file stored in S3 as a stream. If the s3ResourcePath is undefined, it will default to the workspace S3 resource.
74188
+ *
74189
+ * \`\`\`typescript
74190
+ * let fileContentBlob = await wmill.loadS3FileStream(inputFile)
74191
+ * // if the content is plain text, the blob can be read directly:
74192
+ * console.log(await fileContentBlob.text());
74193
+ * \`\`\`
74194
+ */
74195
+ async loadS3FileStream(s3object: S3Object, s3ResourcePath: string | undefined = undefined): Promise<Blob | undefined>
74196
+
74197
+ /**
74198
+ * Persist a file to the S3 bucket. If the s3ResourcePath is undefined, it will default to the workspace S3 resource.
74199
+ *
74200
+ * \`\`\`typescript
74201
+ * const s3object = await writeS3File(s3Object, "Hello Windmill!")
74202
+ * const fileContentAsUtf8Str = (await s3object.toArray()).toString('utf-8')
74203
+ * console.log(fileContentAsUtf8Str)
74204
+ * \`\`\`
74205
+ */
74206
+ async writeS3File(s3object: S3Object | undefined, fileContent: string | Blob, s3ResourcePath: string | undefined = undefined, contentType: string | undefined = undefined, contentDisposition: string | undefined = undefined): Promise<S3Object>
74207
+
74208
+ /**
74209
+ * Sign S3 objects to be used by anonymous users in public apps
74210
+ * @param s3objects s3 objects to sign
74211
+ * @returns signed s3 objects
74212
+ */
74213
+ async signS3Objects(s3objects: S3Object[]): Promise<S3Object[]>
74214
+
74215
+ /**
74216
+ * Sign S3 object to be used by anonymous users in public apps
74217
+ * @param s3object s3 object to sign
74218
+ * @returns signed s3 object
74219
+ */
74220
+ async signS3Object(s3object: S3Object): Promise<S3Object>
74221
+
74222
+ /**
74223
+ * Generate a presigned public URL for an array of S3 objects.
74224
+ * If an S3 object is not signed yet, it will be signed first.
74225
+ * @param s3Objects s3 objects to sign
74226
+ * @returns list of signed public URLs
74227
+ */
74228
+ async getPresignedS3PublicUrls(s3Objects: S3Object[], { baseUrl }: { baseUrl?: string } = {}): Promise<string[]>
74229
+
74230
+ /**
74231
+ * Generate a presigned public URL for an S3 object. If the S3 object is not signed yet, it will be signed first.
74232
+ * @param s3Object s3 object to sign
74233
+ * @returns signed public URL
74234
+ */
74235
+ async getPresignedS3PublicUrl(s3Objects: S3Object, { baseUrl }: { baseUrl?: string } = {}): Promise<string>
74236
+
74237
+ /**
74238
+ * Get URLs needed for resuming a flow after this step
74239
+ * @param approver approver name
74240
+ * @param flowLevel if true, generate resume URLs for the parent flow instead of the specific step.
74241
+ * This allows pre-approvals that can be consumed by any later suspend step in the same flow.
74242
+ * @returns approval page UI URL, resume and cancel API URLs for resuming the flow
74243
+ */
74244
+ async getResumeUrls(approver?: string, flowLevel?: boolean): Promise<{
74245
+ approvalPage: string;
74246
+ resume: string;
74247
+ cancel: string;
74248
+ }>
74249
+
74250
+ /**
74251
+ * @deprecated use getResumeUrls instead
74252
+ */
74253
+ getResumeEndpoints(approver?: string): Promise<{
74254
+ approvalPage: string;
74255
+ resume: string;
74256
+ cancel: string;
74257
+ }>
74258
+
74259
+ /**
74260
+ * Get an OIDC jwt token for auth to external services (e.g: Vault, AWS) (ee only)
74261
+ * @param audience audience of the token
74262
+ * @param expiresIn Optional number of seconds until the token expires
74263
+ * @returns jwt token
74264
+ */
74265
+ async getIdToken(audience: string, expiresIn?: number): Promise<string>
74266
+
74267
+ /**
74268
+ * Convert a base64-encoded string to Uint8Array
74269
+ * @param data - Base64-encoded string
74270
+ * @returns Decoded Uint8Array
74271
+ */
74272
+ base64ToUint8Array(data: string): Uint8Array
74273
+
74274
+ /**
74275
+ * Convert a Uint8Array to base64-encoded string
74276
+ * @param arrayBuffer - Uint8Array to encode
74277
+ * @returns Base64-encoded string
74278
+ */
74279
+ uint8ArrayToBase64(arrayBuffer: Uint8Array): string
74280
+
74281
+ /**
74282
+ * Get email from workspace username
74283
+ * This method is particularly useful for apps that require the email address of the viewer.
74284
+ * Indeed, in the viewer context, WM_USERNAME is set to the username of the viewer but WM_EMAIL is set to the email of the creator of the app.
74285
+ * @param username
74286
+ * @returns email address
74287
+ */
74288
+ async usernameToEmail(username: string): Promise<string>
74289
+
74290
+ /**
74291
+ * Sends an interactive approval request via Slack, allowing optional customization of the message, approver, and form fields.
74292
+ *
74293
+ * **[Enterprise Edition Only]** To include form fields in the Slack approval request, go to **Advanced -> Suspend -> Form**
74294
+ * and define a form. Learn more at [Windmill Documentation](https://www.windmill.dev/docs/flows/flow_approval#form).
74295
+ *
74296
+ * @param {Object} options - The configuration options for the Slack approval request.
74297
+ * @param {string} options.slackResourcePath - The path to the Slack resource in Windmill.
74298
+ * @param {string} options.channelId - The Slack channel ID where the approval request will be sent.
74299
+ * @param {string} [options.message] - Optional custom message to include in the Slack approval request.
74300
+ * @param {string} [options.approver] - Optional user ID or name of the approver for the request.
74301
+ * @param {DefaultArgs} [options.defaultArgsJson] - Optional object defining or overriding the default arguments to a form field.
74302
+ * @param {Enums} [options.dynamicEnumsJson] - Optional object overriding the enum default values of an enum form field.
74303
+ * @param {string} [options.resumeButtonText] - Optional text for the resume button.
74304
+ * @param {string} [options.cancelButtonText] - Optional text for the cancel button.
74305
+ *
74306
+ * @returns {Promise<void>} Resolves when the Slack approval request is successfully sent.
74307
+ *
74308
+ * @throws {Error} If the function is not called within a flow or flow preview.
74309
+ * @throws {Error} If the \`JobService.getSlackApprovalPayload\` call fails.
74310
+ *
74311
+ * **Usage Example:**
74312
+ * \`\`\`typescript
74313
+ * await requestInteractiveSlackApproval({
74314
+ * slackResourcePath: "/u/alex/my_slack_resource",
74315
+ * channelId: "admins-slack-channel",
74316
+ * message: "Please approve this request",
74317
+ * approver: "approver123",
74318
+ * defaultArgsJson: { key1: "value1", key2: 42 },
74319
+ * dynamicEnumsJson: { foo: ["choice1", "choice2"], bar: ["optionA", "optionB"] },
74320
+ * resumeButtonText: "Resume",
74321
+ * cancelButtonText: "Cancel",
74322
+ * });
74323
+ * \`\`\`
74324
+ *
74325
+ * **Note:** This function requires execution within a Windmill flow or flow preview.
74326
+ */
74327
+ async requestInteractiveSlackApproval({ slackResourcePath, channelId, message, approver, defaultArgsJson, dynamicEnumsJson, resumeButtonText, cancelButtonText, }: SlackApprovalOptions): Promise<void>
74328
+
74329
+ /**
74330
+ * Sends an interactive approval request via Teams, allowing optional customization of the message, approver, and form fields.
74331
+ *
74332
+ * **[Enterprise Edition Only]** To include form fields in the Teams approval request, go to **Advanced -> Suspend -> Form**
74333
+ * and define a form. Learn more at [Windmill Documentation](https://www.windmill.dev/docs/flows/flow_approval#form).
74334
+ *
74335
+ * @param {Object} options - The configuration options for the Teams approval request.
74336
+ * @param {string} options.teamName - The Teams team name where the approval request will be sent.
74337
+ * @param {string} options.channelName - The Teams channel name where the approval request will be sent.
74338
+ * @param {string} [options.message] - Optional custom message to include in the Teams approval request.
74339
+ * @param {string} [options.approver] - Optional user ID or name of the approver for the request.
74340
+ * @param {DefaultArgs} [options.defaultArgsJson] - Optional object defining or overriding the default arguments to a form field.
74341
+ * @param {Enums} [options.dynamicEnumsJson] - Optional object overriding the enum default values of an enum form field.
74342
+ *
74343
+ * @returns {Promise<void>} Resolves when the Teams approval request is successfully sent.
74344
+ *
74345
+ * @throws {Error} If the function is not called within a flow or flow preview.
74346
+ * @throws {Error} If the \`JobService.getTeamsApprovalPayload\` call fails.
74347
+ *
74348
+ * **Usage Example:**
74349
+ * \`\`\`typescript
74350
+ * await requestInteractiveTeamsApproval({
74351
+ * teamName: "admins-teams",
74352
+ * channelName: "admins-teams-channel",
74353
+ * message: "Please approve this request",
74354
+ * approver: "approver123",
74355
+ * defaultArgsJson: { key1: "value1", key2: 42 },
74356
+ * dynamicEnumsJson: { foo: ["choice1", "choice2"], bar: ["optionA", "optionB"] },
74357
+ * });
74358
+ * \`\`\`
74359
+ *
74360
+ * **Note:** This function requires execution within a Windmill flow or flow preview.
74361
+ */
74362
+ async requestInteractiveTeamsApproval({ teamName, channelName, message, approver, defaultArgsJson, dynamicEnumsJson, }: TeamsApprovalOptions): Promise<void>
74363
+
74364
+ /**
74365
+ * Parse an S3 object from URI string or record format
74366
+ * @param s3Object - S3 object as URI string (s3://storage/key) or record
74367
+ * @returns S3 object record with storage and s3 key
74368
+ */
74369
+ parseS3Object(s3Object: S3Object): S3ObjectRecord
74370
+
74371
+ setWorkflowCtx(ctx: WorkflowCtx | null): void
74372
+
74373
+ async sleep(seconds: number): Promise<void>
74374
+
74375
+ async step<T>(name: string, fn: () => T | Promise<T>): Promise<T>
74376
+
74377
+ /**
74378
+ * Create a task that dispatches to a separate Windmill script.
74379
+ *
74380
+ * @example
74381
+ * const extract = taskScript("f/data/extract");
74382
+ * // inside workflow: await extract({ url: "https://..." })
74383
+ */
74384
+ taskScript(path: string, options?: TaskOptions): (...args: any[]) => PromiseLike<any>
74385
+
74386
+ /**
74387
+ * Create a task that dispatches to a separate Windmill flow.
74388
+ *
74389
+ * @example
74390
+ * const pipeline = taskFlow("f/etl/pipeline");
74391
+ * // inside workflow: await pipeline({ input: data })
74392
+ */
74393
+ taskFlow(path: string, options?: TaskOptions): (...args: any[]) => PromiseLike<any>
74394
+
74395
+ /**
74396
+ * Mark an async function as a workflow-as-code entry point.
74397
+ *
74398
+ * The function must be **deterministic**: given the same inputs it must call
74399
+ * tasks in the same order on every replay. Branching on task results is fine
74400
+ * (results are replayed from checkpoint), but branching on external state
74401
+ * (current time, random values, external API calls) must use \`step()\` to
74402
+ * checkpoint the value so replays see the same result.
74403
+ */
74404
+ workflow<T>(fn: (...args: any[]) => Promise<T>): void
74405
+
74406
+ /**
74407
+ * Suspend the workflow and wait for an external approval.
74408
+ *
74409
+ * Use \`getResumeUrls()\` (wrapped in \`step()\`) to obtain resume/cancel/approvalPage
74410
+ * URLs before calling this function.
74411
+ *
74412
+ * @example
74413
+ * const urls = await step("urls", () => getResumeUrls());
74414
+ * await step("notify", () => sendEmail(urls.approvalPage));
74415
+ * const { value, approver } = await waitForApproval({ timeout: 3600 });
74416
+ */
74417
+ waitForApproval(options?: { timeout?: number; form?: object; }): PromiseLike<{ value: any; approver: string; approved: boolean }>
74418
+
74419
+ /**
74420
+ * Process items in parallel with optional concurrency control.
74421
+ *
74422
+ * Each item is processed by calling \`fn(item)\`, which should be a task().
74423
+ * Items are dispatched in batches of \`concurrency\` (default: all at once).
74424
+ *
74425
+ * @example
74426
+ * const process = task(async (item: string) => { ... });
74427
+ * const results = await parallel(items, process, { concurrency: 5 });
74428
+ */
74429
+ async parallel<T, R>(items: T[], fn: (item: T) => PromiseLike<R> | R, options?: { concurrency?: number },): Promise<R[]>
74430
+
74431
+ /**
74432
+ * Commit Kafka offsets for a trigger with auto_commit disabled.
74433
+ * @param triggerPath - Path to the Kafka trigger (from event.wm_trigger.trigger_path)
74434
+ * @param topic - Kafka topic name (from event.topic)
74435
+ * @param partition - Partition number (from event.partition)
74436
+ * @param offset - Message offset to commit (from event.offset)
74437
+ */
74438
+ async commitKafkaOffsets(triggerPath: string, topic: string, partition: number, offset: number,): Promise<void>
74439
+
74118
74440
  /**
74119
74441
  * Create a SQL template function for PostgreSQL/datatable queries
74120
74442
  * @param name - Database/datatable name (default: "main")
@@ -74144,6 +74466,414 @@ datatable(name: string = "main"): DatatableSqlTemplateFunction
74144
74466
  * \`.fetch()
74145
74467
  */
74146
74468
  ducklake(name: string = "main"): SqlTemplateFunction
74469
+ `,
74470
+ "write-script-duckdb": `---
74471
+ name: write-script-duckdb
74472
+ description: MUST use when writing DuckDB queries.
74473
+ ---
74474
+
74475
+ ## CLI Commands
74476
+
74477
+ Place scripts in a folder. After writing, tell the user they can run:
74478
+ - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
74479
+ - \`wmill sync push\` - Deploy to Windmill
74480
+
74481
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
74482
+
74483
+ Use \`wmill resource-type list --schema\` to discover available resource types.
74484
+
74485
+ # DuckDB
74486
+
74487
+ Arguments are defined with comments and used with \`$name\` syntax:
74488
+
74489
+ \`\`\`sql
74490
+ -- $name (text) = default
74491
+ -- $age (integer)
74492
+ SELECT * FROM users WHERE name = $name AND age > $age;
74493
+ \`\`\`
74494
+
74495
+ ## Ducklake Integration
74496
+
74497
+ Attach Ducklake for data lake operations:
74498
+
74499
+ \`\`\`sql
74500
+ -- Main ducklake
74501
+ ATTACH 'ducklake' AS dl;
74502
+
74503
+ -- Named ducklake
74504
+ ATTACH 'ducklake://my_lake' AS dl;
74505
+
74506
+ -- Then query
74507
+ SELECT * FROM dl.schema.table;
74508
+ \`\`\`
74509
+
74510
+ ## External Database Connections
74511
+
74512
+ Connect to external databases using resources:
74513
+
74514
+ \`\`\`sql
74515
+ ATTACH '$res:path/to/resource' AS db (TYPE postgres);
74516
+ SELECT * FROM db.schema.table;
74517
+ \`\`\`
74518
+
74519
+ ## S3 File Operations
74520
+
74521
+ Read files from S3 storage:
74522
+
74523
+ \`\`\`sql
74524
+ -- Default storage
74525
+ SELECT * FROM read_csv('s3:///path/to/file.csv');
74526
+
74527
+ -- Named storage
74528
+ SELECT * FROM read_csv('s3://storage_name/path/to/file.csv');
74529
+
74530
+ -- Parquet files
74531
+ SELECT * FROM read_parquet('s3:///path/to/file.parquet');
74532
+
74533
+ -- JSON files
74534
+ SELECT * FROM read_json('s3:///path/to/file.json');
74535
+ \`\`\`
74536
+ `,
74537
+ "write-script-go": `---
74538
+ name: write-script-go
74539
+ description: MUST use when writing Go scripts.
74540
+ ---
74541
+
74542
+ ## CLI Commands
74543
+
74544
+ Place scripts in a folder. After writing, tell the user they can run:
74545
+ - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
74546
+ - \`wmill sync push\` - Deploy to Windmill
74547
+
74548
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
74549
+
74550
+ Use \`wmill resource-type list --schema\` to discover available resource types.
74551
+
74552
+ # Go
74553
+
74554
+ ## Structure
74555
+
74556
+ The file package must be \`inner\` and export a function called \`main\`:
74557
+
74558
+ \`\`\`go
74559
+ package inner
74560
+
74561
+ func main(param1 string, param2 int) (map[string]interface{}, error) {
74562
+ return map[string]interface{}{
74563
+ "result": param1,
74564
+ "count": param2,
74565
+ }, nil
74566
+ }
74567
+ \`\`\`
74568
+
74569
+ **Important:**
74570
+ - Package must be \`inner\`
74571
+ - Return type must be \`({return_type}, error)\`
74572
+ - Function name is \`main\` (lowercase)
74573
+
74574
+ ## Return Types
74575
+
74576
+ The return type can be any Go type that can be serialized to JSON:
74577
+
74578
+ \`\`\`go
74579
+ package inner
74580
+
74581
+ type Result struct {
74582
+ Name string \`json:"name"\`
74583
+ Count int \`json:"count"\`
74584
+ }
74585
+
74586
+ func main(name string, count int) (Result, error) {
74587
+ return Result{
74588
+ Name: name,
74589
+ Count: count,
74590
+ }, nil
74591
+ }
74592
+ \`\`\`
74593
+
74594
+ ## Error Handling
74595
+
74596
+ Return errors as the second return value:
74597
+
74598
+ \`\`\`go
74599
+ package inner
74600
+
74601
+ import "errors"
74602
+
74603
+ func main(value int) (string, error) {
74604
+ if value < 0 {
74605
+ return "", errors.New("value must be positive")
74606
+ }
74607
+ return "success", nil
74608
+ }
74609
+ \`\`\`
74610
+ `,
74611
+ "write-script-graphql": `---
74612
+ name: write-script-graphql
74613
+ description: MUST use when writing GraphQL queries.
74614
+ ---
74615
+
74616
+ ## CLI Commands
74617
+
74618
+ Place scripts in a folder. After writing, tell the user they can run:
74619
+ - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
74620
+ - \`wmill sync push\` - Deploy to Windmill
74621
+
74622
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
74623
+
74624
+ Use \`wmill resource-type list --schema\` to discover available resource types.
74625
+
74626
+ # GraphQL
74627
+
74628
+ ## Structure
74629
+
74630
+ Write GraphQL queries or mutations. Arguments can be added as query parameters:
74631
+
74632
+ \`\`\`graphql
74633
+ query GetUser($id: ID!) {
74634
+ user(id: $id) {
74635
+ id
74636
+ name
74637
+ email
74638
+ }
74639
+ }
74640
+ \`\`\`
74641
+
74642
+ ## Variables
74643
+
74644
+ Variables are passed as script arguments and automatically bound to the query:
74645
+
74646
+ \`\`\`graphql
74647
+ query SearchProducts($query: String!, $limit: Int = 10) {
74648
+ products(search: $query, first: $limit) {
74649
+ edges {
74650
+ node {
74651
+ id
74652
+ name
74653
+ price
74654
+ }
74655
+ }
74656
+ }
74657
+ }
74658
+ \`\`\`
74659
+
74660
+ ## Mutations
74661
+
74662
+ \`\`\`graphql
74663
+ mutation CreateUser($input: CreateUserInput!) {
74664
+ createUser(input: $input) {
74665
+ id
74666
+ name
74667
+ createdAt
74668
+ }
74669
+ }
74670
+ \`\`\`
74671
+ `,
74672
+ "write-script-java": `---
74673
+ name: write-script-java
74674
+ description: MUST use when writing Java scripts.
74675
+ ---
74676
+
74677
+ ## CLI Commands
74678
+
74679
+ Place scripts in a folder. After writing, tell the user they can run:
74680
+ - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
74681
+ - \`wmill sync push\` - Deploy to Windmill
74682
+
74683
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
74684
+
74685
+ Use \`wmill resource-type list --schema\` to discover available resource types.
74686
+
74687
+ # Java
74688
+
74689
+ The script must contain a Main public class with a \`public static main()\` method:
74690
+
74691
+ \`\`\`java
74692
+ public class Main {
74693
+ public static Object main(String name, int count) {
74694
+ java.util.Map<String, Object> result = new java.util.HashMap<>();
74695
+ result.put("name", name);
74696
+ result.put("count", count);
74697
+ return result;
74698
+ }
74699
+ }
74700
+ \`\`\`
74701
+
74702
+ **Important:**
74703
+ - Class must be named \`Main\`
74704
+ - Method must be \`public static Object main(...)\`
74705
+ - Return type is \`Object\` or \`void\`
74706
+
74707
+ ## Maven Dependencies
74708
+
74709
+ Add dependencies using comments at the top:
74710
+
74711
+ \`\`\`java
74712
+ //requirements:
74713
+ //com.google.code.gson:gson:2.10.1
74714
+ //org.apache.httpcomponents:httpclient:4.5.14
74715
+
74716
+ import com.google.gson.Gson;
74717
+
74718
+ public class Main {
74719
+ public static Object main(String input) {
74720
+ Gson gson = new Gson();
74721
+ return gson.fromJson(input, Object.class);
74722
+ }
74723
+ }
74724
+ \`\`\`
74725
+ `,
74726
+ "write-script-mssql": `---
74727
+ name: write-script-mssql
74728
+ description: MUST use when writing MS SQL Server queries.
74729
+ ---
74730
+
74731
+ ## CLI Commands
74732
+
74733
+ Place scripts in a folder. After writing, tell the user they can run:
74734
+ - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
74735
+ - \`wmill sync push\` - Deploy to Windmill
74736
+
74737
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
74738
+
74739
+ Use \`wmill resource-type list --schema\` to discover available resource types.
74740
+
74741
+ # Microsoft SQL Server (MSSQL)
74742
+
74743
+ Arguments use \`@P1\`, \`@P2\`, etc.
74744
+
74745
+ Name the parameters by adding comments before the statement:
74746
+
74747
+ \`\`\`sql
74748
+ -- @P1 name1 (varchar)
74749
+ -- @P2 name2 (int) = 0
74750
+ SELECT * FROM users WHERE name = @P1 AND age > @P2;
74751
+ \`\`\`
74752
+ `,
74753
+ "write-script-mysql": `---
74754
+ name: write-script-mysql
74755
+ description: MUST use when writing MySQL queries.
74756
+ ---
74757
+
74758
+ ## CLI Commands
74759
+
74760
+ Place scripts in a folder. After writing, tell the user they can run:
74761
+ - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
74762
+ - \`wmill sync push\` - Deploy to Windmill
74763
+
74764
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
74765
+
74766
+ Use \`wmill resource-type list --schema\` to discover available resource types.
74767
+
74768
+ # MySQL
74769
+
74770
+ Arguments use \`?\` placeholders.
74771
+
74772
+ Name the parameters by adding comments before the statement:
74773
+
74774
+ \`\`\`sql
74775
+ -- ? name1 (text)
74776
+ -- ? name2 (int) = 0
74777
+ SELECT * FROM users WHERE name = ? AND age > ?;
74778
+ \`\`\`
74779
+ `,
74780
+ "write-script-nativets": `---
74781
+ name: write-script-nativets
74782
+ description: MUST use when writing Native TypeScript scripts.
74783
+ ---
74784
+
74785
+ ## CLI Commands
74786
+
74787
+ Place scripts in a folder. After writing, tell the user they can run:
74788
+ - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
74789
+ - \`wmill sync push\` - Deploy to Windmill
74790
+
74791
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
74792
+
74793
+ Use \`wmill resource-type list --schema\` to discover available resource types.
74794
+
74795
+ # TypeScript (Native)
74796
+
74797
+ Native TypeScript execution with fetch only - no external imports allowed.
74798
+
74799
+ ## Structure
74800
+
74801
+ Export a single **async** function called \`main\`:
74802
+
74803
+ \`\`\`typescript
74804
+ export async function main(param1: string, param2: number) {
74805
+ // Your code here
74806
+ return { result: param1, count: param2 };
74807
+ }
74808
+ \`\`\`
74809
+
74810
+ Do not call the main function.
74811
+
74812
+ ## Resource Types
74813
+
74814
+ On Windmill, credentials and configuration are stored in resources and passed as parameters to main.
74815
+
74816
+ Use the \`RT\` namespace for resource types:
74817
+
74818
+ \`\`\`typescript
74819
+ export async function main(stripe: RT.Stripe) {
74820
+ // stripe contains API key and config from the resource
74821
+ }
74822
+ \`\`\`
74823
+
74824
+ Only use resource types if you need them to satisfy the instructions. Always use the RT namespace.
74825
+
74826
+ Before using a resource type, check the \`rt.d.ts\` file in the project root to see all available resource types and their fields. This file is generated by \`wmill resource-type generate-namespace\`.
74827
+
74828
+ ## Imports
74829
+
74830
+ **No imports allowed.** Use the globally available \`fetch\` function:
74831
+
74832
+ \`\`\`typescript
74833
+ export async function main(url: string) {
74834
+ const response = await fetch(url);
74835
+ return await response.json();
74836
+ }
74837
+ \`\`\`
74838
+
74839
+ ## Windmill Client
74840
+
74841
+ The windmill client is not available in native TypeScript mode. Use fetch to call APIs directly.
74842
+
74843
+ ## Preprocessor Scripts
74844
+
74845
+ For preprocessor scripts, the function should be named \`preprocessor\` and receives an \`event\` parameter:
74846
+
74847
+ \`\`\`typescript
74848
+ type Event = {
74849
+ kind:
74850
+ | "webhook"
74851
+ | "http"
74852
+ | "websocket"
74853
+ | "kafka"
74854
+ | "email"
74855
+ | "nats"
74856
+ | "postgres"
74857
+ | "sqs"
74858
+ | "mqtt"
74859
+ | "gcp";
74860
+ body: any;
74861
+ headers: Record<string, string>;
74862
+ query: Record<string, string>;
74863
+ };
74864
+
74865
+ export async function preprocessor(event: Event) {
74866
+ return {
74867
+ param1: event.body.field1,
74868
+ param2: event.query.id
74869
+ };
74870
+ }
74871
+ \`\`\`
74872
+
74873
+
74874
+ # TypeScript SDK (windmill-client)
74875
+
74876
+ Import: import * as wmill from 'windmill-client'
74147
74877
 
74148
74878
  /**
74149
74879
  * Initialize the Windmill client with authentication token and base URL
@@ -74637,30 +75367,144 @@ waitForApproval(options?: { timeout?: number; form?: object; }): PromiseLike<{ v
74637
75367
  * const results = await parallel(items, process, { concurrency: 5 });
74638
75368
  */
74639
75369
  async parallel<T, R>(items: T[], fn: (item: T) => PromiseLike<R> | R, options?: { concurrency?: number },): Promise<R[]>
75370
+
75371
+ /**
75372
+ * Commit Kafka offsets for a trigger with auto_commit disabled.
75373
+ * @param triggerPath - Path to the Kafka trigger (from event.wm_trigger.trigger_path)
75374
+ * @param topic - Kafka topic name (from event.topic)
75375
+ * @param partition - Partition number (from event.partition)
75376
+ * @param offset - Message offset to commit (from event.offset)
75377
+ */
75378
+ async commitKafkaOffsets(triggerPath: string, topic: string, partition: number, offset: number,): Promise<void>
75379
+
75380
+ /**
75381
+ * Create a SQL template function for PostgreSQL/datatable queries
75382
+ * @param name - Database/datatable name (default: "main")
75383
+ * @returns SQL template function for building parameterized queries
75384
+ * @example
75385
+ * let sql = wmill.datatable()
75386
+ * let name = 'Robin'
75387
+ * let age = 21
75388
+ * await sql\`
75389
+ * SELECT * FROM friends
75390
+ * WHERE name = \${name} AND age = \${age}::int
75391
+ * \`.fetch()
75392
+ */
75393
+ datatable(name: string = "main"): DatatableSqlTemplateFunction
75394
+
75395
+ /**
75396
+ * Create a SQL template function for DuckDB/ducklake queries
75397
+ * @param name - DuckDB database name (default: "main")
75398
+ * @returns SQL template function for building parameterized queries
75399
+ * @example
75400
+ * let sql = wmill.ducklake()
75401
+ * let name = 'Robin'
75402
+ * let age = 21
75403
+ * await sql\`
75404
+ * SELECT * FROM friends
75405
+ * WHERE name = \${name} AND age = \${age}
75406
+ * \`.fetch()
75407
+ */
75408
+ ducklake(name: string = "main"): SqlTemplateFunction
75409
+ `,
75410
+ "write-script-php": `---
75411
+ name: write-script-php
75412
+ description: MUST use when writing PHP scripts.
75413
+ ---
75414
+
75415
+ ## CLI Commands
75416
+
75417
+ Place scripts in a folder. After writing, tell the user they can run:
75418
+ - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
75419
+ - \`wmill sync push\` - Deploy to Windmill
75420
+
75421
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
75422
+
75423
+ Use \`wmill resource-type list --schema\` to discover available resource types.
75424
+
75425
+ # PHP
75426
+
75427
+ ## Structure
75428
+
75429
+ The script must start with \`<?php\` and contain at least one function called \`main\`:
75430
+
75431
+ \`\`\`php
75432
+ <?php
75433
+
75434
+ function main(string $param1, int $param2) {
75435
+ return ["result" => $param1, "count" => $param2];
75436
+ }
75437
+ \`\`\`
75438
+
75439
+ ## Resource Types
75440
+
75441
+ On Windmill, credentials and configuration are stored in resources and passed as parameters to main.
75442
+
75443
+ You need to **redefine** the type of the resources that are needed before the main function. Always check if the class already exists using \`class_exists\`:
75444
+
75445
+ \`\`\`php
75446
+ <?php
75447
+
75448
+ if (!class_exists('Postgresql')) {
75449
+ class Postgresql {
75450
+ public string $host;
75451
+ public int $port;
75452
+ public string $user;
75453
+ public string $password;
75454
+ public string $dbname;
75455
+ }
75456
+ }
75457
+
75458
+ function main(Postgresql $db) {
75459
+ // $db contains the database connection details
75460
+ }
75461
+ \`\`\`
75462
+
75463
+ The resource type name has to be exactly as specified.
75464
+
75465
+ ## Library Dependencies
75466
+
75467
+ Specify library dependencies as comments before the main function:
75468
+
75469
+ \`\`\`php
75470
+ <?php
75471
+
75472
+ // require:
75473
+ // guzzlehttp/guzzle
75474
+ // stripe/stripe-php@^10.0
75475
+
75476
+ function main() {
75477
+ // Libraries are available
75478
+ }
75479
+ \`\`\`
75480
+
75481
+ One dependency per line. No need to require autoload, it is already done.
74640
75482
  `,
74641
- "write-script-mysql": `---
74642
- name: write-script-mysql
74643
- description: MUST use when writing MySQL queries.
75483
+ "write-script-postgresql": `---
75484
+ name: write-script-postgresql
75485
+ description: MUST use when writing PostgreSQL queries.
74644
75486
  ---
74645
75487
 
74646
75488
  ## CLI Commands
74647
75489
 
74648
- Place scripts in a folder. After writing, run:
75490
+ Place scripts in a folder. After writing, tell the user they can run:
74649
75491
  - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
74650
75492
  - \`wmill sync push\` - Deploy to Windmill
74651
75493
 
75494
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
75495
+
74652
75496
  Use \`wmill resource-type list --schema\` to discover available resource types.
74653
75497
 
74654
- # MySQL
75498
+ # PostgreSQL
74655
75499
 
74656
- Arguments use \`?\` placeholders.
75500
+ Arguments are obtained directly in the statement with \`$1::{type}\`, \`$2::{type}\`, etc.
74657
75501
 
74658
- Name the parameters by adding comments before the statement:
75502
+ Name the parameters by adding comments at the beginning of the script (without specifying the type):
74659
75503
 
74660
75504
  \`\`\`sql
74661
- -- ? name1 (text)
74662
- -- ? name2 (int) = 0
74663
- SELECT * FROM users WHERE name = ? AND age > ?;
75505
+ -- $1 name1
75506
+ -- $2 name2 = default_value
75507
+ SELECT * FROM users WHERE name = $1::TEXT AND age > $2::INT;
74664
75508
  \`\`\`
74665
75509
  `,
74666
75510
  "write-script-powershell": `---
@@ -74670,10 +75514,12 @@ description: MUST use when writing PowerShell scripts.
74670
75514
 
74671
75515
  ## CLI Commands
74672
75516
 
74673
- Place scripts in a folder. After writing, run:
75517
+ Place scripts in a folder. After writing, tell the user they can run:
74674
75518
  - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
74675
75519
  - \`wmill sync push\` - Deploy to Windmill
74676
75520
 
75521
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
75522
+
74677
75523
  Use \`wmill resource-type list --schema\` to discover available resource types.
74678
75524
 
74679
75525
  # PowerShell
@@ -74731,31 +75577,6 @@ $result = @{
74731
75577
 
74732
75578
  $result
74733
75579
  \`\`\`
74734
- `,
74735
- "write-script-snowflake": `---
74736
- name: write-script-snowflake
74737
- description: MUST use when writing Snowflake queries.
74738
- ---
74739
-
74740
- ## CLI Commands
74741
-
74742
- Place scripts in a folder. After writing, run:
74743
- - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
74744
- - \`wmill sync push\` - Deploy to Windmill
74745
-
74746
- Use \`wmill resource-type list --schema\` to discover available resource types.
74747
-
74748
- # Snowflake
74749
-
74750
- Arguments use \`?\` placeholders.
74751
-
74752
- Name the parameters by adding comments before the statement:
74753
-
74754
- \`\`\`sql
74755
- -- ? name1 (text)
74756
- -- ? name2 (number) = 0
74757
- SELECT * FROM users WHERE name = ? AND age > ?;
74758
- \`\`\`
74759
75580
  `,
74760
75581
  "write-script-python3": `---
74761
75582
  name: write-script-python3
@@ -74764,10 +75585,12 @@ description: MUST use when writing Python scripts.
74764
75585
 
74765
75586
  ## CLI Commands
74766
75587
 
74767
- Place scripts in a folder. After writing, run:
75588
+ Place scripts in a folder. After writing, tell the user they can run:
74768
75589
  - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
74769
75590
  - \`wmill sync push\` - Deploy to Windmill
74770
75591
 
75592
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
75593
+
74771
75594
  Use \`wmill resource-type list --schema\` to discover available resource types.
74772
75595
 
74773
75596
  # Python
@@ -75508,806 +76331,186 @@ def task_script(path: str, timeout: Optional[int] = None, tag: Optional[str] = N
75508
76331
  #
75509
76332
  # @workflow
75510
76333
  # async def main():
75511
- # result = await pipeline(input=data)
75512
- def task_flow(path: str, timeout: Optional[int] = None, tag: Optional[str] = None, cache_ttl: Optional[int] = None, priority: Optional[int] = None, concurrency_limit: Optional[int] = None, concurrency_key: Optional[str] = None, concurrency_time_window_s: Optional[int] = None)
75513
-
75514
- # Decorator marking an async function as a workflow-as-code entry point.
75515
- #
75516
- # The function must be **deterministic**: given the same inputs it must call
75517
- # tasks in the same order on every replay. Branching on task results is fine
75518
- # (results are replayed from checkpoint), but branching on external state
75519
- # (current time, random values, external API calls) must use \`\`step()\`\` to
75520
- # checkpoint the value so replays see the same result.
75521
- def workflow(func)
75522
-
75523
- # Execute \`\`fn\`\` inline and checkpoint the result.
75524
- #
75525
- # On replay the cached value is returned without re-executing \`\`fn\`\`.
75526
- # Use for lightweight deterministic operations (timestamps, random IDs,
75527
- # config reads) that should not incur the overhead of a child job.
75528
- async def step(name: str, fn)
75529
-
75530
- # Server-side sleep — suspend the workflow for the given duration without holding a worker.
75531
- #
75532
- # Inside a @workflow, the parent job suspends and auto-resumes after \`\`seconds\`\`.
75533
- # Outside a workflow, falls back to \`\`asyncio.sleep\`\`.
75534
- async def sleep(seconds: int)
75535
-
75536
- # Suspend the workflow and wait for an external approval.
75537
- #
75538
- # Use \`\`get_resume_urls()\`\` (wrapped in \`\`step()\`\`) to obtain
75539
- # resume/cancel/approval URLs before calling this function.
75540
- #
75541
- # Returns a dict with \`\`value\`\` (form data), \`\`approver\`\`, and \`\`approved\`\`.
75542
- #
75543
- # Example::
75544
- #
75545
- # urls = await step("urls", lambda: get_resume_urls())
75546
- # await step("notify", lambda: send_email(urls["approvalPage"]))
75547
- # result = await wait_for_approval(timeout=3600)
75548
- async def wait_for_approval(timeout: int = 1800, form: dict | None = None) -> dict
75549
-
75550
- # Process items in parallel with optional concurrency control.
75551
- #
75552
- # Each item is processed by calling \`\`fn(item)\`\`, which should be a @task.
75553
- # Items are dispatched in batches of \`\`concurrency\`\` (default: all at once).
75554
- #
75555
- # Example::
75556
- #
75557
- # @task
75558
- # async def process(item: str):
75559
- # ...
75560
- #
75561
- # results = await parallel(items, process, concurrency=5)
75562
- async def parallel(items, fn, concurrency: Optional[int] = None)
75563
-
75564
- `,
75565
- "write-script-duckdb": `---
75566
- name: write-script-duckdb
75567
- description: MUST use when writing DuckDB queries.
75568
- ---
75569
-
75570
- ## CLI Commands
75571
-
75572
- Place scripts in a folder. After writing, run:
75573
- - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
75574
- - \`wmill sync push\` - Deploy to Windmill
75575
-
75576
- Use \`wmill resource-type list --schema\` to discover available resource types.
75577
-
75578
- # DuckDB
75579
-
75580
- Arguments are defined with comments and used with \`$name\` syntax:
75581
-
75582
- \`\`\`sql
75583
- -- $name (text) = default
75584
- -- $age (integer)
75585
- SELECT * FROM users WHERE name = $name AND age > $age;
75586
- \`\`\`
75587
-
75588
- ## Ducklake Integration
75589
-
75590
- Attach Ducklake for data lake operations:
75591
-
75592
- \`\`\`sql
75593
- -- Main ducklake
75594
- ATTACH 'ducklake' AS dl;
75595
-
75596
- -- Named ducklake
75597
- ATTACH 'ducklake://my_lake' AS dl;
75598
-
75599
- -- Then query
75600
- SELECT * FROM dl.schema.table;
75601
- \`\`\`
75602
-
75603
- ## External Database Connections
75604
-
75605
- Connect to external databases using resources:
75606
-
75607
- \`\`\`sql
75608
- ATTACH '$res:path/to/resource' AS db (TYPE postgres);
75609
- SELECT * FROM db.schema.table;
75610
- \`\`\`
75611
-
75612
- ## S3 File Operations
75613
-
75614
- Read files from S3 storage:
75615
-
75616
- \`\`\`sql
75617
- -- Default storage
75618
- SELECT * FROM read_csv('s3:///path/to/file.csv');
75619
-
75620
- -- Named storage
75621
- SELECT * FROM read_csv('s3://storage_name/path/to/file.csv');
75622
-
75623
- -- Parquet files
75624
- SELECT * FROM read_parquet('s3:///path/to/file.parquet');
75625
-
75626
- -- JSON files
75627
- SELECT * FROM read_json('s3:///path/to/file.json');
75628
- \`\`\`
75629
- `,
75630
- "write-script-bash": `---
75631
- name: write-script-bash
75632
- description: MUST use when writing Bash scripts.
75633
- ---
75634
-
75635
- ## CLI Commands
75636
-
75637
- Place scripts in a folder. After writing, run:
75638
- - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
75639
- - \`wmill sync push\` - Deploy to Windmill
75640
-
75641
- Use \`wmill resource-type list --schema\` to discover available resource types.
75642
-
75643
- # Bash
75644
-
75645
- ## Structure
75646
-
75647
- Do not include \`#!/bin/bash\`. Arguments are obtained as positional parameters:
75648
-
75649
- \`\`\`bash
75650
- # Get arguments
75651
- var1="$1"
75652
- var2="$2"
75653
-
75654
- echo "Processing $var1 and $var2"
75655
-
75656
- # Return JSON by echoing to stdout
75657
- echo "{\\"result\\": \\"$var1\\", \\"count\\": $var2}"
75658
- \`\`\`
75659
-
75660
- **Important:**
75661
- - Do not include shebang (\`#!/bin/bash\`)
75662
- - Arguments are always strings
75663
- - Access with \`$1\`, \`$2\`, etc.
75664
-
75665
- ## Output
75666
-
75667
- The script output is captured as the result. For structured data, output valid JSON:
75668
-
75669
- \`\`\`bash
75670
- name="$1"
75671
- count="$2"
75672
-
75673
- # Output JSON result
75674
- cat << EOF
75675
- {
75676
- "name": "$name",
75677
- "count": $count,
75678
- "timestamp": "$(date -Iseconds)"
75679
- }
75680
- EOF
75681
- \`\`\`
75682
-
75683
- ## Environment Variables
75684
-
75685
- Environment variables set in Windmill are available:
75686
-
75687
- \`\`\`bash
75688
- # Access environment variable
75689
- echo "Workspace: $WM_WORKSPACE"
75690
- echo "Job ID: $WM_JOB_ID"
75691
- \`\`\`
75692
- `,
75693
- "write-script-nativets": `---
75694
- name: write-script-nativets
75695
- description: MUST use when writing Native TypeScript scripts.
75696
- ---
75697
-
75698
- ## CLI Commands
75699
-
75700
- Place scripts in a folder. After writing, run:
75701
- - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
75702
- - \`wmill sync push\` - Deploy to Windmill
75703
-
75704
- Use \`wmill resource-type list --schema\` to discover available resource types.
75705
-
75706
- # TypeScript (Native)
75707
-
75708
- Native TypeScript execution with fetch only - no external imports allowed.
75709
-
75710
- ## Structure
75711
-
75712
- Export a single **async** function called \`main\`:
75713
-
75714
- \`\`\`typescript
75715
- export async function main(param1: string, param2: number) {
75716
- // Your code here
75717
- return { result: param1, count: param2 };
75718
- }
75719
- \`\`\`
75720
-
75721
- Do not call the main function.
75722
-
75723
- ## Resource Types
75724
-
75725
- On Windmill, credentials and configuration are stored in resources and passed as parameters to main.
75726
-
75727
- Use the \`RT\` namespace for resource types:
75728
-
75729
- \`\`\`typescript
75730
- export async function main(stripe: RT.Stripe) {
75731
- // stripe contains API key and config from the resource
75732
- }
75733
- \`\`\`
75734
-
75735
- Only use resource types if you need them to satisfy the instructions. Always use the RT namespace.
75736
-
75737
- Before using a resource type, check the \`rt.d.ts\` file in the project root to see all available resource types and their fields. This file is generated by \`wmill resource-type generate-namespace\`.
75738
-
75739
- ## Imports
75740
-
75741
- **No imports allowed.** Use the globally available \`fetch\` function:
75742
-
75743
- \`\`\`typescript
75744
- export async function main(url: string) {
75745
- const response = await fetch(url);
75746
- return await response.json();
75747
- }
75748
- \`\`\`
75749
-
75750
- ## Windmill Client
75751
-
75752
- The windmill client is not available in native TypeScript mode. Use fetch to call APIs directly.
75753
-
75754
- ## Preprocessor Scripts
75755
-
75756
- For preprocessor scripts, the function should be named \`preprocessor\` and receives an \`event\` parameter:
75757
-
75758
- \`\`\`typescript
75759
- type Event = {
75760
- kind:
75761
- | "webhook"
75762
- | "http"
75763
- | "websocket"
75764
- | "kafka"
75765
- | "email"
75766
- | "nats"
75767
- | "postgres"
75768
- | "sqs"
75769
- | "mqtt"
75770
- | "gcp";
75771
- body: any;
75772
- headers: Record<string, string>;
75773
- query: Record<string, string>;
75774
- };
75775
-
75776
- export async function preprocessor(event: Event) {
75777
- return {
75778
- param1: event.body.field1,
75779
- param2: event.query.id
75780
- };
75781
- }
75782
- \`\`\`
75783
-
75784
-
75785
- # TypeScript SDK (windmill-client)
75786
-
75787
- Import: import * as wmill from 'windmill-client'
75788
-
75789
- /**
75790
- * Create a SQL template function for PostgreSQL/datatable queries
75791
- * @param name - Database/datatable name (default: "main")
75792
- * @returns SQL template function for building parameterized queries
75793
- * @example
75794
- * let sql = wmill.datatable()
75795
- * let name = 'Robin'
75796
- * let age = 21
75797
- * await sql\`
75798
- * SELECT * FROM friends
75799
- * WHERE name = \${name} AND age = \${age}::int
75800
- * \`.fetch()
75801
- */
75802
- datatable(name: string = "main"): DatatableSqlTemplateFunction
75803
-
75804
- /**
75805
- * Create a SQL template function for DuckDB/ducklake queries
75806
- * @param name - DuckDB database name (default: "main")
75807
- * @returns SQL template function for building parameterized queries
75808
- * @example
75809
- * let sql = wmill.ducklake()
75810
- * let name = 'Robin'
75811
- * let age = 21
75812
- * await sql\`
75813
- * SELECT * FROM friends
75814
- * WHERE name = \${name} AND age = \${age}
75815
- * \`.fetch()
75816
- */
75817
- ducklake(name: string = "main"): SqlTemplateFunction
75818
-
75819
- /**
75820
- * Initialize the Windmill client with authentication token and base URL
75821
- * @param token - Authentication token (defaults to WM_TOKEN env variable)
75822
- * @param baseUrl - API base URL (defaults to BASE_INTERNAL_URL or BASE_URL env variable)
75823
- */
75824
- setClient(token?: string, baseUrl?: string): void
75825
-
75826
- /**
75827
- * Create a client configuration from env variables
75828
- * @returns client configuration
75829
- */
75830
- getWorkspace(): string
75831
-
75832
- /**
75833
- * Get a resource value by path
75834
- * @param path path of the resource, default to internal state path
75835
- * @param undefinedIfEmpty if the resource does not exist, return undefined instead of throwing an error
75836
- * @returns resource value
75837
- */
75838
- async getResource(path?: string, undefinedIfEmpty?: boolean): Promise<any>
75839
-
75840
- /**
75841
- * Get the true root job id
75842
- * @param jobId job id to get the root job id from (default to current job)
75843
- * @returns root job id
75844
- */
75845
- async getRootJobId(jobId?: string): Promise<string>
75846
-
75847
- /**
75848
- * @deprecated Use runScriptByPath or runScriptByHash instead
75849
- */
75850
- async runScript(path: string | null = null, hash_: string | null = null, args: Record<string, any> | null = null, verbose: boolean = false): Promise<any>
75851
-
75852
- /**
75853
- * Run a script synchronously by its path and wait for the result
75854
- * @param path - Script path in Windmill
75855
- * @param args - Arguments to pass to the script
75856
- * @param verbose - Enable verbose logging
75857
- * @returns Script execution result
75858
- */
75859
- async runScriptByPath(path: string, args: Record<string, any> | null = null, verbose: boolean = false): Promise<any>
75860
-
75861
- /**
75862
- * Run a script synchronously by its hash and wait for the result
75863
- * @param hash_ - Script hash in Windmill
75864
- * @param args - Arguments to pass to the script
75865
- * @param verbose - Enable verbose logging
75866
- * @returns Script execution result
75867
- */
75868
- async runScriptByHash(hash_: string, args: Record<string, any> | null = null, verbose: boolean = false): Promise<any>
75869
-
75870
- /**
75871
- * Append a text to the result stream
75872
- * @param text text to append to the result stream
75873
- */
75874
- appendToResultStream(text: string): void
75875
-
75876
- /**
75877
- * Stream to the result stream
75878
- * @param stream stream to stream to the result stream
75879
- */
75880
- async streamResult(stream: AsyncIterable<string>): Promise<void>
75881
-
75882
- /**
75883
- * Run a flow synchronously by its path and wait for the result
75884
- * @param path - Flow path in Windmill
75885
- * @param args - Arguments to pass to the flow
75886
- * @param verbose - Enable verbose logging
75887
- * @returns Flow execution result
75888
- */
75889
- async runFlow(path: string | null = null, args: Record<string, any> | null = null, verbose: boolean = false): Promise<any>
75890
-
75891
- /**
75892
- * Wait for a job to complete and return its result
75893
- * @param jobId - ID of the job to wait for
75894
- * @param verbose - Enable verbose logging
75895
- * @returns Job result when completed
75896
- */
75897
- async waitJob(jobId: string, verbose: boolean = false): Promise<any>
75898
-
75899
- /**
75900
- * Get the result of a completed job
75901
- * @param jobId - ID of the completed job
75902
- * @returns Job result
75903
- */
75904
- async getResult(jobId: string): Promise<any>
75905
-
75906
- /**
75907
- * Get the result of a job if completed, or its current status
75908
- * @param jobId - ID of the job
75909
- * @returns Object with started, completed, success, and result properties
75910
- */
75911
- async getResultMaybe(jobId: string): Promise<any>
75912
-
75913
- /**
75914
- * @deprecated Use runScriptByPathAsync or runScriptByHashAsync instead
75915
- */
75916
- async runScriptAsync(path: string | null, hash_: string | null, args: Record<string, any> | null, scheduledInSeconds: number | null = null): Promise<string>
75917
-
75918
- /**
75919
- * Run a script asynchronously by its path
75920
- * @param path - Script path in Windmill
75921
- * @param args - Arguments to pass to the script
75922
- * @param scheduledInSeconds - Schedule execution for a future time (in seconds)
75923
- * @returns Job ID of the created job
75924
- */
75925
- async runScriptByPathAsync(path: string, args: Record<string, any> | null = null, scheduledInSeconds: number | null = null): Promise<string>
75926
-
75927
- /**
75928
- * Run a script asynchronously by its hash
75929
- * @param hash_ - Script hash in Windmill
75930
- * @param args - Arguments to pass to the script
75931
- * @param scheduledInSeconds - Schedule execution for a future time (in seconds)
75932
- * @returns Job ID of the created job
75933
- */
75934
- async runScriptByHashAsync(hash_: string, args: Record<string, any> | null = null, scheduledInSeconds: number | null = null): Promise<string>
75935
-
75936
- /**
75937
- * Run a flow asynchronously by its path
75938
- * @param path - Flow path in Windmill
75939
- * @param args - Arguments to pass to the flow
75940
- * @param scheduledInSeconds - Schedule execution for a future time (in seconds)
75941
- * @param doNotTrackInParent - If false, tracks state in parent job (only use when fully awaiting the job)
75942
- * @returns Job ID of the created job
75943
- */
75944
- async runFlowAsync(path: string | null, args: Record<string, any> | null, scheduledInSeconds: number | null = null, // can only be set to false if this the job will be fully await and not concurrent with any other job // as otherwise the child flow and its own child will store their state in the parent job which will // lead to incorrectness and failures doNotTrackInParent: boolean = true): Promise<string>
75945
-
75946
- /**
75947
- * Resolve a resource value in case the default value was picked because the input payload was undefined
75948
- * @param obj resource value or path of the resource under the format \`$res:path\`
75949
- * @returns resource value
75950
- */
75951
- async resolveDefaultResource(obj: any): Promise<any>
75952
-
75953
- /**
75954
- * Get the state file path from environment variables
75955
- * @returns State path string
75956
- */
75957
- getStatePath(): string
75958
-
75959
- /**
75960
- * Set a resource value by path
75961
- * @param path path of the resource to set, default to state path
75962
- * @param value new value of the resource to set
75963
- * @param initializeToTypeIfNotExist if the resource does not exist, initialize it with this type
75964
- */
75965
- async setResource(value: any, path?: string, initializeToTypeIfNotExist?: string): Promise<void>
75966
-
75967
- /**
75968
- * Set the state
75969
- * @param state state to set
75970
- * @deprecated use setState instead
75971
- */
75972
- async setInternalState(state: any): Promise<void>
75973
-
75974
- /**
75975
- * Set the state
75976
- * @param state state to set
75977
- * @param path Optional state resource path override. Defaults to \`getStatePath()\`.
75978
- */
75979
- async setState(state: any, path?: string): Promise<void>
76334
+ # result = await pipeline(input=data)
76335
+ def task_flow(path: str, timeout: Optional[int] = None, tag: Optional[str] = None, cache_ttl: Optional[int] = None, priority: Optional[int] = None, concurrency_limit: Optional[int] = None, concurrency_key: Optional[str] = None, concurrency_time_window_s: Optional[int] = None)
75980
76336
 
75981
- /**
75982
- * Set the progress
75983
- * Progress cannot go back and limited to 0% to 99% range
75984
- * @param percent Progress to set in %
75985
- * @param jobId? Job to set progress for
75986
- */
75987
- async setProgress(percent: number, jobId?: any): Promise<void>
76337
+ # Decorator marking an async function as a workflow-as-code entry point.
76338
+ #
76339
+ # The function must be **deterministic**: given the same inputs it must call
76340
+ # tasks in the same order on every replay. Branching on task results is fine
76341
+ # (results are replayed from checkpoint), but branching on external state
76342
+ # (current time, random values, external API calls) must use \`\`step()\`\` to
76343
+ # checkpoint the value so replays see the same result.
76344
+ def workflow(func)
75988
76345
 
75989
- /**
75990
- * Get the progress
75991
- * @param jobId? Job to get progress from
75992
- * @returns Optional clamped between 0 and 100 progress value
75993
- */
75994
- async getProgress(jobId?: any): Promise<number | null>
76346
+ # Execute \`\`fn\`\` inline and checkpoint the result.
76347
+ #
76348
+ # On replay the cached value is returned without re-executing \`\`fn\`\`.
76349
+ # Use for lightweight deterministic operations (timestamps, random IDs,
76350
+ # config reads) that should not incur the overhead of a child job.
76351
+ async def step(name: str, fn)
75995
76352
 
75996
- /**
75997
- * Set a flow user state
75998
- * @param key key of the state
75999
- * @param value value of the state
76000
- */
76001
- async setFlowUserState(key: string, value: any, errorIfNotPossible?: boolean): Promise<void>
76353
+ # Server-side sleep — suspend the workflow for the given duration without holding a worker.
76354
+ #
76355
+ # Inside a @workflow, the parent job suspends and auto-resumes after \`\`seconds\`\`.
76356
+ # Outside a workflow, falls back to \`\`asyncio.sleep\`\`.
76357
+ async def sleep(seconds: int)
76002
76358
 
76003
- /**
76004
- * Get a flow user state
76005
- * @param path path of the variable
76006
- */
76007
- async getFlowUserState(key: string, errorIfNotPossible?: boolean): Promise<any>
76359
+ # Suspend the workflow and wait for an external approval.
76360
+ #
76361
+ # Use \`\`get_resume_urls()\`\` (wrapped in \`\`step()\`\`) to obtain
76362
+ # resume/cancel/approval URLs before calling this function.
76363
+ #
76364
+ # Returns a dict with \`\`value\`\` (form data), \`\`approver\`\`, and \`\`approved\`\`.
76365
+ #
76366
+ # Example::
76367
+ #
76368
+ # urls = await step("urls", lambda: get_resume_urls())
76369
+ # await step("notify", lambda: send_email(urls["approvalPage"]))
76370
+ # result = await wait_for_approval(timeout=3600)
76371
+ async def wait_for_approval(timeout: int = 1800, form: dict | None = None) -> dict
76008
76372
 
76009
- /**
76010
- * Get the internal state
76011
- * @deprecated use getState instead
76012
- */
76013
- async getInternalState(): Promise<any>
76373
+ # Process items in parallel with optional concurrency control.
76374
+ #
76375
+ # Each item is processed by calling \`\`fn(item)\`\`, which should be a @task.
76376
+ # Items are dispatched in batches of \`\`concurrency\`\` (default: all at once).
76377
+ #
76378
+ # Example::
76379
+ #
76380
+ # @task
76381
+ # async def process(item: str):
76382
+ # ...
76383
+ #
76384
+ # results = await parallel(items, process, concurrency=5)
76385
+ async def parallel(items, fn, concurrency: Optional[int] = None)
76014
76386
 
76015
- /**
76016
- * Get the state shared across executions
76017
- * @param path Optional state resource path override. Defaults to \`getStatePath()\`.
76018
- */
76019
- async getState(path?: string): Promise<any>
76387
+ # Commit Kafka offsets for a trigger with auto_commit disabled.
76388
+ #
76389
+ # Args:
76390
+ # trigger_path: Path to the Kafka trigger (from event['wm_trigger']['trigger_path'])
76391
+ # topic: Kafka topic name (from event['topic'])
76392
+ # partition: Partition number (from event['partition'])
76393
+ # offset: Message offset to commit (from event['offset'])
76394
+ def commit_kafka_offsets(trigger_path: str, topic: str, partition: int, offset: int) -> None
76020
76395
 
76021
- /**
76022
- * Get a variable by path
76023
- * @param path path of the variable
76024
- * @returns variable value
76025
- */
76026
- async getVariable(path: string): Promise<string>
76396
+ `,
76397
+ "write-script-rust": `---
76398
+ name: write-script-rust
76399
+ description: MUST use when writing Rust scripts.
76400
+ ---
76027
76401
 
76028
- /**
76029
- * Set a variable by path, create if not exist
76030
- * @param path path of the variable
76031
- * @param value value of the variable
76032
- * @param isSecretIfNotExist if the variable does not exist, create it as secret or not (default: false)
76033
- * @param descriptionIfNotExist if the variable does not exist, create it with this description (default: "")
76034
- */
76035
- async setVariable(path: string, value: string, isSecretIfNotExist?: boolean, descriptionIfNotExist?: string): Promise<void>
76402
+ ## CLI Commands
76036
76403
 
76037
- /**
76038
- * Build a PostgreSQL connection URL from a database resource
76039
- * @param path - Path to the database resource
76040
- * @returns PostgreSQL connection URL string
76041
- */
76042
- async databaseUrlFromResource(path: string): Promise<string>
76404
+ Place scripts in a folder. After writing, tell the user they can run:
76405
+ - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
76406
+ - \`wmill sync push\` - Deploy to Windmill
76043
76407
 
76044
- async polarsConnectionSettings(s3_resource_path: string | undefined): Promise<any>
76408
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
76045
76409
 
76046
- async duckdbConnectionSettings(s3_resource_path: string | undefined): Promise<any>
76410
+ Use \`wmill resource-type list --schema\` to discover available resource types.
76047
76411
 
76048
- /**
76049
- * Get S3 client settings from a resource or workspace default
76050
- * @param s3_resource_path - Path to S3 resource (uses workspace default if undefined)
76051
- * @returns S3 client configuration settings
76052
- */
76053
- async denoS3LightClientSettings(s3_resource_path: string | undefined): Promise<DenoS3LightClientSettings>
76412
+ # Rust
76054
76413
 
76055
- /**
76056
- * Load the content of a file stored in S3. If the s3ResourcePath is undefined, it will default to the workspace S3 resource.
76057
- *
76058
- * \`\`\`typescript
76059
- * let fileContent = await wmill.loadS3FileContent(inputFile)
76060
- * // if the file is a raw text file, it can be decoded and printed directly:
76061
- * const text = new TextDecoder().decode(fileContentStream)
76062
- * console.log(text);
76063
- * \`\`\`
76064
- */
76065
- async loadS3File(s3object: S3Object, s3ResourcePath: string | undefined = undefined): Promise<Uint8Array | undefined>
76414
+ ## Structure
76066
76415
 
76067
- /**
76068
- * Load the content of a file stored in S3 as a stream. If the s3ResourcePath is undefined, it will default to the workspace S3 resource.
76069
- *
76070
- * \`\`\`typescript
76071
- * let fileContentBlob = await wmill.loadS3FileStream(inputFile)
76072
- * // if the content is plain text, the blob can be read directly:
76073
- * console.log(await fileContentBlob.text());
76074
- * \`\`\`
76075
- */
76076
- async loadS3FileStream(s3object: S3Object, s3ResourcePath: string | undefined = undefined): Promise<Blob | undefined>
76416
+ The script must contain a function called \`main\` with proper return type:
76077
76417
 
76078
- /**
76079
- * Persist a file to the S3 bucket. If the s3ResourcePath is undefined, it will default to the workspace S3 resource.
76080
- *
76081
- * \`\`\`typescript
76082
- * const s3object = await writeS3File(s3Object, "Hello Windmill!")
76083
- * const fileContentAsUtf8Str = (await s3object.toArray()).toString('utf-8')
76084
- * console.log(fileContentAsUtf8Str)
76085
- * \`\`\`
76086
- */
76087
- async writeS3File(s3object: S3Object | undefined, fileContent: string | Blob, s3ResourcePath: string | undefined = undefined, contentType: string | undefined = undefined, contentDisposition: string | undefined = undefined): Promise<S3Object>
76418
+ \`\`\`rust
76419
+ use anyhow::anyhow;
76420
+ use serde::Serialize;
76088
76421
 
76089
- /**
76090
- * Sign S3 objects to be used by anonymous users in public apps
76091
- * @param s3objects s3 objects to sign
76092
- * @returns signed s3 objects
76093
- */
76094
- async signS3Objects(s3objects: S3Object[]): Promise<S3Object[]>
76422
+ #[derive(Serialize, Debug)]
76423
+ struct ReturnType {
76424
+ result: String,
76425
+ count: i32,
76426
+ }
76095
76427
 
76096
- /**
76097
- * Sign S3 object to be used by anonymous users in public apps
76098
- * @param s3object s3 object to sign
76099
- * @returns signed s3 object
76100
- */
76101
- async signS3Object(s3object: S3Object): Promise<S3Object>
76428
+ fn main(param1: String, param2: i32) -> anyhow::Result<ReturnType> {
76429
+ Ok(ReturnType {
76430
+ result: param1,
76431
+ count: param2,
76432
+ })
76433
+ }
76434
+ \`\`\`
76102
76435
 
76103
- /**
76104
- * Generate a presigned public URL for an array of S3 objects.
76105
- * If an S3 object is not signed yet, it will be signed first.
76106
- * @param s3Objects s3 objects to sign
76107
- * @returns list of signed public URLs
76108
- */
76109
- async getPresignedS3PublicUrls(s3Objects: S3Object[], { baseUrl }: { baseUrl?: string } = {}): Promise<string[]>
76436
+ **Important:**
76437
+ - Arguments should be owned types
76438
+ - Return type must be serializable (\`#[derive(Serialize)]\`)
76439
+ - Return type is \`anyhow::Result<T>\`
76110
76440
 
76111
- /**
76112
- * Generate a presigned public URL for an S3 object. If the S3 object is not signed yet, it will be signed first.
76113
- * @param s3Object s3 object to sign
76114
- * @returns signed public URL
76115
- */
76116
- async getPresignedS3PublicUrl(s3Objects: S3Object, { baseUrl }: { baseUrl?: string } = {}): Promise<string>
76441
+ ## Dependencies
76117
76442
 
76118
- /**
76119
- * Get URLs needed for resuming a flow after this step
76120
- * @param approver approver name
76121
- * @param flowLevel if true, generate resume URLs for the parent flow instead of the specific step.
76122
- * This allows pre-approvals that can be consumed by any later suspend step in the same flow.
76123
- * @returns approval page UI URL, resume and cancel API URLs for resuming the flow
76124
- */
76125
- async getResumeUrls(approver?: string, flowLevel?: boolean): Promise<{
76126
- approvalPage: string;
76127
- resume: string;
76128
- cancel: string;
76129
- }>
76443
+ Packages must be specified with a partial cargo.toml at the beginning of the script:
76130
76444
 
76131
- /**
76132
- * @deprecated use getResumeUrls instead
76133
- */
76134
- getResumeEndpoints(approver?: string): Promise<{
76135
- approvalPage: string;
76136
- resume: string;
76137
- cancel: string;
76138
- }>
76445
+ \`\`\`rust
76446
+ //! \`\`\`cargo
76447
+ //! [dependencies]
76448
+ //! anyhow = "1.0.86"
76449
+ //! reqwest = { version = "0.11", features = ["json"] }
76450
+ //! tokio = { version = "1", features = ["full"] }
76451
+ //! \`\`\`
76139
76452
 
76140
- /**
76141
- * Get an OIDC jwt token for auth to external services (e.g: Vault, AWS) (ee only)
76142
- * @param audience audience of the token
76143
- * @param expiresIn Optional number of seconds until the token expires
76144
- * @returns jwt token
76145
- */
76146
- async getIdToken(audience: string, expiresIn?: number): Promise<string>
76453
+ use anyhow::anyhow;
76454
+ // ... rest of the code
76455
+ \`\`\`
76147
76456
 
76148
- /**
76149
- * Convert a base64-encoded string to Uint8Array
76150
- * @param data - Base64-encoded string
76151
- * @returns Decoded Uint8Array
76152
- */
76153
- base64ToUint8Array(data: string): Uint8Array
76457
+ **Note:** Serde is already included, no need to add it again.
76154
76458
 
76155
- /**
76156
- * Convert a Uint8Array to base64-encoded string
76157
- * @param arrayBuffer - Uint8Array to encode
76158
- * @returns Base64-encoded string
76159
- */
76160
- uint8ArrayToBase64(arrayBuffer: Uint8Array): string
76459
+ ## Async Functions
76161
76460
 
76162
- /**
76163
- * Get email from workspace username
76164
- * This method is particularly useful for apps that require the email address of the viewer.
76165
- * Indeed, in the viewer context, WM_USERNAME is set to the username of the viewer but WM_EMAIL is set to the email of the creator of the app.
76166
- * @param username
76167
- * @returns email address
76168
- */
76169
- async usernameToEmail(username: string): Promise<string>
76461
+ If you need to handle async functions (e.g., using tokio), keep the main function sync and create the runtime inside:
76170
76462
 
76171
- /**
76172
- * Sends an interactive approval request via Slack, allowing optional customization of the message, approver, and form fields.
76173
- *
76174
- * **[Enterprise Edition Only]** To include form fields in the Slack approval request, go to **Advanced -> Suspend -> Form**
76175
- * and define a form. Learn more at [Windmill Documentation](https://www.windmill.dev/docs/flows/flow_approval#form).
76176
- *
76177
- * @param {Object} options - The configuration options for the Slack approval request.
76178
- * @param {string} options.slackResourcePath - The path to the Slack resource in Windmill.
76179
- * @param {string} options.channelId - The Slack channel ID where the approval request will be sent.
76180
- * @param {string} [options.message] - Optional custom message to include in the Slack approval request.
76181
- * @param {string} [options.approver] - Optional user ID or name of the approver for the request.
76182
- * @param {DefaultArgs} [options.defaultArgsJson] - Optional object defining or overriding the default arguments to a form field.
76183
- * @param {Enums} [options.dynamicEnumsJson] - Optional object overriding the enum default values of an enum form field.
76184
- * @param {string} [options.resumeButtonText] - Optional text for the resume button.
76185
- * @param {string} [options.cancelButtonText] - Optional text for the cancel button.
76186
- *
76187
- * @returns {Promise<void>} Resolves when the Slack approval request is successfully sent.
76188
- *
76189
- * @throws {Error} If the function is not called within a flow or flow preview.
76190
- * @throws {Error} If the \`JobService.getSlackApprovalPayload\` call fails.
76191
- *
76192
- * **Usage Example:**
76193
- * \`\`\`typescript
76194
- * await requestInteractiveSlackApproval({
76195
- * slackResourcePath: "/u/alex/my_slack_resource",
76196
- * channelId: "admins-slack-channel",
76197
- * message: "Please approve this request",
76198
- * approver: "approver123",
76199
- * defaultArgsJson: { key1: "value1", key2: 42 },
76200
- * dynamicEnumsJson: { foo: ["choice1", "choice2"], bar: ["optionA", "optionB"] },
76201
- * resumeButtonText: "Resume",
76202
- * cancelButtonText: "Cancel",
76203
- * });
76204
- * \`\`\`
76205
- *
76206
- * **Note:** This function requires execution within a Windmill flow or flow preview.
76207
- */
76208
- async requestInteractiveSlackApproval({ slackResourcePath, channelId, message, approver, defaultArgsJson, dynamicEnumsJson, resumeButtonText, cancelButtonText, }: SlackApprovalOptions): Promise<void>
76463
+ \`\`\`rust
76464
+ //! \`\`\`cargo
76465
+ //! [dependencies]
76466
+ //! anyhow = "1.0.86"
76467
+ //! tokio = { version = "1", features = ["full"] }
76468
+ //! reqwest = { version = "0.11", features = ["json"] }
76469
+ //! \`\`\`
76209
76470
 
76210
- /**
76211
- * Sends an interactive approval request via Teams, allowing optional customization of the message, approver, and form fields.
76212
- *
76213
- * **[Enterprise Edition Only]** To include form fields in the Teams approval request, go to **Advanced -> Suspend -> Form**
76214
- * and define a form. Learn more at [Windmill Documentation](https://www.windmill.dev/docs/flows/flow_approval#form).
76215
- *
76216
- * @param {Object} options - The configuration options for the Teams approval request.
76217
- * @param {string} options.teamName - The Teams team name where the approval request will be sent.
76218
- * @param {string} options.channelName - The Teams channel name where the approval request will be sent.
76219
- * @param {string} [options.message] - Optional custom message to include in the Teams approval request.
76220
- * @param {string} [options.approver] - Optional user ID or name of the approver for the request.
76221
- * @param {DefaultArgs} [options.defaultArgsJson] - Optional object defining or overriding the default arguments to a form field.
76222
- * @param {Enums} [options.dynamicEnumsJson] - Optional object overriding the enum default values of an enum form field.
76223
- *
76224
- * @returns {Promise<void>} Resolves when the Teams approval request is successfully sent.
76225
- *
76226
- * @throws {Error} If the function is not called within a flow or flow preview.
76227
- * @throws {Error} If the \`JobService.getTeamsApprovalPayload\` call fails.
76228
- *
76229
- * **Usage Example:**
76230
- * \`\`\`typescript
76231
- * await requestInteractiveTeamsApproval({
76232
- * teamName: "admins-teams",
76233
- * channelName: "admins-teams-channel",
76234
- * message: "Please approve this request",
76235
- * approver: "approver123",
76236
- * defaultArgsJson: { key1: "value1", key2: 42 },
76237
- * dynamicEnumsJson: { foo: ["choice1", "choice2"], bar: ["optionA", "optionB"] },
76238
- * });
76239
- * \`\`\`
76240
- *
76241
- * **Note:** This function requires execution within a Windmill flow or flow preview.
76242
- */
76243
- async requestInteractiveTeamsApproval({ teamName, channelName, message, approver, defaultArgsJson, dynamicEnumsJson, }: TeamsApprovalOptions): Promise<void>
76471
+ use anyhow::anyhow;
76472
+ use serde::Serialize;
76244
76473
 
76245
- /**
76246
- * Parse an S3 object from URI string or record format
76247
- * @param s3Object - S3 object as URI string (s3://storage/key) or record
76248
- * @returns S3 object record with storage and s3 key
76249
- */
76250
- parseS3Object(s3Object: S3Object): S3ObjectRecord
76474
+ #[derive(Serialize, Debug)]
76475
+ struct Response {
76476
+ data: String,
76477
+ }
76251
76478
 
76252
- setWorkflowCtx(ctx: WorkflowCtx | null): void
76479
+ fn main(url: String) -> anyhow::Result<Response> {
76480
+ let rt = tokio::runtime::Runtime::new()?;
76481
+ rt.block_on(async {
76482
+ let resp = reqwest::get(&url).await?.text().await?;
76483
+ Ok(Response { data: resp })
76484
+ })
76485
+ }
76486
+ \`\`\`
76487
+ `,
76488
+ "write-script-snowflake": `---
76489
+ name: write-script-snowflake
76490
+ description: MUST use when writing Snowflake queries.
76491
+ ---
76253
76492
 
76254
- async sleep(seconds: number): Promise<void>
76493
+ ## CLI Commands
76255
76494
 
76256
- async step<T>(name: string, fn: () => T | Promise<T>): Promise<T>
76495
+ Place scripts in a folder. After writing, tell the user they can run:
76496
+ - \`wmill script generate-metadata\` - Generate .script.yaml and .lock files
76497
+ - \`wmill sync push\` - Deploy to Windmill
76257
76498
 
76258
- /**
76259
- * Create a task that dispatches to a separate Windmill script.
76260
- *
76261
- * @example
76262
- * const extract = taskScript("f/data/extract");
76263
- * // inside workflow: await extract({ url: "https://..." })
76264
- */
76265
- taskScript(path: string, options?: TaskOptions): (...args: any[]) => PromiseLike<any>
76499
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
76266
76500
 
76267
- /**
76268
- * Create a task that dispatches to a separate Windmill flow.
76269
- *
76270
- * @example
76271
- * const pipeline = taskFlow("f/etl/pipeline");
76272
- * // inside workflow: await pipeline({ input: data })
76273
- */
76274
- taskFlow(path: string, options?: TaskOptions): (...args: any[]) => PromiseLike<any>
76501
+ Use \`wmill resource-type list --schema\` to discover available resource types.
76275
76502
 
76276
- /**
76277
- * Mark an async function as a workflow-as-code entry point.
76278
- *
76279
- * The function must be **deterministic**: given the same inputs it must call
76280
- * tasks in the same order on every replay. Branching on task results is fine
76281
- * (results are replayed from checkpoint), but branching on external state
76282
- * (current time, random values, external API calls) must use \`step()\` to
76283
- * checkpoint the value so replays see the same result.
76284
- */
76285
- workflow<T>(fn: (...args: any[]) => Promise<T>): void
76503
+ # Snowflake
76286
76504
 
76287
- /**
76288
- * Suspend the workflow and wait for an external approval.
76289
- *
76290
- * Use \`getResumeUrls()\` (wrapped in \`step()\`) to obtain resume/cancel/approvalPage
76291
- * URLs before calling this function.
76292
- *
76293
- * @example
76294
- * const urls = await step("urls", () => getResumeUrls());
76295
- * await step("notify", () => sendEmail(urls.approvalPage));
76296
- * const { value, approver } = await waitForApproval({ timeout: 3600 });
76297
- */
76298
- waitForApproval(options?: { timeout?: number; form?: object; }): PromiseLike<{ value: any; approver: string; approved: boolean }>
76505
+ Arguments use \`?\` placeholders.
76299
76506
 
76300
- /**
76301
- * Process items in parallel with optional concurrency control.
76302
- *
76303
- * Each item is processed by calling \`fn(item)\`, which should be a task().
76304
- * Items are dispatched in batches of \`concurrency\` (default: all at once).
76305
- *
76306
- * @example
76307
- * const process = task(async (item: string) => { ... });
76308
- * const results = await parallel(items, process, { concurrency: 5 });
76309
- */
76310
- async parallel<T, R>(items: T[], fn: (item: T) => PromiseLike<R> | R, options?: { concurrency?: number },): Promise<R[]>
76507
+ Name the parameters by adding comments before the statement:
76508
+
76509
+ \`\`\`sql
76510
+ -- ? name1 (text)
76511
+ -- ? name2 (number) = 0
76512
+ SELECT * FROM users WHERE name = ? AND age > ?;
76513
+ \`\`\`
76311
76514
  `,
76312
76515
  "write-flow": `---
76313
76516
  name: write-flow
@@ -76320,10 +76523,12 @@ description: MUST use when creating flows.
76320
76523
 
76321
76524
  Create a folder ending with \`.flow\` and add a YAML file with the flow definition.
76322
76525
  For rawscript modules, use \`!inline path/to/script.ts\` for the content key.
76323
- After writing:
76526
+ After writing, tell the user they can run:
76324
76527
  - \`wmill flow generate-locks <path_to_flow_folder> --yes\` - Generate lock files for the specific flow you modified (e.g. \`wmill flow generate-locks f/my_folder/my_flow.flow --yes\`)
76325
76528
  - \`wmill sync push\` - Deploy to Windmill
76326
76529
 
76530
+ Do NOT run these commands yourself. Instead, inform the user that they should run them.
76531
+
76327
76532
  ## OpenFlow Schema
76328
76533
 
76329
76534
  The OpenFlow schema (openflow.openapi.yaml) is the source of truth for flow structure. Refer to OPENFLOW_SCHEMA for the complete type definitions.
@@ -76521,7 +76726,7 @@ export async function main(user_id: string) {
76521
76726
  }
76522
76727
  \`\`\`
76523
76728
 
76524
- After creating, generate lock files:
76729
+ After creating, tell the user they can generate lock files by running:
76525
76730
  \`\`\`bash
76526
76731
  wmill app generate-locks
76527
76732
  \`\`\`
@@ -76674,6 +76879,8 @@ data:
76674
76879
 
76675
76880
  ## CLI Commands
76676
76881
 
76882
+ Tell the user they can run these commands (do NOT run them yourself):
76883
+
76677
76884
  | Command | Description |
76678
76885
  |---------|-------------|
76679
76886
  | \`wmill app new\` | Create a new raw app interactively |
@@ -76690,7 +76897,7 @@ data:
76690
76897
  3. **Keep runnables focused** - one function per file
76691
76898
  4. **Use descriptive IDs** - \`get_user.ts\` not \`a.ts\`
76692
76899
  5. **Always whitelist tables** - add to \`data.tables\` before querying
76693
- 6. **Generate locks** - run \`wmill app generate-locks\` after adding/modifying backend runnables
76900
+ 6. **Generate locks** - tell the user to run \`wmill app generate-locks\` after adding/modifying backend runnables
76694
76901
  `,
76695
76902
  triggers: `---
76696
76903
  name: triggers
@@ -76712,6 +76919,8 @@ Examples:
76712
76919
 
76713
76920
  ## CLI Commands
76714
76921
 
76922
+ After writing, tell the user they can run these commands (do NOT run them yourself):
76923
+
76715
76924
  \`\`\`bash
76716
76925
  # Push trigger configuration
76717
76926
  wmill sync push
@@ -76761,6 +76970,8 @@ Windmill uses 6-field cron expressions (includes seconds):
76761
76970
 
76762
76971
  ## CLI Commands
76763
76972
 
76973
+ After writing, tell the user they can run these commands (do NOT run them yourself):
76974
+
76764
76975
  \`\`\`bash
76765
76976
  # Push schedules to Windmill
76766
76977
  wmill sync push
@@ -77016,7 +77227,7 @@ wmill resource-type list --schema
77016
77227
  # Get specific resource type schema
77017
77228
  wmill resource-type get postgresql
77018
77229
 
77019
- # Push resources
77230
+ # Push resources (tell the user to run this, do NOT run it yourself)
77020
77231
  wmill sync push
77021
77232
  \`\`\`
77022
77233
  `,
@@ -77029,8 +77240,6 @@ description: MUST use when using the CLI.
77029
77240
 
77030
77241
  The Windmill CLI (\`wmill\`) provides commands for managing scripts, flows, apps, and other resources.
77031
77242
 
77032
- Current version: 1.651.1
77033
-
77034
77243
  ## Global Options
77035
77244
 
77036
77245
  - \`--workspace <workspace:string>\` - Specify the target workspace. This overrides the default workspace.
@@ -77148,6 +77357,23 @@ folder related commands
77148
77357
  - \`folder add-missing\` - create default folder.meta.yaml for all subdirectories of f/ that are missing one
77149
77358
  - \`-y, --yes\` - skip confirmation prompt
77150
77359
 
77360
+ ### generate-metadata
77361
+
77362
+ Generate metadata (locks, schemas) for all scripts, flows, and apps
77363
+
77364
+ **Arguments:** \`[folder:string]\`
77365
+
77366
+ **Options:**
77367
+ - \`--yes\` - Skip confirmation prompt
77368
+ - \`--dry-run\` - Show what would be updated without making changes
77369
+ - \`--lock-only\` - Re-generate only the lock files
77370
+ - \`--schema-only\` - Re-generate only script schemas (skips flows and apps)
77371
+ - \`--skip-scripts\` - Skip processing scripts
77372
+ - \`--skip-flows\` - Skip processing flows
77373
+ - \`--skip-apps\` - Skip processing apps
77374
+ - \`-i --includes <patterns:file[]>\` - Comma separated patterns to specify which files to include
77375
+ - \`-e --excludes <patterns:file[]>\` - Comma separated patterns to specify which files to exclude
77376
+
77151
77377
  ### gitsync-settings
77152
77378
 
77153
77379
  Manage git-sync settings between local wmill.yaml and Windmill backend
@@ -77776,6 +78002,18 @@ properties:
77776
78002
  key:
77777
78003
  type: string
77778
78004
  value: {}
78005
+ auto_offset_reset:
78006
+ type: string
78007
+ enum:
78008
+ - latest
78009
+ - earliest
78010
+ description: Initial offset behavior when consumer group has no committed offset.
78011
+ 'latest' starts from new messages only, 'earliest' starts from the beginning.
78012
+ auto_commit:
78013
+ type: boolean
78014
+ description: When true (default), offsets are committed automatically after receiving
78015
+ each message. When false, you must manually commit offsets using the commit_offsets
78016
+ endpoint.
77779
78017
  error_handler_path:
77780
78018
  type: string
77781
78019
  description: Path to a script or flow to run when the triggered job fails
@@ -78764,6 +79002,170 @@ var push12 = new Command().description("Push completed and queued jobs to worksp
78764
79002
  var command28 = new Command().description("Manage jobs (import/export)").command("pull", pull3).command("push", push12);
78765
79003
  var jobs_default = command28;
78766
79004
 
79005
+ // src/commands/generate-metadata/generate-metadata.ts
79006
+ init_mod3();
79007
+ init_colors2();
79008
+ init_log();
79009
+ init_resource_folders();
79010
+ await __promiseAll([
79011
+ init_confirm(),
79012
+ init_conf(),
79013
+ init_context(),
79014
+ init_auth(),
79015
+ init_metadata(),
79016
+ init_flow_metadata(),
79017
+ init_app_metadata(),
79018
+ init_sync(),
79019
+ init_script(),
79020
+ init_codebase()
79021
+ ]);
79022
+ import { sep as SEP21 } from "node:path";
79023
+ async function generateMetadata2(opts, folder) {
79024
+ if (folder === "") {
79025
+ folder = undefined;
79026
+ }
79027
+ const workspace = await resolveWorkspace(opts);
79028
+ await requireLogin(opts);
79029
+ opts = await mergeConfigWithConfigFile(opts);
79030
+ const rawWorkspaceDependencies = await getRawWorkspaceDependencies();
79031
+ const codebases = await listSyncCodebases(opts);
79032
+ const ignore = await ignoreF(opts);
79033
+ const staleItems = [];
79034
+ const skipScripts = opts.skipScripts ?? false;
79035
+ const skipFlows = opts.skipFlows ?? opts.schemaOnly ?? false;
79036
+ const skipApps = opts.skipApps ?? opts.schemaOnly ?? false;
79037
+ const checking = [];
79038
+ if (!skipScripts)
79039
+ checking.push("scripts");
79040
+ if (!skipFlows)
79041
+ checking.push("flows");
79042
+ if (!skipApps)
79043
+ checking.push("apps");
79044
+ if (checking.length === 0) {
79045
+ info(colors.yellow("Nothing to check (all types skipped)"));
79046
+ return;
79047
+ }
79048
+ info(colors.gray(`Checking ${checking.join(", ")}...`));
79049
+ if (!skipScripts) {
79050
+ const scriptElems = await elementsToMap(await FSFSElement(process.cwd(), codebases, false), (p, isD) => {
79051
+ return !isD && !exts.some((ext2) => p.endsWith(ext2)) || ignore(p, isD) || isFlowPath(p) || isAppPath(p) || isRawAppPath(p);
79052
+ }, false, {});
79053
+ for (const e of Object.keys(scriptElems)) {
79054
+ const candidate = await generateScriptMetadataInternal(e, workspace, opts, true, true, rawWorkspaceDependencies, codebases, false);
79055
+ if (candidate) {
79056
+ staleItems.push({ type: "script", path: candidate, folder: e });
79057
+ }
79058
+ }
79059
+ }
79060
+ if (!skipFlows) {
79061
+ const flowElems = Object.keys(await elementsToMap(await FSFSElement(process.cwd(), [], true), (p, isD) => {
79062
+ return ignore(p, isD) || !isD && !p.endsWith(SEP21 + "flow.yaml") && !p.endsWith(SEP21 + "flow.json");
79063
+ }, false, {})).map((x) => x.substring(0, x.lastIndexOf(SEP21)));
79064
+ for (const folder2 of flowElems) {
79065
+ const candidate = await generateFlowLockInternal(folder2, true, workspace, opts, false, true);
79066
+ if (candidate) {
79067
+ staleItems.push({ type: "flow", path: candidate, folder: folder2 });
79068
+ }
79069
+ }
79070
+ }
79071
+ if (!skipApps) {
79072
+ const elems = await elementsToMap(await FSFSElement(process.cwd(), [], true), (p, isD) => {
79073
+ return ignore(p, isD) || !isD && !p.endsWith(SEP21 + "raw_app.yaml") && !p.endsWith(SEP21 + "app.yaml");
79074
+ }, false, {});
79075
+ const rawAppFolders = getAppFolders(elems, "raw_app.yaml");
79076
+ const appFolders = getAppFolders(elems, "app.yaml");
79077
+ for (const appFolder of rawAppFolders) {
79078
+ const candidate = await generateAppLocksInternal(appFolder, true, true, workspace, opts, false, true);
79079
+ if (candidate) {
79080
+ staleItems.push({ type: "app", path: candidate, folder: appFolder, isRawApp: true });
79081
+ }
79082
+ }
79083
+ for (const appFolder of appFolders) {
79084
+ const candidate = await generateAppLocksInternal(appFolder, false, true, workspace, opts, false, true);
79085
+ if (candidate) {
79086
+ staleItems.push({ type: "app", path: candidate, folder: appFolder, isRawApp: false });
79087
+ }
79088
+ }
79089
+ }
79090
+ let filteredItems = staleItems;
79091
+ if (folder) {
79092
+ folder = folder.replaceAll("\\", "/");
79093
+ if (folder.endsWith("/")) {
79094
+ folder = folder.substring(0, folder.length - 1);
79095
+ }
79096
+ filteredItems = staleItems.filter((item) => {
79097
+ const normalizedFolder = item.folder.replaceAll("\\", "/");
79098
+ return normalizedFolder === folder || normalizedFolder.startsWith(folder + "/");
79099
+ });
79100
+ }
79101
+ if (filteredItems.length === 0) {
79102
+ info(colors.green("All metadata up-to-date"));
79103
+ return;
79104
+ }
79105
+ const scripts = filteredItems.filter((i) => i.type === "script");
79106
+ const flows = filteredItems.filter((i) => i.type === "flow");
79107
+ const apps2 = filteredItems.filter((i) => i.type === "app");
79108
+ info("");
79109
+ info(`Found ${filteredItems.length} item(s) with stale metadata:`);
79110
+ if (scripts.length > 0) {
79111
+ info(colors.gray(` Scripts (${scripts.length}):`));
79112
+ for (const item of scripts) {
79113
+ info(colors.yellow(` ${item.path}`));
79114
+ }
79115
+ }
79116
+ if (flows.length > 0) {
79117
+ info(colors.gray(` Flows (${flows.length}):`));
79118
+ for (const item of flows) {
79119
+ info(colors.yellow(` ${item.path}`));
79120
+ }
79121
+ }
79122
+ if (apps2.length > 0) {
79123
+ info(colors.gray(` Apps (${apps2.length}):`));
79124
+ for (const item of apps2) {
79125
+ info(colors.yellow(` ${item.path}`));
79126
+ }
79127
+ }
79128
+ if (opts.dryRun) {
79129
+ return;
79130
+ }
79131
+ info("");
79132
+ if (!opts.yes && !await Confirm.prompt({
79133
+ message: "Update metadata?",
79134
+ default: true
79135
+ })) {
79136
+ return;
79137
+ }
79138
+ info("");
79139
+ const total = filteredItems.length;
79140
+ const maxWidth = `[${total}/${total}]`.length;
79141
+ let current = 0;
79142
+ const formatProgress = (n) => {
79143
+ const bracket = `[${n}/${total}]`;
79144
+ return colors.gray(bracket.padEnd(maxWidth, " "));
79145
+ };
79146
+ for (const item of scripts) {
79147
+ current++;
79148
+ info(`${formatProgress(current)} script ${colors.cyan(item.path)}`);
79149
+ await generateScriptMetadataInternal(item.folder, workspace, opts, false, true, rawWorkspaceDependencies, codebases, false);
79150
+ }
79151
+ for (const item of flows) {
79152
+ current++;
79153
+ const result = await generateFlowLockInternal(item.folder, false, workspace, opts, false, true);
79154
+ const scriptsInfo = result?.updatedScripts?.length ? `: ${colors.gray(result.updatedScripts.join(", "))}` : "";
79155
+ info(`${formatProgress(current)} flow ${colors.cyan(item.path)}${scriptsInfo}`);
79156
+ }
79157
+ for (const item of apps2) {
79158
+ current++;
79159
+ const result = await generateAppLocksInternal(item.folder, item.isRawApp, false, workspace, opts, false, true);
79160
+ const scriptsInfo = result?.updatedScripts?.length ? `: ${colors.gray(result.updatedScripts.join(", "))}` : "";
79161
+ info(`${formatProgress(current)} app ${colors.cyan(item.path)}${scriptsInfo}`);
79162
+ }
79163
+ info("");
79164
+ info(colors.green(`Done. Updated ${total} item(s).`));
79165
+ }
79166
+ var command29 = new Command().description("Generate metadata (locks, schemas) for all scripts, flows, and apps").arguments("[folder:string]").option("--yes", "Skip confirmation prompt").option("--dry-run", "Show what would be updated without making changes").option("--lock-only", "Re-generate only the lock files").option("--schema-only", "Re-generate only script schemas (skips flows and apps)").option("--skip-scripts", "Skip processing scripts").option("--skip-flows", "Skip processing flows").option("--skip-apps", "Skip processing apps").option("-i --includes <patterns:file[]>", "Comma separated patterns to specify which files to include").option("-e --excludes <patterns:file[]>", "Comma separated patterns to specify which files to exclude").action(generateMetadata2);
79167
+ var generate_metadata_default = command29;
79168
+
78767
79169
  // src/commands/docs/docs.ts
78768
79170
  init_mod3();
78769
79171
  init_colors2();
@@ -78834,13 +79236,13 @@ ${await res.text()}`);
78834
79236
  console.log();
78835
79237
  }
78836
79238
  }
78837
- var command29 = new Command().name("docs").description("Search Windmill documentation. Requires Enterprise Edition.").arguments("<query:string>").option("--json", "Output results as JSON.").action(docs);
78838
- var docs_default = command29;
79239
+ var command30 = new Command().name("docs").description("Search Windmill documentation. Requires Enterprise Edition.").arguments("<query:string>").option("--json", "Output results as JSON.").action(docs);
79240
+ var docs_default = command30;
78839
79241
 
78840
79242
  // src/main.ts
78841
79243
  await init_context();
78842
- var VERSION = "1.654.0";
78843
- var command30 = new Command().name("wmill").action(() => info(`Welcome to Windmill CLI ${VERSION}. Use -h for help.`)).description("Windmill CLI").globalOption("--workspace <workspace:string>", "Specify the target workspace. This overrides the default workspace.").globalOption("--debug --verbose", "Show debug/verbose logs").globalOption("--show-diffs", "Show diff informations when syncing (may show sensitive informations)").globalOption("--token <token:string>", "Specify an API token. This will override any stored token.").globalOption("--base-url <baseUrl:string>", "Specify the base URL of the API. If used, --token and --workspace are required and no local remote/workspace already set will be used.").globalOption("--config-dir <configDir:string>", "Specify a custom config directory. Overrides WMILL_CONFIG_DIR environment variable and default ~/.config location.").env("HEADERS <headers:string>", `Specify headers to use for all requests. e.g: "HEADERS='h1: v1, h2: v2'"`).version(VERSION).versionOption(false).command("init", init_default).command("app", app_default).command("flow", flow_default).command("script", script_default).command("workspace", workspace_default).command("resource", resource_default).command("resource-type", resource_type_default).command("user", user_default).command("variable", variable_default).command("hub", hub_default).command("folder", folder_default).command("schedule", schedule_default).command("trigger", trigger_default).command("dev", dev_default2).command("sync", sync_default).command("lint", lint_default).command("gitsync-settings", gitsync_settings_default).command("instance", instance_default).command("worker-groups", worker_groups_default).command("workers", workers_default).command("queues", queues_default).command("dependencies", dependencies_default).command("jobs", jobs_default).command("docs", docs_default).command("version --version", "Show version information").action(async (opts) => {
79244
+ var VERSION = "1.657.0";
79245
+ var command31 = new Command().name("wmill").action(() => info(`Welcome to Windmill CLI ${VERSION}. Use -h for help.`)).description("Windmill CLI").globalOption("--workspace <workspace:string>", "Specify the target workspace. This overrides the default workspace.").globalOption("--debug --verbose", "Show debug/verbose logs").globalOption("--show-diffs", "Show diff informations when syncing (may show sensitive informations)").globalOption("--token <token:string>", "Specify an API token. This will override any stored token.").globalOption("--base-url <baseUrl:string>", "Specify the base URL of the API. If used, --token and --workspace are required and no local remote/workspace already set will be used.").globalOption("--config-dir <configDir:string>", "Specify a custom config directory. Overrides WMILL_CONFIG_DIR environment variable and default ~/.config location.").env("HEADERS <headers:string>", `Specify headers to use for all requests. e.g: "HEADERS='h1: v1, h2: v2'"`).version(VERSION).versionOption(false).command("init", init_default).command("app", app_default).command("flow", flow_default).command("script", script_default).command("workspace", workspace_default).command("resource", resource_default).command("resource-type", resource_type_default).command("user", user_default).command("variable", variable_default).command("hub", hub_default).command("folder", folder_default).command("schedule", schedule_default).command("trigger", trigger_default).command("dev", dev_default2).command("sync", sync_default).command("lint", lint_default).command("gitsync-settings", gitsync_settings_default).command("instance", instance_default).command("worker-groups", worker_groups_default).command("workers", workers_default).command("queues", queues_default).command("dependencies", dependencies_default).command("jobs", jobs_default).command("generate-metadata", generate_metadata_default).command("docs", docs_default).command("version --version", "Show version 
information").action(async (opts) => {
78844
79246
  console.log("CLI version: " + VERSION);
78845
79247
  try {
78846
79248
  const provider = new NpmProvider({ package: "windmill-cli" });
@@ -78870,20 +79272,20 @@ var command30 = new Command().name("wmill").action(() => info(`Welcome to Windmi
78870
79272
  error(e);
78871
79273
  info("Try running with sudo and otherwise check the result of the command: npm uninstall windmill-cli && npm install -g windmill-cli");
78872
79274
  })).command("completions", new Command().description("Generate shell completions.").command("bash", new Command().description("Generate bash completions.").action(() => {
78873
- process.stdout.write(generateShellCompletions(command30, "bash") + `
79275
+ process.stdout.write(generateShellCompletions(command31, "bash") + `
78874
79276
  `);
78875
79277
  })).command("zsh", new Command().description("Generate zsh completions.").action(() => {
78876
- process.stdout.write(generateShellCompletions(command30, "zsh") + `
79278
+ process.stdout.write(generateShellCompletions(command31, "zsh") + `
78877
79279
  `);
78878
79280
  })).command("fish", new Command().description("Generate fish completions.").action(() => {
78879
- process.stdout.write(generateShellCompletions(command30, "fish") + `
79281
+ process.stdout.write(generateShellCompletions(command31, "fish") + `
78880
79282
  `);
78881
79283
  })));
78882
79284
  async function main2() {
78883
79285
  try {
78884
79286
  const args = process.argv.slice(2);
78885
79287
  if (args.length === 0) {
78886
- command30.showHelp();
79288
+ command31.showHelp();
78887
79289
  }
78888
79290
  const LOG_LEVEL = args.includes("--verbose") || args.includes("--debug") ? "DEBUG" : "INFO";
78889
79291
  setShowDiffs(args.includes("--show-diffs"));
@@ -78893,7 +79295,7 @@ async function main2() {
78893
79295
  if (extraHeaders) {
78894
79296
  OpenAPI.HEADERS = extraHeaders;
78895
79297
  }
78896
- await command30.parse(args);
79298
+ await command31.parse(args);
78897
79299
  } catch (e) {
78898
79300
  if (e && typeof e === "object" && "name" in e && e.name === "ApiError") {
78899
79301
  console.log("Server failed. " + e.statusText + ": " + e.body);
@@ -78918,7 +79320,7 @@ if (isMain()) {
78918
79320
  process.stdin.destroy();
78919
79321
  });
78920
79322
  }
78921
- var main_default = command30;
79323
+ var main_default = command31;
78922
79324
  export {
78923
79325
  add as workspaceAdd,
78924
79326
  workspace_default as workspace,