windmill-cli 1.587.0 → 1.588.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -32,7 +32,7 @@ export const OpenAPI = {
  PASSWORD: undefined,
  TOKEN: getEnv("WM_TOKEN"),
  USERNAME: undefined,
- VERSION: '1.587.0',
+ VERSION: '1.588.0',
  WITH_CREDENTIALS: true,
  interceptors: {
  request: new Interceptors(),
@@ -6603,6 +6603,107 @@ export const listCompletedJobs = (data) => {
  }
  });
  };
+ /**
+ * export all completed jobs for backup/migration
+ * @param data The data for the request.
+ * @param data.workspace
+ * @param data.page which page to return (start at 1, default 1)
+ * @param data.perPage number of items to return for a given page (default 30, max 100)
+ * @returns ExportableCompletedJob All completed jobs exported
+ * @throws ApiError
+ */
+ export const exportCompletedJobs = (data) => {
+ return __request(OpenAPI, {
+ method: 'GET',
+ url: '/w/{workspace}/jobs/completed/export',
+ path: {
+ workspace: data.workspace
+ },
+ query: {
+ page: data.page,
+ per_page: data.perPage
+ }
+ });
+ };
+ /**
+ * import completed jobs from backup/migration
+ * @param data The data for the request.
+ * @param data.workspace
+ * @param data.requestBody
+ * @returns string Successfully imported completed jobs
+ * @throws ApiError
+ */
+ export const importCompletedJobs = (data) => {
+ return __request(OpenAPI, {
+ method: 'POST',
+ url: '/w/{workspace}/jobs/completed/import',
+ path: {
+ workspace: data.workspace
+ },
+ body: data.requestBody,
+ mediaType: 'application/json'
+ });
+ };
+ /**
+ * export all queued jobs for backup/migration
+ * @param data The data for the request.
+ * @param data.workspace
+ * @param data.page which page to return (start at 1, default 1)
+ * @param data.perPage number of items to return for a given page (default 30, max 100)
+ * @returns ExportableQueuedJob All queued jobs exported
+ * @throws ApiError
+ */
+ export const exportQueuedJobs = (data) => {
+ return __request(OpenAPI, {
+ method: 'GET',
+ url: '/w/{workspace}/jobs/queue/export',
+ path: {
+ workspace: data.workspace
+ },
+ query: {
+ page: data.page,
+ per_page: data.perPage
+ }
+ });
+ };
+ /**
+ * import queued jobs from backup/migration
+ * @param data The data for the request.
+ * @param data.workspace
+ * @param data.requestBody
+ * @returns string Successfully imported queued jobs
+ * @throws ApiError
+ */
+ export const importQueuedJobs = (data) => {
+ return __request(OpenAPI, {
+ method: 'POST',
+ url: '/w/{workspace}/jobs/queue/import',
+ path: {
+ workspace: data.workspace
+ },
+ body: data.requestBody,
+ mediaType: 'application/json'
+ });
+ };
+ /**
+ * delete jobs by IDs from all related tables
+ * @param data The data for the request.
+ * @param data.workspace
+ * @param data.requestBody
+ * @returns string Successfully deleted jobs
+ * @throws ApiError
+ */
+ export const deleteJobs = (data) => {
+ return __request(OpenAPI, {
+ method: 'POST',
+ url: '/w/{workspace}/jobs/delete',
+ path: {
+ workspace: data.workspace
+ },
+ body: data.requestBody,
+ mediaType: 'application/json'
+ });
+ };
  /**
  * list all jobs
  * @param data The data for the request.
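These five generated wrappers map one-to-one onto the new `/jobs/completed/export`, `/jobs/completed/import`, `/jobs/queue/export`, `/jobs/queue/import`, and `/jobs/delete` endpoints. As an illustration only (not part of the package), the export side can be consumed by paging until a short batch comes back; the import path below mirrors the CLI's own internal import of the generated module, so adjust it for your setup.

// Sketch only: gather every completed job by paging exportCompletedJobs.
// The import path mirrors the CLI's internal usage; adjust for your project.
import * as wmill from "../../../gen/services.gen.js";

async function exportAllCompletedJobs(workspace) {
  const all = [];
  let page = 1;
  const perPage = 100; // documented per-page maximum
  while (true) {
    const batch = await wmill.exportCompletedJobs({ workspace, page, perPage });
    all.push(...batch);
    if (batch.length < perPage) break; // a short batch means this was the last page
    page++;
  }
  return all;
}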
@@ -6,6 +6,7 @@ import * as wmill from "../../../gen/services.gen.js";
  import { isSuperset } from "../../types.js";
  import { readInlinePathSync } from "../../utils/utils.js";
  import devCommand from "./dev.js";
+ import { isVersionsGeq15851 } from "../sync/global.js";
  const alreadySynced = [];
  function respecializeFields(fields) {
  Object.entries(fields).forEach(([k, v]) => {
@@ -14,7 +15,11 @@ function respecializeFields(fields) {
  fields[k] = { value: v.value, type: "static" };
  }
  else if (v.expr !== undefined) {
- fields[k] = { expr: v.expr, allowUserResources: v.allowUserResources, type: "javascript" };
+ fields[k] = {
+ expr: v.expr,
+ allowUserResources: v.allowUserResources,
+ type: "javascript",
+ };
  }
  }
  });
@@ -34,11 +39,15 @@ export function replaceInlineScripts(rec, localPath) {
  }
  if (typeof rec == "object") {
  return Object.entries(rec).flatMap(([k, v]) => {
- if (k == 'runType') {
- rec["type"] = 'path';
+ if (k == "runType") {
+ if (isVersionsGeq15851()) {
+ rec["type"] = "path";
+ }
  }
  else if (k == "inlineScript" && typeof v == "object") {
- rec["type"] = 'inline';
+ if (isVersionsGeq15851()) {
+ rec["type"] = "inline";
+ }
  const o = v;
  if (o["content"] && o["content"].startsWith("!inline")) {
  const basePath = localPath + o["content"].split(" ")[1];
@@ -89,7 +98,8 @@ export async function pushApp(workspace, remotePath, localPath, message) {
  const path = localPath + "app.yaml";
  const localApp = (await yamlParseFile(path));
  replaceInlineScripts(localApp.value, localPath);
- await generatingPolicy(localApp, remotePath, localApp?.["public"] ?? false);
+ await generatingPolicy(localApp, remotePath, localApp?.["public"] ??
+ localApp?.["policy"]?.["execution_mode"] == "anonymous");
  if (app) {
  if (isSuperset(localApp, app)) {
  log.info(colors.green(`App ${remotePath} is up to date`));
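The last change above alters when pushApp marks an app as public: previously the third argument was `localApp?.["public"] ?? false`; now, when `public` is unset, it falls back to whether the saved policy's execution mode is anonymous. A minimal sketch of that predicate (the helper name is hypothetical and only restates the new argument):

// Hypothetical helper, not in the package: restates the new third argument
// to generatingPolicy. `??` only falls through on null/undefined, so an
// explicit `public: false` still overrides the policy-based fallback.
function isAppPublic(localApp) {
  return localApp?.["public"] ??
    localApp?.["policy"]?.["execution_mode"] == "anonymous";
}

isAppPublic({ public: true });                                           // true
isAppPublic({ policy: { execution_mode: "anonymous" } });                // true
isAppPublic({ public: false, policy: { execution_mode: "anonymous" } }); // false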
@@ -63,15 +63,28 @@ const createHTML = (jsPath, cssPath) => `
  `;
  async function dev(opts) {
  GLOBAL_CONFIG_OPT.noCdToRoot = true;
+ // Validate that we're in a .raw_app folder
+ const cwd = process.cwd();
+ const currentDirName = path.basename(cwd);
+ if (!currentDirName.endsWith(".raw_app")) {
+ log.error(colors.red(`Error: The dev command must be run inside a .raw_app folder.\n` +
+ `Current directory: ${currentDirName}\n` +
+ `Please navigate to a folder ending with '.raw_app' before running this command.`));
+ dntShim.Deno.exit(1);
+ }
+ // Check for raw_app.yaml
+ const rawAppPath = path.join(cwd, "raw_app.yaml");
+ if (!fs.existsSync(rawAppPath)) {
+ log.error(colors.red(`Error: raw_app.yaml not found in current directory.\n` +
+ `The dev command must be run in a .raw_app folder containing a raw_app.yaml file.`));
+ dntShim.Deno.exit(1);
+ }
  // Resolve workspace and authenticate
  const workspace = await resolveWorkspace(opts);
  await requireLogin(opts);
  const workspaceId = workspace.workspaceId;
  // Load app path from raw_app.yaml
- const rawAppPath = path.join(process.cwd(), "raw_app.yaml");
- const rawApp = fs.existsSync(rawAppPath)
- ? (await yamlParseFile(rawAppPath))
- : {};
+ const rawApp = (await yamlParseFile(rawAppPath));
  const appPath = rawApp?.custom_path ?? "u/unknown/newapp";
  // Dynamically import esbuild only when the dev command is called
  const esbuild = await import("esbuild");
@@ -223,7 +223,6 @@ const command = new Command()
  .command("generate-locks", "re-generate the lock files of all inline scripts of all updated flows")
  .arguments("[flow:file]")
  .option("--yes", "Skip confirmation prompt")
- .option("-r --use-raw-requirements", "Use raw requirements (requirements.txt, go.mod, package.json, etc) instead of generating them on the server (can also be set with USE_RAW_REQUIREMENTS=true environment variable)")
  .option("-i --includes <patterns:file[]>", "Comma separated patterns to specify which file to take into account (among files that are compatible with windmill). Patterns can include * (any string until '/') and ** (any string)")
  .option("-e --excludes <patterns:file[]>", "Comma separated patterns to specify which file to NOT take into account.")
  .action(generateLocks)
@@ -0,0 +1,186 @@
+ import { requireLogin } from "../../core/auth.js";
+ import { resolveWorkspace } from "../../core/context.js";
+ import { colors, Command, Confirm, log } from "../../../deps.js";
+ import { mergeConfigWithConfigFile } from "../../core/conf.js";
+ import * as fs from "node:fs/promises";
+ import * as wmill from "../../../gen/services.gen.js";
+ async function pullJobs(opts, workspace) {
+ opts = await mergeConfigWithConfigFile(opts);
+ const ws = await resolveWorkspace({ ...opts, workspace });
+ await requireLogin(opts);
+ log.info("Pulling jobs from workspace " + ws.workspaceId);
+ // Check for active workers and warn user
+ if (!opts.skipWorkerCheck) {
+ try {
+ const workers = await wmill.listWorkers({ pingSince: 60 });
+ if (workers.length > 0) {
+ log.info(colors.yellow(`\nWarning: Found ${workers.length} active worker(s) on the instance.`));
+ log.info("It's recommended to scale down all workers before exporting jobs to ensure that no new jobs are being created during export.");
+ const proceed = await Confirm.prompt({
+ message: "Do you want to continue with the export anyway?",
+ default: false,
+ });
+ if (!proceed) {
+ log.info("Export cancelled. Please scale down workers and try again.");
+ log.info("You can skip this check with --skip-worker-check flag.");
+ return;
+ }
+ }
+ }
+ catch (e) {
+ log.debug(`Could not check for active workers: ${e}`);
+ }
+ }
+ // Pull completed jobs
+ let completedJobs = [];
+ let page = 1;
+ const perPage = 1000;
+ while (true) {
+ const batch = await wmill.exportCompletedJobs({
+ workspace: ws.workspaceId,
+ page,
+ perPage,
+ });
+ if (batch.length === 0)
+ break;
+ completedJobs = completedJobs.concat(batch);
+ if (batch.length < perPage)
+ break;
+ page++;
+ }
+ const completedPath = opts.completedOutput || "completed_jobs.json";
+ await fs.writeFile(completedPath, JSON.stringify(completedJobs, null, 2));
+ log.info(colors.green(`Successfully pulled ${completedJobs.length} completed jobs to ${completedPath}`));
+ // Pull queued jobs
+ let queuedJobs = [];
+ page = 1;
+ while (true) {
+ const batch = await wmill.exportQueuedJobs({
+ workspace: ws.workspaceId,
+ page,
+ perPage,
+ });
+ if (batch.length === 0)
+ break;
+ queuedJobs = queuedJobs.concat(batch);
+ if (batch.length < perPage)
+ break;
+ page++;
+ }
+ const queuedPath = opts.queuedOutput || "queued_jobs.json";
+ await fs.writeFile(queuedPath, JSON.stringify(queuedJobs, null, 2));
+ log.info(colors.green(`Successfully pulled ${queuedJobs.length} queued jobs to ${queuedPath}`));
+ // Ask to delete all jobs (queued and completed)
+ const allJobs = [...queuedJobs, ...completedJobs];
+ if (allJobs.length > 0) {
+ const confirmed = await Confirm.prompt({
+ message: `Do you want to delete the ${allJobs.length} pulled jobs (queued + completed) from the workspace? If you don't, you won't be able to import them again on the same instance because of the unique constraint on the job ID.`,
+ default: false,
+ });
+ if (confirmed) {
+ const jobIds = allJobs.map((job) => job.id);
+ await wmill.deleteJobs({
+ workspace: ws.workspaceId,
+ requestBody: jobIds,
+ });
+ log.info(colors.green(`Deleted ${jobIds.length} jobs (queued + completed)`));
+ }
+ else {
+ log.info("Skipping deletion of jobs");
+ }
+ }
+ }
+ async function pushJobs(opts, workspace) {
+ opts = await mergeConfigWithConfigFile(opts);
+ const ws = await resolveWorkspace({ ...opts, workspace });
+ await requireLogin(opts);
+ log.info(`Pushing jobs to workspace ${ws.workspaceId}`);
+ // Check for active workers before importing
+ if (!opts.skipWorkerCheck) {
+ try {
+ const workers = await wmill.listWorkers({ pingSince: 60 });
+ if (workers.length > 0) {
+ log.info(colors.yellow(`\nWarning: Found ${workers.length} active worker(s) on the instance.`));
+ log.info("It's recommended to scale down all workers before importing jobs to ensure:");
+ log.info(" - No imported jobs are processed immediately during import");
+ log.info(" - You have time to review or adjust the imported jobs before they start running");
+ log.info("");
+ const proceed = await Confirm.prompt({
+ message: "Do you want to continue with the import anyway?",
+ default: false,
+ });
+ if (!proceed) {
+ log.info("Import cancelled. Please scale down workers and try again.");
+ log.info("You can skip this check with --skip-worker-check flag.");
+ return;
+ }
+ }
+ }
+ catch (e) {
+ log.debug(`Could not check for active workers: ${e}`);
+ }
+ }
+ // Push completed jobs
+ const completedPath = opts.completedFile || "completed_jobs.json";
+ try {
+ const completedContent = await fs.readFile(completedPath, "utf-8");
+ const completedJobs = JSON.parse(completedContent);
+ if (!Array.isArray(completedJobs)) {
+ throw new Error("Completed jobs file must contain an array of jobs");
+ }
+ const completedResult = await wmill.importCompletedJobs({
+ workspace: ws.workspaceId,
+ requestBody: completedJobs,
+ });
+ log.info(colors.green(`Completed jobs: ${completedResult}`));
+ }
+ catch (e) {
+ if (e.code === "ENOENT") {
+ log.info(colors.yellow(`No completed jobs file found at ${completedPath}, skipping`));
+ }
+ else {
+ throw new Error(`Failed to push completed jobs: ${e}`);
+ }
+ }
+ // Push queued jobs
+ const queuedPath = opts.queuedFile || "queued_jobs.json";
+ try {
+ const queuedContent = await fs.readFile(queuedPath, "utf-8");
+ const queuedJobs = JSON.parse(queuedContent);
+ if (!Array.isArray(queuedJobs)) {
+ throw new Error("Queued jobs file must contain an array of jobs");
+ }
+ const queuedResult = await wmill.importQueuedJobs({
+ workspace: ws.workspaceId,
+ requestBody: queuedJobs,
+ });
+ log.info(colors.green(`Queued jobs: ${queuedResult}`));
+ }
+ catch (e) {
+ if (e.code === "ENOENT") {
+ log.info(colors.yellow(`No queued jobs file found at ${queuedPath}, skipping`));
+ }
+ else {
+ throw new Error(`Failed to push queued jobs: ${e}`);
+ }
+ }
+ }
+ const pull = new Command()
+ .description("Pull completed and queued jobs from workspace")
+ .option("-c, --completed-output <file:string>", "Completed jobs output file (default: completed_jobs.json)")
+ .option("-q, --queued-output <file:string>", "Queued jobs output file (default: queued_jobs.json)")
+ .option("--skip-worker-check", "Skip checking for active workers before export")
+ .arguments("[workspace:string]")
+ .action(pullJobs);
+ const push = new Command()
+ .description("Push completed and queued jobs to workspace")
+ .option("-c, --completed-file <file:string>", "Completed jobs input file (default: completed_jobs.json)")
+ .option("-q, --queued-file <file:string>", "Queued jobs input file (default: queued_jobs.json)")
+ .option("--skip-worker-check", "Skip checking for active workers before import")
+ .arguments("[workspace:string]")
+ .action(pushJobs);
+ const command = new Command()
+ .description("Manage jobs (import/export)")
+ .command("pull", pull)
+ .command("push", push);
+ export default command;
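This new module wires the endpoints above into a `wmill jobs` command group with `pull` and `push` subcommands (registered via `.command("jobs", jobs)` in package/esm/src/main.js further down). As a hedged sketch only, the migration round-trip they implement can be expressed directly against the generated client; the workspace identifiers below are placeholders and pagination is omitted for brevity.

// Sketch only: export -> import -> optional delete, using only calls shown above.
// "source_ws" and "target_ws" are placeholder workspace ids.
import * as wmill from "../../../gen/services.gen.js";

async function migrateCompletedJobs(sourceWs, targetWs) {
  // export one page of completed jobs from the source workspace
  const jobs = await wmill.exportCompletedJobs({ workspace: sourceWs, page: 1, perPage: 100 });
  // import them into the target workspace
  await wmill.importCompletedJobs({ workspace: targetWs, requestBody: jobs });
  // delete the originals so the same ids could be re-imported later
  // (the pull prompt above mentions the unique constraint on job IDs)
  await wmill.deleteJobs({ workspace: sourceWs, requestBody: jobs.map((j) => j.id) });
}

// e.g. await migrateCompletedJobs("source_ws", "target_ws");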
@@ -0,0 +1,23 @@
+ import { colors, log } from "../../../deps.js";
+ let GLOBAL_VERSIONS = {
+ remoteMajor: undefined,
+ remoteMinor: undefined,
+ };
+ export function updateGlobalVersions(version) {
+ try {
+ const [prefix, remoteMinorStr] = version.split(".");
+ GLOBAL_VERSIONS = {
+ remoteMajor: parseInt(prefix.split("v")[1]),
+ remoteMinor: parseInt(remoteMinorStr),
+ };
+ }
+ catch (e) {
+ log.info(colors.gray(`Error reading remote version: ${e}`));
+ }
+ }
+ export function isVersionsGeq15851() {
+ return (GLOBAL_VERSIONS.remoteMajor !== undefined &&
+ GLOBAL_VERSIONS.remoteMajor >= 1 &&
+ GLOBAL_VERSIONS.remoteMinor !== undefined &&
+ GLOBAL_VERSIONS.remoteMinor >= 5851);
+ }
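This new module caches the remote version parsed by `updateGlobalVersions` and exposes `isVersionsGeq15851()` as a feature gate. A hedged sketch of how the two fit together, following the hunks that call `updateGlobalVersions(version)` after `fetchVersion` during push and gate `replaceInlineScripts` on the result; the version string literal is a hypothetical input chosen only to exercise the parser, not a claim about the format the backend returns.

// Sketch only: wiring of the version gate. The literal below is hypothetical;
// in the CLI the string comes from fetchVersion(workspace.remote) during push.
import { updateGlobalVersions, isVersionsGeq15851 } from "./global.js";

updateGlobalVersions("v1.5900"); // hypothetical remote version string
if (isVersionsGeq15851()) {
  // backend considered recent enough: e.g. replaceInlineScripts tags raw app
  // scripts with rec["type"] = "path" or "inline"
}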
@@ -7,7 +7,7 @@ import { getTypeStrFromPath, parseFromPath, pushObj, showConflict, showDiff, } f
  import { downloadZip } from "./pull.js";
  import { exts, findContentFile, findResourceFile, handleScriptMetadata, removeExtensionToPath, } from "../script/script.js";
  import { handleFile } from "../script/script.js";
- import { deepEqual, isFileResource, isRawAppFile, isWorkspaceDependencies } from "../../utils/utils.js";
+ import { deepEqual, isFileResource, isRawAppFile, isWorkspaceDependencies, } from "../../utils/utils.js";
  import { getEffectiveSettings, mergeConfigWithConfigFile, validateBranchConfiguration, } from "../../core/conf.js";
  import { fromBranchSpecificPath, getBranchSpecificPath, getSpecificItemsForCurrentBranch, isBranchSpecificFile, isCurrentBranchFile, isSpecificItem, } from "../../core/specific_items.js";
  import { getCurrentGitBranch } from "../../utils/git.js";
@@ -20,6 +20,7 @@ import { extractInlineScripts as extractInlineScriptsForFlows } from "../../../w
  import { generateFlowLockInternal } from "../flow/flow_metadata.js";
  import { isExecutionModeAnonymous } from "../app/apps.js";
  import { generateAppLocksInternal } from "../app/app_metadata.js";
+ import { updateGlobalVersions } from "./global.js";
  // Merge CLI options with effective settings, preserving CLI flags as overrides
  function mergeCliWithEffectiveOptions(cliOpts, effectiveOpts) {
  // overlay CLI options on top (undefined cliOpts won't override effectiveOpts)
@@ -210,7 +211,7 @@ export function extractInlineScriptsForApps(key, rec, pathAssigner, toId) {
  }
  if (typeof rec == "object") {
  return Object.entries(rec).flatMap(([k, v]) => {
- if (k == 'runType') {
+ if (k == "runType") {
  rec["type"] = undefined;
  rec["schema"] = undefined;
  return [];
@@ -613,7 +614,9 @@ export async function* readDirRecursiveWithIgnore(ignore, root) {
  for await (const e2 of e.c()) {
  if (e2.isDirectory) {
  const dirName = e2.path.split(SEP).pop();
- if (dirName == "node_modules" || dirName == ".claude" || dirName?.startsWith(".")) {
+ if (dirName == "node_modules" ||
+ dirName == ".claude" ||
+ dirName?.startsWith(".")) {
  continue;
  }
  }
@@ -637,7 +640,9 @@ export async function elementsToMap(els, ignore, json, skips, specificItems) {
  continue;
  }
  const path = entry.path;
- if (!isFileResource(path) && !isRawAppFile(path) && !isWorkspaceDependencies(path)) {
+ if (!isFileResource(path) &&
+ !isRawAppFile(path) &&
+ !isWorkspaceDependencies(path)) {
  if (json && path.endsWith(".yaml"))
  continue;
  if (!json && path.endsWith(".json"))
@@ -668,7 +673,9 @@ export async function elementsToMap(els, ignore, json, skips, specificItems) {
  }
  if (isRawAppFile(path)) {
  const suffix = path.split(".raw_app" + SEP).pop();
- if (suffix?.startsWith("dist/") || suffix == "wmill.d.ts" || suffix == "package-lock.json") {
+ if (suffix?.startsWith("dist/") ||
+ suffix == "wmill.d.ts" ||
+ suffix == "package-lock.json") {
  continue;
  }
  }
@@ -715,7 +722,8 @@ export async function elementsToMap(els, ignore, json, skips, specificItems) {
  continue;
  if (skips.skipFolders && fileType === "folder")
  continue;
- if (skips.skipWorkspaceDependencies && fileType === "workspace_dependencies")
+ if (skips.skipWorkspaceDependencies &&
+ fileType === "workspace_dependencies")
  continue;
  }
  catch {
@@ -1087,7 +1095,8 @@ export async function ignoreF(wmillconf) {
  if (wmillconf.includeKey && fileType === "encryption_key") {
  return false; // Don't ignore, always include
  }
- if (!wmillconf.skipWorkspaceDependencies && fileType === "workspace_dependencies") {
+ if (!wmillconf.skipWorkspaceDependencies &&
+ fileType === "workspace_dependencies") {
  return false; // Don't ignore workspace dependencies (they are always included unless explicitly skipped)
  }
  }
@@ -1539,6 +1548,9 @@ export async function push(opts) {
  log.info("");
  }
  const version = await fetchVersion(workspace.remote);
+ if (version) {
+ updateGlobalVersions(version);
+ }
  log.info(colors.gray("Remote version: " + version));
  log.info(`remote (${workspace.name}) <- local: ${changes.length} changes to apply`);
  // Handle JSON output for dry-run
@@ -1618,7 +1630,8 @@ export async function push(opts) {
  const alreadySynced = [];
  const isRawApp = isRawAppFile(changes[0].path);
  if (isRawApp) {
- const deleteRawApp = changes.find(change => change.name === "deleted" && change.path.endsWith(".raw_app/raw_app.yaml"));
+ const deleteRawApp = changes.find((change) => change.name === "deleted" &&
+ change.path.endsWith(".raw_app/raw_app.yaml"));
  if (deleteRawApp) {
  changes = [deleteRawApp];
  }
@@ -1767,7 +1780,8 @@ export async function push(opts) {
  });
  break;
  case "raw_app":
- if (target.endsWith(".raw_app/raw_app.yaml") || target.endsWith(".raw_app/raw_app.json")) {
+ if (target.endsWith(".raw_app/raw_app.yaml") ||
+ target.endsWith(".raw_app/raw_app.json")) {
  await wmill.deleteApp({
  workspace: workspaceId,
  path: removeSuffix(target, ".raw_app/raw_app.json"),
@@ -1871,7 +1885,7 @@ export async function push(opts) {
  await wmill.deleteWorkspaceDependencies({
  workspace: workspaceId,
  language,
- name
+ name,
  });
  break;
  default:
package/esm/src/main.js CHANGED
@@ -32,6 +32,7 @@ import workers from "./commands/workers/workers.js";
  import queues from "./commands/queues/queues.js";
  import dependencies from "./commands/dependencies/dependencies.js";
  import init from "./commands/init/init.js";
+ import jobs from "./commands/jobs/jobs.js";
  export { flow, app, script, workspace, resource, resourceType, user, variable, hub, folder, schedule, trigger, sync, gitsyncSettings, instance, dev, hubPull, pull, push, workspaceAdd, };
  // addEventListener("error", (event) => {
  // if (event.error) {
@@ -39,7 +40,7 @@ export { flow, app, script, workspace, resource, resourceType, user, variable, h
  // console.error(JSON.stringify(event.error, null, 4));
  // }
  // });
- export const VERSION = "1.587.0";
+ export const VERSION = "1.588.0";
  export const WM_FORK_PREFIX = "wm-fork";
  const command = new Command()
  .name("wmill")
@@ -75,6 +76,7 @@ const command = new Command()
  .command("workers", workers)
  .command("queues", queues)
  .command("dependencies", dependencies)
+ .command("jobs", jobs)
  .command("version --version", "Show version information")
  .action(async (opts) => {
  console.log("CLI version: " + VERSION);
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "windmill-cli",
- "version": "1.587.0",
+ "version": "1.588.0",
  "description": "CLI for Windmill",
  "repository": {
  "type": "git",