windmill-cli 1.591.3 → 1.592.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/esm/gen/core/OpenAPI.js +1 -1
  2. package/esm/src/commands/app/app_metadata.js +1 -1
  3. package/esm/src/commands/app/dev.js +1 -1
  4. package/esm/src/commands/app/raw_apps.js +1 -1
  5. package/esm/src/commands/gitsync-settings/gitsync-settings.js +28 -2
  6. package/esm/src/commands/init/init.js +14 -15
  7. package/esm/src/commands/sync/sync.js +1 -1
  8. package/esm/src/core/settings.js +1 -1
  9. package/esm/src/guidance/flow_guidance.js +10 -429
  10. package/esm/src/guidance/prompts.js +2620 -0
  11. package/esm/src/guidance/script_guidance.js +9 -435
  12. package/esm/src/main.js +3 -3
  13. package/esm/src/types.js +1 -1
  14. package/package.json +1 -1
  15. package/types/src/commands/app/{apps.d.ts → app.d.ts} +1 -1
  16. package/types/src/commands/app/app.d.ts.map +1 -0
  17. package/types/src/commands/gitsync-settings/gitsync-settings.d.ts +24 -2
  18. package/types/src/commands/gitsync-settings/gitsync-settings.d.ts.map +1 -1
  19. package/types/src/commands/init/init.d.ts.map +1 -1
  20. package/types/src/commands/worker-groups/{worker_groups.d.ts → worker-groups.d.ts} +1 -1
  21. package/types/src/commands/worker-groups/{worker_groups.d.ts.map → worker-groups.d.ts.map} +1 -1
  22. package/types/src/guidance/flow_guidance.d.ts +1 -1
  23. package/types/src/guidance/flow_guidance.d.ts.map +1 -1
  24. package/types/src/guidance/prompts.d.ts +4 -0
  25. package/types/src/guidance/prompts.d.ts.map +1 -0
  26. package/types/src/guidance/script_guidance.d.ts +1 -1
  27. package/types/src/guidance/script_guidance.d.ts.map +1 -1
  28. package/types/src/main.d.ts +2 -2
  29. package/types/src/main.d.ts.map +1 -1
  30. package/esm/src/commands/gitsync-settings/index.js +0 -28
  31. package/types/src/commands/app/apps.d.ts.map +0 -1
  32. package/types/src/commands/gitsync-settings/index.d.ts +0 -25
  33. package/types/src/commands/gitsync-settings/index.d.ts.map +0 -1
  34. /package/esm/src/commands/app/{apps.js → app.js} +0 -0
  35. /package/esm/src/commands/worker-groups/{worker_groups.js → worker-groups.js} +0 -0
@@ -0,0 +1,2620 @@
1
+ // Auto-generated by generate.py - DO NOT EDIT
2
+ export const SCRIPT_PROMPT = `# Windmill Script Writing Guide
3
+
4
+ ## General Principles
5
+
6
+ - Scripts must export a main function (do not call it)
7
+ - Libraries are installed automatically - do not show installation instructions
8
+ - Credentials and configuration are stored in resources and passed as parameters
9
+ - The windmill client (\`wmill\`) provides APIs for interacting with the platform
10
+
11
+ ## Function Naming
12
+
13
+ - Main function: \`main\` (or \`preprocessor\` for preprocessor scripts)
14
+ - Must be async for TypeScript variants
15
+
16
+ ## Return Values
17
+
18
+ - Scripts can return any JSON-serializable value
19
+ - Return values become available to subsequent flow steps via \`results.step_id\`
20
+
21
+ ## Preprocessor Scripts
22
+
23
+ Preprocessor scripts process raw trigger data from various sources (webhook, custom HTTP route, SQS, WebSocket, Kafka, NATS, MQTT, Postgres, or email) before passing it to the flow. This separates the trigger logic from the flow logic and keeps the auto-generated UI clean.
24
+
25
+ The returned object determines the parameter values passed to the flow.
26
+ e.g., \`{ b: 1, a: 2 }\` calls the flow with \`a = 2\` and \`b = 1\`, assuming the flow has two inputs called \`a\` and \`b\`.
27
+
28
+ The preprocessor receives a single parameter called \`event\`.
29
+
30
+
31
+ # Bash
32
+
33
+ ## Structure
34
+
35
+ Do not include \`#!/bin/bash\`. Arguments are obtained as positional parameters:
36
+
37
+ \`\`\`bash
38
+ # Get arguments
39
+ var1="$1"
40
+ var2="$2"
41
+
42
+ echo "Processing $var1 and $var2"
43
+
44
+ # Return JSON by echoing to stdout
45
+ echo "{\\"result\\": \\"$var1\\", \\"count\\": $var2}"
46
+ \`\`\`
47
+
48
+ **Important:**
49
+ - Do not include shebang (\`#!/bin/bash\`)
50
+ - Arguments are always strings
51
+ - Access with \`$1\`, \`$2\`, etc.
52
+
53
+ ## Output
54
+
55
+ The script output is captured as the result. For structured data, output valid JSON:
56
+
57
+ \`\`\`bash
58
+ name="$1"
59
+ count="$2"
60
+
61
+ # Output JSON result
62
+ cat << EOF
63
+ {
64
+ "name": "$name",
65
+ "count": $count,
66
+ "timestamp": "$(date -Iseconds)"
67
+ }
68
+ EOF
69
+ \`\`\`
70
+
71
+ ## Environment Variables
72
+
73
+ Environment variables set in Windmill are available:
74
+
75
+ \`\`\`bash
76
+ # Access environment variable
77
+ echo "Workspace: $WM_WORKSPACE"
78
+ echo "Job ID: $WM_JOB_ID"
79
+ \`\`\`
80
+
81
+
82
+ # BigQuery
83
+
84
+ Arguments use \`@name\` syntax.
85
+
86
+ Name the parameters by adding comments before the statement:
87
+
88
+ \`\`\`sql
89
+ -- @name1 (string)
90
+ -- @name2 (int64) = 0
91
+ SELECT * FROM users WHERE name = @name1 AND age > @name2;
92
+ \`\`\`
93
+
94
+
95
+ # TypeScript (Bun)
96
+
97
+ Bun runtime with full npm ecosystem and fastest execution.
98
+
99
+ ## Structure
100
+
101
+ Export a single **async** function called \`main\`:
102
+
103
+ \`\`\`typescript
104
+ export async function main(param1: string, param2: number) {
105
+ // Your code here
106
+ return { result: param1, count: param2 };
107
+ }
108
+ \`\`\`
109
+
110
+ Do not call the main function. Libraries are installed automatically.
111
+
112
+ ## Resource Types
113
+
114
+ On Windmill, credentials and configuration are stored in resources and passed as parameters to main.
115
+
116
+ Use the \`RT\` namespace for resource types:
117
+
118
+ \`\`\`typescript
119
+ export async function main(stripe: RT.Stripe) {
120
+ // stripe contains API key and config from the resource
121
+ }
122
+ \`\`\`
123
+
124
+ Only use resource types if you need them to satisfy the instructions. Always use the RT namespace.
125
+
126
+ ## Imports
127
+
128
+ \`\`\`typescript
129
+ import Stripe from "stripe";
130
+ import { someFunction } from "some-package";
131
+ \`\`\`
132
+
133
+ ## Windmill Client
134
+
135
+ Import the windmill client for platform interactions:
136
+
137
+ \`\`\`typescript
138
+ import * as wmill from "windmill-client";
139
+ \`\`\`
140
+
141
+ See the SDK documentation for available methods.
142
+
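+ A minimal usage sketch of a few documented client methods (the resource, variable, and script paths below are illustrative):
+
+ \`\`\`typescript
+ import * as wmill from "windmill-client";
+
+ export async function main() {
+   // Read a resource and a variable by path
+   const dbConfig = await wmill.getResource("u/admin/my_postgresql");
+   const apiKey = await wmill.getVariable("u/admin/my_api_key");
+
+   // Run another script synchronously and return its result
+   return await wmill.runScriptByPath("f/examples/other_script", { key: apiKey, db: dbConfig });
+ }
+ \`\`\`
+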
143
+ ## Preprocessor Scripts
144
+
145
+ For preprocessor scripts, the function should be named \`preprocessor\` and receives an \`event\` parameter:
146
+
147
+ \`\`\`typescript
148
+ type Event = {
149
+ kind:
150
+ | "webhook"
151
+ | "http"
152
+ | "websocket"
153
+ | "kafka"
154
+ | "email"
155
+ | "nats"
156
+ | "postgres"
157
+ | "sqs"
158
+ | "mqtt"
159
+ | "gcp";
160
+ body: any;
161
+ headers: Record<string, string>;
162
+ query: Record<string, string>;
163
+ };
164
+
165
+ export async function preprocessor(event: Event) {
166
+ return {
167
+ param1: event.body.field1,
168
+ param2: event.query.id,
169
+ };
170
+ }
171
+ \`\`\`
172
+
173
+ ## S3 Object Operations
174
+
175
+ Windmill provides built-in support for S3-compatible storage operations.
176
+
177
+ ### S3Object Type
178
+
179
+ The S3Object type represents a file in S3 storage:
180
+
181
+ \`\`\`typescript
182
+ type S3Object = {
183
+ s3: string; // Path within the bucket
184
+ };
185
+ \`\`\`
186
+
187
+ ## TypeScript Operations
188
+
189
+ \`\`\`typescript
190
+ import * as wmill from "windmill-client";
191
+
192
+ // Load file content from S3
193
+ const content: Uint8Array = await wmill.loadS3File(s3object);
194
+
195
+ // Load file as stream
196
+ const blob: Blob = await wmill.loadS3FileStream(s3object);
197
+
198
+ // Write file to S3
199
+ const result: S3Object = await wmill.writeS3File(
200
+ s3object, // Target path (or undefined to auto-generate)
201
+ fileContent, // string or Blob
202
+ s3ResourcePath // Optional: specific S3 resource to use
203
+ );
204
+ \`\`\`
205
+
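+ A short follow-up sketch reading a text file back with \`loadS3File\` (the object key is illustrative):
+
+ \`\`\`typescript
+ import * as wmill from "windmill-client";
+
+ export async function main() {
+   // Load the raw bytes from the workspace S3 storage and decode them as UTF-8 text
+   const bytes = await wmill.loadS3File({ s3: "reports/input.txt" });
+   if (!bytes) {
+     throw new Error("file not found");
+   }
+   return new TextDecoder().decode(bytes);
+ }
+ \`\`\`
+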
206
+
207
+ # TypeScript (Bun Native)
208
+
209
+ Native TypeScript execution with fetch only - no external imports allowed.
210
+
211
+ ## Structure
212
+
213
+ Export a single **async** function called \`main\`:
214
+
215
+ \`\`\`typescript
216
+ export async function main(param1: string, param2: number) {
217
+ // Your code here
218
+ return { result: param1, count: param2 };
219
+ }
220
+ \`\`\`
221
+
222
+ Do not call the main function.
223
+
224
+ ## Resource Types
225
+
226
+ On Windmill, credentials and configuration are stored in resources and passed as parameters to main.
227
+
228
+ Use the \`RT\` namespace for resource types:
229
+
230
+ \`\`\`typescript
231
+ export async function main(stripe: RT.Stripe) {
232
+ // stripe contains API key and config from the resource
233
+ }
234
+ \`\`\`
235
+
236
+ Only use resource types if you need them to satisfy the instructions. Always use the RT namespace.
237
+
238
+ ## Imports
239
+
240
+ **No imports allowed.** Use the globally available \`fetch\` function:
241
+
242
+ \`\`\`typescript
243
+ export async function main(url: string) {
244
+ const response = await fetch(url);
245
+ return await response.json();
246
+ }
247
+ \`\`\`
248
+
249
+ ## Windmill Client
250
+
251
+ The windmill client is not available in native TypeScript mode. Use fetch to call APIs directly.
252
+
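+ Since the client is unavailable here, platform or third-party calls have to be plain HTTP requests; a minimal sketch with bearer-token auth and basic error handling (the URL and token parameters are illustrative):
+
+ \`\`\`typescript
+ export async function main(baseUrl: string, token: string) {
+   // Call a JSON API with an Authorization header
+   const response = await fetch(baseUrl + "/items?limit=10", {
+     headers: { Authorization: "Bearer " + token, Accept: "application/json" },
+   });
+   if (!response.ok) {
+     throw new Error("request failed with status " + response.status);
+   }
+   return await response.json();
+ }
+ \`\`\`
+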
253
+ ## Preprocessor Scripts
254
+
255
+ For preprocessor scripts, the function should be named \`preprocessor\` and receives an \`event\` parameter:
256
+
257
+ \`\`\`typescript
258
+ type Event = {
259
+ kind:
260
+ | "webhook"
261
+ | "http"
262
+ | "websocket"
263
+ | "kafka"
264
+ | "email"
265
+ | "nats"
266
+ | "postgres"
267
+ | "sqs"
268
+ | "mqtt"
269
+ | "gcp";
270
+ body: any;
271
+ headers: Record<string, string>;
272
+ query: Record<string, string>;
273
+ };
274
+
275
+ export async function preprocessor(event: Event) {
276
+ return {
277
+ param1: event.body.field1,
278
+ param2: event.query.id,
279
+ };
280
+ }
281
+ \`\`\`
282
+
283
+ ## S3 Object Operations
284
+
285
+ Windmill provides built-in support for S3-compatible storage operations.
286
+
287
+ ### S3Object Type
288
+
289
+ The S3Object type represents a file in S3 storage:
290
+
291
+ \`\`\`typescript
292
+ type S3Object = {
293
+ s3: string; // Path within the bucket
294
+ };
295
+ \`\`\`
296
+
297
+ ## TypeScript Operations
298
+
299
+ \`\`\`typescript
300
+ import * as wmill from "windmill-client";
301
+
302
+ // Load file content from S3
303
+ const content: Uint8Array = await wmill.loadS3File(s3object);
304
+
305
+ // Load file as stream
306
+ const blob: Blob = await wmill.loadS3FileStream(s3object);
307
+
308
+ // Write file to S3
309
+ const result: S3Object = await wmill.writeS3File(
310
+ s3object, // Target path (or undefined to auto-generate)
311
+ fileContent, // string or Blob
312
+ s3ResourcePath // Optional: specific S3 resource to use
313
+ );
314
+ \`\`\`
315
+
316
+
317
+ # C#
318
+
319
+ The script must contain a public static \`Main\` method inside a class:
320
+
321
+ \`\`\`csharp
322
+ public class Script
323
+ {
324
+ public static object Main(string name, int count)
325
+ {
326
+ return new { Name = name, Count = count };
327
+ }
328
+ }
329
+ \`\`\`
330
+
331
+ **Important:**
332
+ - Class name is irrelevant
333
+ - Method must be \`public static\`
334
+ - Return type can be \`object\` or specific type
335
+
336
+ ## NuGet Packages
337
+
338
+ Add packages using the \`#r\` directive at the top:
339
+
340
+ \`\`\`csharp
341
+ #r "nuget: Newtonsoft.Json, 13.0.3"
342
+ #r "nuget: RestSharp, 110.2.0"
343
+
344
+ using Newtonsoft.Json;
345
+ using RestSharp;
346
+
347
+ public class Script
348
+ {
349
+ public static object Main(string url)
350
+ {
351
+ var client = new RestClient(url);
352
+ var request = new RestRequest();
353
+ var response = client.Get(request);
354
+ return JsonConvert.DeserializeObject(response.Content);
355
+ }
356
+ }
357
+ \`\`\`
358
+
359
+
360
+ # TypeScript (Deno)
361
+
362
+ Deno runtime with npm support via \`npm:\` prefix and native Deno libraries.
363
+
364
+ ## Structure
365
+
366
+ Export a single **async** function called \`main\`:
367
+
368
+ \`\`\`typescript
369
+ export async function main(param1: string, param2: number) {
370
+ // Your code here
371
+ return { result: param1, count: param2 };
372
+ }
373
+ \`\`\`
374
+
375
+ Do not call the main function. Libraries are installed automatically.
376
+
377
+ ## Resource Types
378
+
379
+ On Windmill, credentials and configuration are stored in resources and passed as parameters to main.
380
+
381
+ Use the \`RT\` namespace for resource types:
382
+
383
+ \`\`\`typescript
384
+ export async function main(stripe: RT.Stripe) {
385
+ // stripe contains API key and config from the resource
386
+ }
387
+ \`\`\`
388
+
389
+ Only use resource types if you need them to satisfy the instructions. Always use the RT namespace.
390
+
391
+ ## Imports
392
+
393
+ \`\`\`typescript
394
+ // npm packages use npm: prefix
395
+ import Stripe from "npm:stripe";
396
+ import { someFunction } from "npm:some-package";
397
+
398
+ // Deno standard library
399
+ import { serve } from "https://deno.land/std/http/server.ts";
400
+ \`\`\`
401
+
402
+ ## Windmill Client
403
+
404
+ Import the windmill client for platform interactions:
405
+
406
+ \`\`\`typescript
407
+ import * as wmill from "windmill-client";
408
+ \`\`\`
409
+
410
+ See the SDK documentation for available methods.
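+ A minimal usage sketch combining \`getVariable\` and \`setVariable\` (the variable path is illustrative):
+
+ \`\`\`typescript
+ import * as wmill from "windmill-client";
+
+ export async function main() {
+   // Read a counter stored as a variable, increment it, and write it back
+   const counter = await wmill.getVariable("u/admin/run_counter");
+   const next = String(Number(counter ?? "0") + 1);
+   await wmill.setVariable("u/admin/run_counter", next);
+   return { runs: next };
+ }
+ \`\`\`
+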
411
+
412
+ ## Preprocessor Scripts
413
+
414
+ For preprocessor scripts, the function should be named \`preprocessor\` and receives an \`event\` parameter:
415
+
416
+ \`\`\`typescript
417
+ type Event = {
418
+ kind:
419
+ | "webhook"
420
+ | "http"
421
+ | "websocket"
422
+ | "kafka"
423
+ | "email"
424
+ | "nats"
425
+ | "postgres"
426
+ | "sqs"
427
+ | "mqtt"
428
+ | "gcp";
429
+ body: any;
430
+ headers: Record<string, string>;
431
+ query: Record<string, string>;
432
+ };
433
+
434
+ export async function preprocessor(event: Event) {
435
+ return {
436
+ param1: event.body.field1,
437
+ param2: event.query.id,
438
+ };
439
+ }
440
+ \`\`\`
441
+
442
+ ## S3 Object Operations
443
+
444
+ Windmill provides built-in support for S3-compatible storage operations.
445
+
446
+ ### S3Object Type
447
+
448
+ The S3Object type represents a file in S3 storage:
449
+
450
+ \`\`\`typescript
451
+ type S3Object = {
452
+ s3: string; // Path within the bucket
453
+ };
454
+ \`\`\`
455
+
456
+ ## TypeScript Operations
457
+
458
+ \`\`\`typescript
459
+ import * as wmill from "windmill-client";
460
+
461
+ // Load file content from S3
462
+ const content: Uint8Array = await wmill.loadS3File(s3object);
463
+
464
+ // Load file as stream
465
+ const blob: Blob = await wmill.loadS3FileStream(s3object);
466
+
467
+ // Write file to S3
468
+ const result: S3Object = await wmill.writeS3File(
469
+ s3object, // Target path (or undefined to auto-generate)
470
+ fileContent, // string or Blob
471
+ s3ResourcePath // Optional: specific S3 resource to use
472
+ );
473
+ \`\`\`
474
+
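+ A short follow-up sketch writing a text file with \`writeS3File\` (the target key is illustrative):
+
+ \`\`\`typescript
+ import * as wmill from "windmill-client";
+
+ export async function main(report: string) {
+   // Persist the report to the workspace S3 storage and return the resulting S3 object
+   return await wmill.writeS3File({ s3: "reports/daily.txt" }, report);
+ }
+ \`\`\`
+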
475
+
476
+ # DuckDB
477
+
478
+ Arguments are defined with comments and used with \`$name\` syntax:
479
+
480
+ \`\`\`sql
481
+ -- $name (text) = default
482
+ -- $age (integer)
483
+ SELECT * FROM users WHERE name = $name AND age > $age;
484
+ \`\`\`
485
+
486
+ ## Ducklake Integration
487
+
488
+ Attach Ducklake for data lake operations:
489
+
490
+ \`\`\`sql
491
+ -- Main ducklake
492
+ ATTACH 'ducklake' AS dl;
493
+
494
+ -- Named ducklake
495
+ ATTACH 'ducklake://my_lake' AS dl;
496
+
497
+ -- Then query
498
+ SELECT * FROM dl.schema.table;
499
+ \`\`\`
500
+
501
+ ## External Database Connections
502
+
503
+ Connect to external databases using resources:
504
+
505
+ \`\`\`sql
506
+ ATTACH '$res:path/to/resource' AS db (TYPE postgres);
507
+ SELECT * FROM db.schema.table;
508
+ \`\`\`
509
+
510
+ ## S3 File Operations
511
+
512
+ Read files from S3 storage:
513
+
514
+ \`\`\`sql
515
+ -- Default storage
516
+ SELECT * FROM read_csv('s3:///path/to/file.csv');
517
+
518
+ -- Named storage
519
+ SELECT * FROM read_csv('s3://storage_name/path/to/file.csv');
520
+
521
+ -- Parquet files
522
+ SELECT * FROM read_parquet('s3:///path/to/file.parquet');
523
+
524
+ -- JSON files
525
+ SELECT * FROM read_json('s3:///path/to/file.json');
526
+ \`\`\`
527
+
528
+
529
+ # Go
530
+
531
+ ## Structure
532
+
533
+ The file package must be \`inner\` and export a function called \`main\`:
534
+
535
+ \`\`\`go
536
+ package inner
537
+
538
+ func main(param1 string, param2 int) (map[string]interface{}, error) {
539
+ return map[string]interface{}{
540
+ "result": param1,
541
+ "count": param2,
542
+ }, nil
543
+ }
544
+ \`\`\`
545
+
546
+ **Important:**
547
+ - Package must be \`inner\`
548
+ - Return type must be \`({return_type}, error)\`
549
+ - Function name is \`main\` (lowercase)
550
+
551
+ ## Return Types
552
+
553
+ The return type can be any Go type that can be serialized to JSON:
554
+
555
+ \`\`\`go
556
+ package inner
557
+
558
+ type Result struct {
559
+ Name string \`json:"name"\`
560
+ Count int \`json:"count"\`
561
+ }
562
+
563
+ func main(name string, count int) (Result, error) {
564
+ return Result{
565
+ Name: name,
566
+ Count: count,
567
+ }, nil
568
+ }
569
+ \`\`\`
570
+
571
+ ## Error Handling
572
+
573
+ Return errors as the second return value:
574
+
575
+ \`\`\`go
576
+ package inner
577
+
578
+ import "errors"
579
+
580
+ func main(value int) (string, error) {
581
+ if value < 0 {
582
+ return "", errors.New("value must be positive")
583
+ }
584
+ return "success", nil
585
+ }
586
+ \`\`\`
587
+
588
+
589
+ # GraphQL
590
+
591
+ ## Structure
592
+
593
+ Write GraphQL queries or mutations. Arguments can be added as query parameters:
594
+
595
+ \`\`\`graphql
596
+ query GetUser($id: ID!) {
597
+ user(id: $id) {
598
+ id
599
+ name
600
+ email
601
+ }
602
+ }
603
+ \`\`\`
604
+
605
+ ## Variables
606
+
607
+ Variables are passed as script arguments and automatically bound to the query:
608
+
609
+ \`\`\`graphql
610
+ query SearchProducts($query: String!, $limit: Int = 10) {
611
+ products(search: $query, first: $limit) {
612
+ edges {
613
+ node {
614
+ id
615
+ name
616
+ price
617
+ }
618
+ }
619
+ }
620
+ }
621
+ \`\`\`
622
+
623
+ ## Mutations
624
+
625
+ \`\`\`graphql
626
+ mutation CreateUser($input: CreateUserInput!) {
627
+ createUser(input: $input) {
628
+ id
629
+ name
630
+ createdAt
631
+ }
632
+ }
633
+ \`\`\`
634
+
635
+
636
+ # Java
637
+
638
+ The script must contain a Main public class with a \`public static main()\` method:
639
+
640
+ \`\`\`java
641
+ public class Main {
642
+ public static Object main(String name, int count) {
643
+ java.util.Map<String, Object> result = new java.util.HashMap<>();
644
+ result.put("name", name);
645
+ result.put("count", count);
646
+ return result;
647
+ }
648
+ }
649
+ \`\`\`
650
+
651
+ **Important:**
652
+ - Class must be named \`Main\`
653
+ - Method must be \`public static Object main(...)\`
654
+ - Return type is \`Object\` or \`void\`
655
+
656
+ ## Maven Dependencies
657
+
658
+ Add dependencies using comments at the top:
659
+
660
+ \`\`\`java
661
+ //requirements:
662
+ //com.google.code.gson:gson:2.10.1
663
+ //org.apache.httpcomponents:httpclient:4.5.14
664
+
665
+ import com.google.gson.Gson;
666
+
667
+ public class Main {
668
+ public static Object main(String input) {
669
+ Gson gson = new Gson();
670
+ return gson.fromJson(input, Object.class);
671
+ }
672
+ }
673
+ \`\`\`
674
+
675
+
676
+ # Microsoft SQL Server (MSSQL)
677
+
678
+ Arguments use \`@P1\`, \`@P2\`, etc.
679
+
680
+ Name the parameters by adding comments before the statement:
681
+
682
+ \`\`\`sql
683
+ -- @P1 name1 (varchar)
684
+ -- @P2 name2 (int) = 0
685
+ SELECT * FROM users WHERE name = @P1 AND age > @P2;
686
+ \`\`\`
687
+
688
+
689
+ # MySQL
690
+
691
+ Arguments use \`?\` placeholders.
692
+
693
+ Name the parameters by adding comments before the statement:
694
+
695
+ \`\`\`sql
696
+ -- ? name1 (text)
697
+ -- ? name2 (int) = 0
698
+ SELECT * FROM users WHERE name = ? AND age > ?;
699
+ \`\`\`
700
+
701
+
702
+ # TypeScript (Native)
703
+
704
+ Native TypeScript execution with fetch only - no external imports allowed.
705
+
706
+ ## Structure
707
+
708
+ Export a single **async** function called \`main\`:
709
+
710
+ \`\`\`typescript
711
+ export async function main(param1: string, param2: number) {
712
+ // Your code here
713
+ return { result: param1, count: param2 };
714
+ }
715
+ \`\`\`
716
+
717
+ Do not call the main function.
718
+
719
+ ## Resource Types
720
+
721
+ On Windmill, credentials and configuration are stored in resources and passed as parameters to main.
722
+
723
+ Use the \`RT\` namespace for resource types:
724
+
725
+ \`\`\`typescript
726
+ export async function main(stripe: RT.Stripe) {
727
+ // stripe contains API key and config from the resource
728
+ }
729
+ \`\`\`
730
+
731
+ Only use resource types if you need them to satisfy the instructions. Always use the RT namespace.
732
+
733
+ ## Imports
734
+
735
+ **No imports allowed.** Use the globally available \`fetch\` function:
736
+
737
+ \`\`\`typescript
738
+ export async function main(url: string) {
739
+ const response = await fetch(url);
740
+ return await response.json();
741
+ }
742
+ \`\`\`
743
+
744
+ ## Windmill Client
745
+
746
+ The windmill client is not available in native TypeScript mode. Use fetch to call APIs directly.
747
+
748
+ ## Preprocessor Scripts
749
+
750
+ For preprocessor scripts, the function should be named \`preprocessor\` and receives an \`event\` parameter:
751
+
752
+ \`\`\`typescript
753
+ type Event = {
754
+ kind:
755
+ | "webhook"
756
+ | "http"
757
+ | "websocket"
758
+ | "kafka"
759
+ | "email"
760
+ | "nats"
761
+ | "postgres"
762
+ | "sqs"
763
+ | "mqtt"
764
+ | "gcp";
765
+ body: any;
766
+ headers: Record<string, string>;
767
+ query: Record<string, string>;
768
+ };
769
+
770
+ export async function preprocessor(event: Event) {
771
+ return {
772
+ param1: event.body.field1,
773
+ param2: event.query.id
774
+ };
775
+ }
776
+ \`\`\`
777
+
778
+
779
+ # PHP
780
+
781
+ ## Structure
782
+
783
+ The script must start with \`<?php\` and contain at least one function called \`main\`:
784
+
785
+ \`\`\`php
786
+ <?php
787
+
788
+ function main(string $param1, int $param2) {
789
+ return ["result" => $param1, "count" => $param2];
790
+ }
791
+ \`\`\`
792
+
793
+ ## Resource Types
794
+
795
+ On Windmill, credentials and configuration are stored in resources and passed as parameters to main.
796
+
797
+ You need to **redefine** the type of the resources that are needed before the main function. Always check if the class already exists using \`class_exists\`:
798
+
799
+ \`\`\`php
800
+ <?php
801
+
802
+ if (!class_exists('Postgresql')) {
803
+ class Postgresql {
804
+ public string $host;
805
+ public int $port;
806
+ public string $user;
807
+ public string $password;
808
+ public string $dbname;
809
+ }
810
+ }
811
+
812
+ function main(Postgresql $db) {
813
+ // $db contains the database connection details
814
+ }
815
+ \`\`\`
816
+
817
+ The resource type name has to be exactly as specified.
818
+
819
+ ## Library Dependencies
820
+
821
+ Specify library dependencies as comments before the main function:
822
+
823
+ \`\`\`php
824
+ <?php
825
+
826
+ // require:
827
+ // guzzlehttp/guzzle
828
+ // stripe/stripe-php@^10.0
829
+
830
+ function main() {
831
+ // Libraries are available
832
+ }
833
+ \`\`\`
834
+
835
+ One dependency per line. No need to require autoload, it is already done.
836
+
837
+
838
+ # PostgreSQL
839
+
840
+ Arguments are obtained directly in the statement with \`$1::{type}\`, \`$2::{type}\`, etc.
841
+
842
+ Name the parameters by adding comments at the beginning of the script (without specifying the type):
843
+
844
+ \`\`\`sql
845
+ -- $1 name1
846
+ -- $2 name2 = default_value
847
+ SELECT * FROM users WHERE name = $1::TEXT AND age > $2::INT;
848
+ \`\`\`
849
+
850
+
851
+ # PowerShell
852
+
853
+ ## Structure
854
+
855
+ Arguments are obtained by declaring a \`param\` block on the first line:
856
+
857
+ \`\`\`powershell
858
+ param($Name, $Count = 0, [int]$Age)
859
+
860
+ # Your code here
861
+ Write-Output "Processing $Name, count: $Count, age: $Age"
862
+
863
+ # Return object
864
+ @{
865
+ name = $Name
866
+ count = $Count
867
+ age = $Age
868
+ }
869
+ \`\`\`
870
+
871
+ ## Parameter Types
872
+
873
+ You can specify types for parameters:
874
+
875
+ \`\`\`powershell
876
+ param(
877
+ [string]$Name,
878
+ [int]$Count = 0,
879
+ [bool]$Enabled = $true,
880
+ [array]$Items
881
+ )
882
+
883
+ @{
884
+ name = $Name
885
+ count = $Count
886
+ enabled = $Enabled
887
+ items = $Items
888
+ }
889
+ \`\`\`
890
+
891
+ ## Return Values
892
+
893
+ Return values by outputting them at the end of the script:
894
+
895
+ \`\`\`powershell
896
+ param($Input)
897
+
898
+ $result = @{
899
+ processed = $true
900
+ data = $Input
901
+ timestamp = Get-Date -Format "o"
902
+ }
903
+
904
+ $result
905
+ \`\`\`
906
+
907
+
908
+ # Python
909
+
910
+ ## Structure
911
+
912
+ The script must contain at least one function called \`main\`:
913
+
914
+ \`\`\`python
915
+ def main(param1: str, param2: int):
916
+ # Your code here
917
+ return {"result": param1, "count": param2}
918
+ \`\`\`
919
+
920
+ Do not call the main function. Libraries are installed automatically.
921
+
922
+ ## Resource Types
923
+
924
+ On Windmill, credentials and configuration are stored in resources and passed as parameters to main.
925
+
926
+ You need to **redefine** the type of the resources that are needed before the main function as TypedDict:
927
+
928
+ \`\`\`python
929
+ from typing import TypedDict
930
+
931
+ class postgresql(TypedDict):
932
+ host: str
933
+ port: int
934
+ user: str
935
+ password: str
936
+ dbname: str
937
+
938
+ def main(db: postgresql):
939
+ # db contains the database connection details
940
+ pass
941
+ \`\`\`
942
+
943
+ **Important rules:**
944
+
945
+ - The resource type name must be **IN LOWERCASE**
946
+ - Only include resource types if they are actually needed
947
+ - If an import conflicts with a resource type name, **rename the imported object, not the type name**
948
+ - Make sure to import TypedDict from typing **if you're using it**
949
+
950
+ ## Imports
951
+
952
+ Libraries are installed automatically. Do not show installation instructions.
953
+
954
+ \`\`\`python
955
+ import requests
956
+ import pandas as pd
957
+ from datetime import datetime
958
+ \`\`\`
959
+
960
+ If an import name conflicts with a resource type:
961
+
962
+ \`\`\`python
963
+ # Wrong - don't rename the type
964
+ import stripe as stripe_lib
965
+ class stripe_type(TypedDict): ...
966
+
967
+ # Correct - rename the import
968
+ import stripe as stripe_sdk
969
+ class stripe(TypedDict):
970
+ api_key: str
971
+ \`\`\`
972
+
973
+ ## Windmill Client
974
+
975
+ Import the windmill client for platform interactions:
976
+
977
+ \`\`\`python
978
+ import wmill
979
+ \`\`\`
980
+
981
+ See the SDK documentation for available methods.
982
+
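+ A minimal usage sketch of a few documented helpers (the resource, variable, and script paths are illustrative):
+
+ \`\`\`python
+ import wmill
+
+ def main():
+     # Read a resource and a variable by path
+     db = wmill.get_resource("u/admin/my_postgresql")
+     api_key = wmill.get_variable("u/admin/my_api_key")
+
+     # Run another script synchronously and return its result
+     return wmill.run_script_by_path("f/examples/other_script", {"key": api_key, "db": db})
+ \`\`\`
+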
983
+ ## Preprocessor Scripts
984
+
985
+ For preprocessor scripts, the function should be named \`preprocessor\` and receives an \`event\` parameter:
986
+
987
+ \`\`\`python
988
+ from typing import TypedDict, Literal, Any
989
+
990
+ class Event(TypedDict):
991
+ kind: Literal["webhook", "http", "websocket", "kafka", "email", "nats", "postgres", "sqs", "mqtt", "gcp"]
992
+ body: Any
993
+ headers: dict[str, str]
994
+ query: dict[str, str]
995
+
996
+ def preprocessor(event: Event):
997
+ # Transform the event into flow input parameters
998
+ return {
999
+ "param1": event["body"]["field1"],
1000
+ "param2": event["query"]["id"]
1001
+ }
1002
+ \`\`\`
1003
+
1004
+ ## S3 Object Operations
1005
+
1006
+ Windmill provides built-in support for S3-compatible storage operations.
1007
+
1008
+ \`\`\`python
1009
+ import wmill
1010
+
1011
+ # Load file content from S3
1012
+ content: bytes = wmill.load_s3_file(s3object)
1013
+
1014
+ # Load file as stream reader
1015
+ reader: BufferedReader = wmill.load_s3_file_reader(s3object)
1016
+
1017
+ # Write file to S3
1018
+ result: S3Object = wmill.write_s3_file(
1019
+ s3object, # Target path (or None to auto-generate)
1020
+ file_content, # bytes or BufferedReader
1021
+ s3_resource_path, # Optional: specific S3 resource
1022
+ content_type, # Optional: MIME type
1023
+ content_disposition # Optional: Content-Disposition header
1024
+ )
1025
+ \`\`\`
1026
+
1027
+
1028
+ # Rust
1029
+
1030
+ ## Structure
1031
+
1032
+ The script must contain a function called \`main\` with proper return type:
1033
+
1034
+ \`\`\`rust
1035
+ use anyhow::anyhow;
1036
+ use serde::Serialize;
1037
+
1038
+ #[derive(Serialize, Debug)]
1039
+ struct ReturnType {
1040
+ result: String,
1041
+ count: i32,
1042
+ }
1043
+
1044
+ fn main(param1: String, param2: i32) -> anyhow::Result<ReturnType> {
1045
+ Ok(ReturnType {
1046
+ result: param1,
1047
+ count: param2,
1048
+ })
1049
+ }
1050
+ \`\`\`
1051
+
1052
+ **Important:**
1053
+ - Arguments should be owned types
1054
+ - Return type must be serializable (\`#[derive(Serialize)]\`)
1055
+ - Return type is \`anyhow::Result<T>\`
1056
+
1057
+ ## Dependencies
1058
+
1059
+ Packages must be specified with a partial cargo.toml at the beginning of the script:
1060
+
1061
+ \`\`\`rust
1062
+ //! \`\`\`cargo
1063
+ //! [dependencies]
1064
+ //! anyhow = "1.0.86"
1065
+ //! reqwest = { version = "0.11", features = ["json"] }
1066
+ //! tokio = { version = "1", features = ["full"] }
1067
+ //! \`\`\`
1068
+
1069
+ use anyhow::anyhow;
1070
+ // ... rest of the code
1071
+ \`\`\`
1072
+
1073
+ **Note:** Serde is already included, no need to add it again.
1074
+
1075
+ ## Async Functions
1076
+
1077
+ If you need to handle async functions (e.g., using tokio), keep the main function sync and create the runtime inside:
1078
+
1079
+ \`\`\`rust
1080
+ //! \`\`\`cargo
1081
+ //! [dependencies]
1082
+ //! anyhow = "1.0.86"
1083
+ //! tokio = { version = "1", features = ["full"] }
1084
+ //! reqwest = { version = "0.11", features = ["json"] }
1085
+ //! \`\`\`
1086
+
1087
+ use anyhow::anyhow;
1088
+ use serde::Serialize;
1089
+
1090
+ #[derive(Serialize, Debug)]
1091
+ struct Response {
1092
+ data: String,
1093
+ }
1094
+
1095
+ fn main(url: String) -> anyhow::Result<Response> {
1096
+ let rt = tokio::runtime::Runtime::new()?;
1097
+ rt.block_on(async {
1098
+ let resp = reqwest::get(&url).await?.text().await?;
1099
+ Ok(Response { data: resp })
1100
+ })
1101
+ }
1102
+ \`\`\`
1103
+
1104
+
1105
+ # Snowflake
1106
+
1107
+ Arguments use \`?\` placeholders.
1108
+
1109
+ Name the parameters by adding comments before the statement:
1110
+
1111
+ \`\`\`sql
1112
+ -- ? name1 (text)
1113
+ -- ? name2 (number) = 0
1114
+ SELECT * FROM users WHERE name = ? AND age > ?;
1115
+ \`\`\`
1116
+
1117
+
1118
+ # TypeScript SDK (windmill-client)
1119
+
1120
+ Import: import * as wmill from 'windmill-client'
1121
+
1122
+ /**
1123
+ * Initialize the Windmill client with authentication token and base URL
1124
+ * @param token - Authentication token (defaults to WM_TOKEN env variable)
1125
+ * @param baseUrl - API base URL (defaults to BASE_INTERNAL_URL or BASE_URL env variable)
1126
+ */
1127
+ setClient(token?: string, baseUrl?: string): void
1128
+
1129
+ /**
1130
+ * Get the current workspace from env variables
1131
+ * @returns workspace id
1132
+ */
1133
+ getWorkspace(): string
1134
+
1135
+ /**
1136
+ * Get a resource value by path
1137
+ * @param path path of the resource, default to internal state path
1138
+ * @param undefinedIfEmpty if the resource does not exist, return undefined instead of throwing an error
1139
+ * @returns resource value
1140
+ */
1141
+ async getResource(path?: string, undefinedIfEmpty?: boolean): Promise<any>
1142
+
1143
+ /**
1144
+ * Get the true root job id
1145
+ * @param jobId job id to get the root job id from (default to current job)
1146
+ * @returns root job id
1147
+ */
1148
+ async getRootJobId(jobId?: string): Promise<string>
1149
+
1150
+ /**
1151
+ * @deprecated Use runScriptByPath or runScriptByHash instead
1152
+ */
1153
+ async runScript(path: string | null = null, hash_: string | null = null, args: Record<string, any> | null = null, verbose: boolean = false): Promise<any>
1154
+
1155
+ /**
1156
+ * Run a script synchronously by its path and wait for the result
1157
+ * @param path - Script path in Windmill
1158
+ * @param args - Arguments to pass to the script
1159
+ * @param verbose - Enable verbose logging
1160
+ * @returns Script execution result
1161
+ */
1162
+ async runScriptByPath(path: string, args: Record<string, any> | null = null, verbose: boolean = false): Promise<any>
1163
+
1164
+ /**
1165
+ * Run a script synchronously by its hash and wait for the result
1166
+ * @param hash_ - Script hash in Windmill
1167
+ * @param args - Arguments to pass to the script
1168
+ * @param verbose - Enable verbose logging
1169
+ * @returns Script execution result
1170
+ */
1171
+ async runScriptByHash(hash_: string, args: Record<string, any> | null = null, verbose: boolean = false): Promise<any>
1172
+
1173
+ /**
1174
+ * Append a text to the result stream
1175
+ * @param text text to append to the result stream
1176
+ */
1177
+ appendToResultStream(text: string): void
1178
+
1179
+ /**
1180
+ * Stream to the result stream
1181
+ * @param stream async iterable to forward to the result stream
1182
+ */
1183
+ async streamResult(stream: AsyncIterable<string>): Promise<void>
1184
+
1185
+ /**
1186
+ * Run a flow synchronously by its path and wait for the result
1187
+ * @param path - Flow path in Windmill
1188
+ * @param args - Arguments to pass to the flow
1189
+ * @param verbose - Enable verbose logging
1190
+ * @returns Flow execution result
1191
+ */
1192
+ async runFlow(path: string | null = null, args: Record<string, any> | null = null, verbose: boolean = false): Promise<any>
1193
+
1194
+ /**
1195
+ * Wait for a job to complete and return its result
1196
+ * @param jobId - ID of the job to wait for
1197
+ * @param verbose - Enable verbose logging
1198
+ * @returns Job result when completed
1199
+ */
1200
+ async waitJob(jobId: string, verbose: boolean = false): Promise<any>
1201
+
1202
+ /**
1203
+ * Get the result of a completed job
1204
+ * @param jobId - ID of the completed job
1205
+ * @returns Job result
1206
+ */
1207
+ async getResult(jobId: string): Promise<any>
1208
+
1209
+ /**
1210
+ * Get the result of a job if completed, or its current status
1211
+ * @param jobId - ID of the job
1212
+ * @returns Object with started, completed, success, and result properties
1213
+ */
1214
+ async getResultMaybe(jobId: string): Promise<any>
1215
+
1216
+ /**
1217
+ * Wrap a function to execute as a Windmill task within a flow context
1218
+ * @param f - Function to wrap as a task
1219
+ * @returns Async wrapper function that executes as a Windmill job
1220
+ */
1221
+ task<P, T>(f: (_: P) => T): (_: P) => Promise<T>
1222
+
1223
+ /**
1224
+ * @deprecated Use runScriptByPathAsync or runScriptByHashAsync instead
1225
+ */
1226
+ async runScriptAsync(path: string | null, hash_: string | null, args: Record<string, any> | null, scheduledInSeconds: number | null = null): Promise<string>
1227
+
1228
+ /**
1229
+ * Run a script asynchronously by its path
1230
+ * @param path - Script path in Windmill
1231
+ * @param args - Arguments to pass to the script
1232
+ * @param scheduledInSeconds - Schedule execution for a future time (in seconds)
1233
+ * @returns Job ID of the created job
1234
+ */
1235
+ async runScriptByPathAsync(path: string, args: Record<string, any> | null = null, scheduledInSeconds: number | null = null): Promise<string>
1236
+
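+ For instance, scheduling a script to run later and keeping the created job id (the script path is illustrative):
+
+ \`\`\`typescript
+ import * as wmill from "windmill-client";
+
+ // Schedule the script to run in 10 minutes (600 seconds)
+ const jobId = await wmill.runScriptByPathAsync("f/examples/cleanup", { dryRun: false }, 600);
+ console.log("created job", jobId);
+ \`\`\`
+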
1237
+ /**
1238
+ * Run a script asynchronously by its hash
1239
+ * @param hash_ - Script hash in Windmill
1240
+ * @param args - Arguments to pass to the script
1241
+ * @param scheduledInSeconds - Schedule execution for a future time (in seconds)
1242
+ * @returns Job ID of the created job
1243
+ */
1244
+ async runScriptByHashAsync(hash_: string, args: Record<string, any> | null = null, scheduledInSeconds: number | null = null): Promise<string>
1245
+
1246
+ /**
1247
+ * Run a flow asynchronously by its path
1248
+ * @param path - Flow path in Windmill
1249
+ * @param args - Arguments to pass to the flow
1250
+ * @param scheduledInSeconds - Schedule execution for a future time (in seconds)
1251
+ * @param doNotTrackInParent - If false, tracks state in parent job (only use when fully awaiting the job)
1252
+ * @returns Job ID of the created job
1253
+ */
1254
+ async runFlowAsync(path: string | null, args: Record<string, any> | null, scheduledInSeconds: number | null = null, doNotTrackInParent: boolean = true): Promise<string>
+ // doNotTrackInParent can only be set to false if the job will be fully awaited and not run concurrently with any other job; otherwise the child flow and its own children would store their state in the parent job, leading to incorrectness and failures
1255
+
1256
+ /**
1257
+ * Resolve a resource value in case the default value was picked because the input payload was undefined
1258
+ * @param obj resource value or path of the resource under the format \`$res:path\`
1259
+ * @returns resource value
1260
+ */
1261
+ async resolveDefaultResource(obj: any): Promise<any>
1262
+
1263
+ /**
1264
+ * Get the state file path from environment variables
1265
+ * @returns State path string
1266
+ */
1267
+ getStatePath(): string
1268
+
1269
+ /**
1270
+ * Set a resource value by path
1271
+ * @param path path of the resource to set, default to state path
1272
+ * @param value new value of the resource to set
1273
+ * @param initializeToTypeIfNotExist if the resource does not exist, initialize it with this type
1274
+ */
1275
+ async setResource(value: any, path?: string, initializeToTypeIfNotExist?: string): Promise<void>
1276
+
1277
+ /**
1278
+ * Set the state
1279
+ * @param state state to set
1280
+ * @deprecated use setState instead
1281
+ */
1282
+ async setInternalState(state: any): Promise<void>
1283
+
1284
+ /**
1285
+ * Set the state
1286
+ * @param state state to set
1287
+ */
1288
+ async setState(state: any): Promise<void>
1289
+
1290
+ /**
1291
+ * Set the progress
1292
+ * Progress cannot go back and is limited to the 0% to 99% range
1293
+ * @param percent Progress to set in %
1294
+ * @param jobId? Job to set progress for
1295
+ */
1296
+ async setProgress(percent: number, jobId?: any): Promise<void>
1297
+
1298
+ /**
1299
+ * Get the progress
1300
+ * @param jobId? Job to get progress from
1301
+ * @returns Optional clamped between 0 and 100 progress value
1302
+ */
1303
+ async getProgress(jobId?: any): Promise<number | null>
1304
+
1305
+ /**
1306
+ * Set a flow user state
1307
+ * @param key key of the state
1308
+ * @param value value of the state
1309
+ */
1310
+ async setFlowUserState(key: string, value: any, errorIfNotPossible?: boolean): Promise<void>
1311
+
1312
+ /**
1313
+ * Get a flow user state
1314
+ * @param key key of the state
1315
+ */
1316
+ async getFlowUserState(key: string, errorIfNotPossible?: boolean): Promise<any>
1317
+
1318
+ /**
1319
+ * Get the internal state
1320
+ * @deprecated use getState instead
1321
+ */
1322
+ async getInternalState(): Promise<any>
1323
+
1324
+ /**
1325
+ * Get the state shared across executions
1326
+ */
1327
+ async getState(): Promise<any>
1328
+
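+ For example, \`getState\`/\`setState\` can implement a simple cursor that survives between scheduled runs (the state shape is illustrative):
+
+ \`\`\`typescript
+ import * as wmill from "windmill-client";
+
+ // Read the state persisted by the previous execution (empty on the first run)
+ const state = (await wmill.getState()) ?? { lastSeenId: 0 };
+
+ // ... process items with id > state.lastSeenId ...
+
+ // Persist the new cursor for the next execution
+ await wmill.setState({ lastSeenId: state.lastSeenId + 1 });
+ \`\`\`
+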
1329
+ /**
1330
+ * Get a variable by path
1331
+ * @param path path of the variable
1332
+ * @returns variable value
1333
+ */
1334
+ async getVariable(path: string): Promise<string>
1335
+
1336
+ /**
1337
+ * Set a variable by path, create if not exist
1338
+ * @param path path of the variable
1339
+ * @param value value of the variable
1340
+ * @param isSecretIfNotExist if the variable does not exist, create it as secret or not (default: false)
1341
+ * @param descriptionIfNotExist if the variable does not exist, create it with this description (default: "")
1342
+ */
1343
+ async setVariable(path: string, value: string, isSecretIfNotExist?: boolean, descriptionIfNotExist?: string): Promise<void>
1344
+
1345
+ /**
1346
+ * Build a PostgreSQL connection URL from a database resource
1347
+ * @param path - Path to the database resource
1348
+ * @returns PostgreSQL connection URL string
1349
+ */
1350
+ async databaseUrlFromResource(path: string): Promise<string>
1351
+
1352
+ /**
1353
+ * Get S3 client settings from a resource or workspace default
1354
+ * @param s3_resource_path - Path to S3 resource (uses workspace default if undefined)
1355
+ * @returns S3 client configuration settings
1356
+ */
1357
+ async denoS3LightClientSettings(s3_resource_path: string | undefined): Promise<DenoS3LightClientSettings>
1358
+
1359
+ /**
1360
+ * Load the content of a file stored in S3. If the s3ResourcePath is undefined, it will default to the workspace S3 resource.
1361
+ *
1362
+ * \`\`\`typescript
1363
+ * let fileContent = await wmill.loadS3File(inputFile)
1364
+ * // if the file is a raw text file, it can be decoded and printed directly:
1365
+ * const text = new TextDecoder().decode(fileContent)
1366
+ * console.log(text);
1367
+ * \`\`\`
1368
+ */
1369
+ async loadS3File(s3object: S3Object, s3ResourcePath: string | undefined = undefined): Promise<Uint8Array | undefined>
1370
+
1371
+ /**
1372
+ * Load the content of a file stored in S3 as a stream. If the s3ResourcePath is undefined, it will default to the workspace S3 resource.
1373
+ *
1374
+ * \`\`\`typescript
1375
+ * let fileContentBlob = await wmill.loadS3FileStream(inputFile)
1376
+ * // if the content is plain text, the blob can be read directly:
1377
+ * console.log(await fileContentBlob.text());
1378
+ * \`\`\`
1379
+ */
1380
+ async loadS3FileStream(s3object: S3Object, s3ResourcePath: string | undefined = undefined): Promise<Blob | undefined>
1381
+
1382
+ /**
1383
+ * Persist a file to the S3 bucket. If the s3ResourcePath is undefined, it will default to the workspace S3 resource.
1384
+ *
1385
+ * \`\`\`typescript
1386
+ * const s3object = await writeS3File(s3Object, "Hello Windmill!")
1387
+ * const fileContentAsUtf8Str = new TextDecoder().decode(await wmill.loadS3File(s3object))
1388
+ * console.log(fileContentAsUtf8Str)
1389
+ * \`\`\`
1390
+ */
1391
+ async writeS3File(s3object: S3Object | undefined, fileContent: string | Blob, s3ResourcePath: string | undefined = undefined, contentType: string | undefined = undefined, contentDisposition: string | undefined = undefined): Promise<S3Object>
1392
+
1393
+ /**
1394
+ * Sign S3 objects to be used by anonymous users in public apps
1395
+ * @param s3objects s3 objects to sign
1396
+ * @returns signed s3 objects
1397
+ */
1398
+ async signS3Objects(s3objects: S3Object[]): Promise<S3Object[]>
1399
+
1400
+ /**
1401
+ * Sign S3 object to be used by anonymous users in public apps
1402
+ * @param s3object s3 object to sign
1403
+ * @returns signed s3 object
1404
+ */
1405
+ async signS3Object(s3object: S3Object): Promise<S3Object>
1406
+
1407
+ /**
1408
+ * Generate a presigned public URL for an array of S3 objects.
1409
+ * If an S3 object is not signed yet, it will be signed first.
1410
+ * @param s3Objects s3 objects to sign
1411
+ * @returns list of signed public URLs
1412
+ */
1413
+ async getPresignedS3PublicUrls(s3Objects: S3Object[], { baseUrl }: { baseUrl?: string } = {}): Promise<string[]>
1414
+
1415
+ /**
1416
+ * Generate a presigned public URL for an S3 object. If the S3 object is not signed yet, it will be signed first.
1417
+ * @param s3Object s3 object to sign
1418
+ * @returns signed public URL
1419
+ */
1420
+ async getPresignedS3PublicUrl(s3Objects: S3Object, { baseUrl }: { baseUrl?: string } = {}): Promise<string>
1421
+
1422
+ /**
1423
+ * Get URLs needed for resuming a flow after this step
1424
+ * @param approver approver name
1425
+ * @returns approval page UI URL, resume and cancel API URLs for resuming the flow
1426
+ */
1427
+ async getResumeUrls(approver?: string): Promise<{
1428
+ approvalPage: string;
1429
+ resume: string;
1430
+ cancel: string;
1431
+ }>
1432
+
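+ For example, inside a suspended step the URLs can be fetched and passed along to whatever notification channel is in use (the approver name is illustrative):
+
+ \`\`\`typescript
+ import * as wmill from "windmill-client";
+
+ // Get the approval page plus the resume/cancel endpoints for this suspended step
+ const urls = await wmill.getResumeUrls("alice");
+
+ // Forward them, e.g. in a message or as the step result
+ const payload = { approvalPage: urls.approvalPage, resume: urls.resume, cancel: urls.cancel };
+ \`\`\`
+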
1433
+ /**
1434
+ * @deprecated use getResumeUrls instead
1435
+ */
1436
+ getResumeEndpoints(approver?: string): Promise<{
1437
+ approvalPage: string;
1438
+ resume: string;
1439
+ cancel: string;
1440
+ }>
1441
+
1442
+ /**
1443
+ * Get an OIDC jwt token for auth to external services (e.g: Vault, AWS) (ee only)
1444
+ * @param audience audience of the token
1445
+ * @param expiresIn Optional number of seconds until the token expires
1446
+ * @returns jwt token
1447
+ */
1448
+ async getIdToken(audience: string, expiresIn?: number): Promise<string>
1449
+
1450
+ /**
1451
+ * Convert a base64-encoded string to Uint8Array
1452
+ * @param data - Base64-encoded string
1453
+ * @returns Decoded Uint8Array
1454
+ */
1455
+ base64ToUint8Array(data: string): Uint8Array
1456
+
1457
+ /**
1458
+ * Convert a Uint8Array to base64-encoded string
1459
+ * @param arrayBuffer - Uint8Array to encode
1460
+ * @returns Base64-encoded string
1461
+ */
1462
+ uint8ArrayToBase64(arrayBuffer: Uint8Array): string
1463
+
1464
+ /**
1465
+ * Get email from workspace username
1466
+ * This method is particularly useful for apps that require the email address of the viewer.
1467
+ * Indeed, in the viewer context, WM_USERNAME is set to the username of the viewer but WM_EMAIL is set to the email of the creator of the app.
1468
+ * @param username
1469
+ * @returns email address
1470
+ */
1471
+ async usernameToEmail(username: string): Promise<string>
1472
+
1473
+ /**
1474
+ * Sends an interactive approval request via Slack, allowing optional customization of the message, approver, and form fields.
1475
+ *
1476
+ * **[Enterprise Edition Only]** To include form fields in the Slack approval request, go to **Advanced -> Suspend -> Form**
1477
+ * and define a form. Learn more at [Windmill Documentation](https://www.windmill.dev/docs/flows/flow_approval#form).
1478
+ *
1479
+ * @param {Object} options - The configuration options for the Slack approval request.
1480
+ * @param {string} options.slackResourcePath - The path to the Slack resource in Windmill.
1481
+ * @param {string} options.channelId - The Slack channel ID where the approval request will be sent.
1482
+ * @param {string} [options.message] - Optional custom message to include in the Slack approval request.
1483
+ * @param {string} [options.approver] - Optional user ID or name of the approver for the request.
1484
+ * @param {DefaultArgs} [options.defaultArgsJson] - Optional object defining or overriding the default arguments to a form field.
1485
+ * @param {Enums} [options.dynamicEnumsJson] - Optional object overriding the enum default values of an enum form field.
1486
+ *
1487
+ * @returns {Promise<void>} Resolves when the Slack approval request is successfully sent.
1488
+ *
1489
+ * @throws {Error} If the function is not called within a flow or flow preview.
1490
+ * @throws {Error} If the \`JobService.getSlackApprovalPayload\` call fails.
1491
+ *
1492
+ * **Usage Example:**
1493
+ * \`\`\`typescript
1494
+ * await requestInteractiveSlackApproval({
1495
+ * slackResourcePath: "/u/alex/my_slack_resource",
1496
+ * channelId: "admins-slack-channel",
1497
+ * message: "Please approve this request",
1498
+ * approver: "approver123",
1499
+ * defaultArgsJson: { key1: "value1", key2: 42 },
1500
+ * dynamicEnumsJson: { foo: ["choice1", "choice2"], bar: ["optionA", "optionB"] },
1501
+ * });
1502
+ * \`\`\`
1503
+ *
1504
+ * **Note:** This function requires execution within a Windmill flow or flow preview.
1505
+ */
1506
+ async requestInteractiveSlackApproval({ slackResourcePath, channelId, message, approver, defaultArgsJson, dynamicEnumsJson, }: SlackApprovalOptions): Promise<void>
1507
+
1508
+ /**
1509
+ * Sends an interactive approval request via Teams, allowing optional customization of the message, approver, and form fields.
1510
+ *
1511
+ * **[Enterprise Edition Only]** To include form fields in the Teams approval request, go to **Advanced -> Suspend -> Form**
1512
+ * and define a form. Learn more at [Windmill Documentation](https://www.windmill.dev/docs/flows/flow_approval#form).
1513
+ *
1514
+ * @param {Object} options - The configuration options for the Teams approval request.
1515
+ * @param {string} options.teamName - The Teams team name where the approval request will be sent.
1516
+ * @param {string} options.channelName - The Teams channel name where the approval request will be sent.
1517
+ * @param {string} [options.message] - Optional custom message to include in the Teams approval request.
1518
+ * @param {string} [options.approver] - Optional user ID or name of the approver for the request.
1519
+ * @param {DefaultArgs} [options.defaultArgsJson] - Optional object defining or overriding the default arguments to a form field.
1520
+ * @param {Enums} [options.dynamicEnumsJson] - Optional object overriding the enum default values of an enum form field.
1521
+ *
1522
+ * @returns {Promise<void>} Resolves when the Teams approval request is successfully sent.
1523
+ *
1524
+ * @throws {Error} If the function is not called within a flow or flow preview.
1525
+ * @throws {Error} If the \`JobService.getTeamsApprovalPayload\` call fails.
1526
+ *
1527
+ * **Usage Example:**
1528
+ * \`\`\`typescript
1529
+ * await requestInteractiveTeamsApproval({
1530
+ * teamName: "admins-teams",
1531
+ * channelName: "admins-teams-channel",
1532
+ * message: "Please approve this request",
1533
+ * approver: "approver123",
1534
+ * defaultArgsJson: { key1: "value1", key2: 42 },
1535
+ * dynamicEnumsJson: { foo: ["choice1", "choice2"], bar: ["optionA", "optionB"] },
1536
+ * });
1537
+ * \`\`\`
1538
+ *
1539
+ * **Note:** This function requires execution within a Windmill flow or flow preview.
1540
+ */
1541
+ async requestInteractiveTeamsApproval({ teamName, channelName, message, approver, defaultArgsJson, dynamicEnumsJson, }: TeamsApprovalOptions): Promise<void>
1542
+
1543
+ /**
1544
+ * Parse an S3 object from URI string or record format
1545
+ * @param s3Object - S3 object as URI string (s3://storage/key) or record
1546
+ * @returns S3 object record with storage and s3 key
1547
+ */
1548
+ parseS3Object(s3Object: S3Object): S3ObjectRecord
1549
+
1550
+ /**
1551
+ * Create a SQL template function for PostgreSQL/datatable queries
1552
+ * @param name - Database/datatable name (default: "main")
1553
+ * @returns SQL template function for building parameterized queries
1554
+ * @example
1555
+ * let sql = wmill.datatable()
1556
+ * let name = 'Robin'
1557
+ * let age = 21
1558
+ * await sql\`
1559
+ * SELECT * FROM friends
1560
+ * WHERE name = \${name} AND age = \${age}::int
1561
+ * \`.fetch()
1562
+ */
1563
+ datatable(name: string = "main"): SqlTemplateFunction
1564
+
1565
+ /**
1566
+ * Create a SQL template function for DuckDB/ducklake queries
1567
+ * @param name - DuckDB database name (default: "main")
1568
+ * @returns SQL template function for building parameterized queries
1569
+ * @example
1570
+ * let sql = wmill.ducklake()
1571
+ * let name = 'Robin'
1572
+ * let age = 21
1573
+ * await sql\`
1574
+ * SELECT * FROM friends
1575
+ * WHERE name = \${name} AND age = \${age}
1576
+ * \`.fetch()
1577
+ */
1578
+ ducklake(name: string = "main"): SqlTemplateFunction
1579
+
1580
+ async polarsConnectionSettings(s3_resource_path: string | undefined): Promise<any>
1581
+
1582
+ async duckdbConnectionSettings(s3_resource_path: string | undefined): Promise<any>
1583
+
1584
+
1585
+ # Python SDK (wmill)
1586
+
1587
+ Import: import wmill
1588
+
1589
+ def get_mocked_api() -> Optional[dict]
1590
+
1591
+ # Get the HTTP client instance.
1592
+ #
1593
+ # Returns:
1594
+ # Configured httpx.Client for API requests
1595
+ def get_client() -> httpx.Client
1596
+
1597
+ # Make an HTTP GET request to the Windmill API.
1598
+ #
1599
+ # Args:
1600
+ # endpoint: API endpoint path
1601
+ # raise_for_status: Whether to raise an exception on HTTP errors
1602
+ # **kwargs: Additional arguments passed to httpx.get
1603
+ #
1604
+ # Returns:
1605
+ # HTTP response object
1606
+ def get(endpoint, raise_for_status = True, **kwargs) -> httpx.Response
1607
+
1608
+ # Make an HTTP POST request to the Windmill API.
1609
+ #
1610
+ # Args:
1611
+ # endpoint: API endpoint path
1612
+ # raise_for_status: Whether to raise an exception on HTTP errors
1613
+ # **kwargs: Additional arguments passed to httpx.post
1614
+ #
1615
+ # Returns:
1616
+ # HTTP response object
1617
+ def post(endpoint, raise_for_status = True, **kwargs) -> httpx.Response
1618
+
1619
+ # Create a new authentication token.
1620
+ #
1621
+ # Args:
1622
+ # duration: Token validity duration (default: 1 day)
1623
+ #
1624
+ # Returns:
1625
+ # New authentication token string
1626
+ def create_token(duration = dt.timedelta(days=1)) -> str
1627
+
1628
+ # Create a script job and return its job id.
1629
+ #
1630
+ # .. deprecated:: Use run_script_by_path_async or run_script_by_hash_async instead.
1631
+ def run_script_async(path: str = None, hash_: str = None, args: dict = None, scheduled_in_secs: int = None) -> str
1632
+
1633
+ # Create a script job by path and return its job id.
1634
+ def run_script_by_path_async(path: str, args: dict = None, scheduled_in_secs: int = None) -> str
1635
+
1636
+ # Create a script job by hash and return its job id.
1637
+ def run_script_by_hash_async(hash_: str, args: dict = None, scheduled_in_secs: int = None) -> str
1638
+
1639
+ # Create a flow job and return its job id.
1640
+ def run_flow_async(path: str, args: dict = None, scheduled_in_secs: int = None, do_not_track_in_parent: bool = True) -> str
1641
+
1642
+ # Run script synchronously and return its result.
1643
+ #
1644
+ # .. deprecated:: Use run_script_by_path or run_script_by_hash instead.
1645
+ def run_script(path: str = None, hash_: str = None, args: dict = None, timeout: dt.timedelta | int | float | None = None, verbose: bool = False, cleanup: bool = True, assert_result_is_not_none: bool = False) -> Any
1646
+
1647
+ # Run script by path synchronously and return its result.
1648
+ def run_script_by_path(path: str, args: dict = None, timeout: dt.timedelta | int | float | None = None, verbose: bool = False, cleanup: bool = True, assert_result_is_not_none: bool = False) -> Any
1649
+
1650
+ # Run script by hash synchronously and return its result.
1651
+ def run_script_by_hash(hash_: str, args: dict = None, timeout: dt.timedelta | int | float | None = None, verbose: bool = False, cleanup: bool = True, assert_result_is_not_none: bool = False) -> Any
1652
+
1653
+ # Run a script on the current worker without creating a job
1654
+ def run_inline_script_preview(content: str, language: str, args: dict = None) -> Any
1655
+
1656
+ # Wait for a job to complete and return its result.
1657
+ #
1658
+ # Args:
1659
+ # job_id: ID of the job to wait for
1660
+ # timeout: Maximum time to wait (seconds or timedelta)
1661
+ # verbose: Enable verbose logging
1662
+ # cleanup: Register cleanup handler to cancel job on exit
1663
+ # assert_result_is_not_none: Raise exception if result is None
1664
+ #
1665
+ # Returns:
1666
+ # Job result when completed
1667
+ #
1668
+ # Raises:
1669
+ # TimeoutError: If timeout is reached
1670
+ # Exception: If job fails
1671
+ def wait_job(job_id, timeout: dt.timedelta | int | float | None = None, verbose: bool = False, cleanup: bool = True, assert_result_is_not_none: bool = False)
1672
+
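+ A short sketch combining the async helpers (the script path is illustrative):
+
+ \`\`\`python
+ import wmill
+
+ # Start a script without blocking, then wait for its result with a timeout
+ job_id = wmill.run_script_by_path_async("f/examples/compute", {"n": 42})
+ result = wmill.wait_job(job_id, timeout=600)
+ print(result)
+ \`\`\`
+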
1673
+ # Cancel a specific job by ID.
1674
+ #
1675
+ # Args:
1676
+ # job_id: UUID of the job to cancel
1677
+ # reason: Optional reason for cancellation
1678
+ #
1679
+ # Returns:
1680
+ # Response message from the cancel endpoint
1681
+ def cancel_job(job_id: str, reason: str = None) -> str
1682
+
1683
+ # Cancel currently running executions of the same script.
1684
+ def cancel_running() -> dict
1685
+
1686
+ # Get job details by ID.
1687
+ #
1688
+ # Args:
1689
+ # job_id: UUID of the job
1690
+ #
1691
+ # Returns:
1692
+ # Job details dictionary
1693
+ def get_job(job_id: str) -> dict
1694
+
1695
+ # Get the root job ID for a flow hierarchy.
1696
+ #
1697
+ # Args:
1698
+ # job_id: Job ID (defaults to current WM_JOB_ID)
1699
+ #
1700
+ # Returns:
1701
+ # Root job ID
1702
+ def get_root_job_id(job_id: str | None = None) -> dict
1703
+
1704
+ # Get an OIDC JWT token for authentication to external services.
1705
+ #
1706
+ # Args:
1707
+ # audience: Token audience (e.g., "vault", "aws")
1708
+ # expires_in: Optional expiration time in seconds
1709
+ #
1710
+ # Returns:
1711
+ # JWT token string
1712
+ def get_id_token(audience: str, expires_in: int | None = None) -> str
1713
+
1714
+ # Get the status of a job.
1715
+ #
1716
+ # Args:
1717
+ # job_id: UUID of the job
1718
+ #
1719
+ # Returns:
1720
+ # Job status: "RUNNING", "WAITING", or "COMPLETED"
1721
+ def get_job_status(job_id: str) -> JobStatus
1722
+
1723
+ # Get the result of a completed job.
1724
+ #
1725
+ # Args:
1726
+ # job_id: UUID of the completed job
1727
+ # assert_result_is_not_none: Raise exception if result is None
1728
+ #
1729
+ # Returns:
1730
+ # Job result
1731
+ def get_result(job_id: str, assert_result_is_not_none: bool = True) -> Any
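+ # Example (illustrative sketch; assumes job_id refers to a previously started job and that the
+ # status compares equal to the documented string values):
+ # >>> if client.get_job_status(job_id) == "COMPLETED":
+ # ...     result = client.get_result(job_id, assert_result_is_not_none=False)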
1732
+
1733
+ # Get a variable value by path.
1734
+ #
1735
+ # Args:
1736
+ # path: Variable path in Windmill
1737
+ #
1738
+ # Returns:
1739
+ # Variable value as string
1740
+ def get_variable(path: str) -> str
1741
+
1742
+ # Set a variable value by path, creating it if it doesn't exist.
1743
+ #
1744
+ # Args:
1745
+ # path: Variable path in Windmill
1746
+ # value: Variable value to set
1747
+ # is_secret: Whether the variable should be secret (default: False)
1748
+ def set_variable(path: str, value: str, is_secret: bool = False) -> None
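+ # Example (illustrative sketch; "u/alex/counter" is a placeholder variable path):
+ # >>> client.set_variable("u/alex/counter", "42")
+ # >>> current = client.get_variable("u/alex/counter")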
1749
+
1750
+ # Get a resource value by path.
1751
+ #
1752
+ # Args:
1753
+ # path: Resource path in Windmill
1754
+ # none_if_undefined: Return None instead of raising if not found
1755
+ #
1756
+ # Returns:
1757
+ # Resource value dictionary or None
1758
+ def get_resource(path: str, none_if_undefined: bool = False) -> dict | None
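+ # Example (illustrative sketch; the resource path and "host" field are placeholders):
+ # >>> db = client.get_resource("f/examples/my_postgresql", none_if_undefined=True)
+ # >>> host = db["host"] if db else None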
1759
+
1760
+ # Set a resource value by path, creating it if it doesn't exist.
1761
+ #
1762
+ # Args:
1763
+ # value: Resource value to set
1764
+ # path: Resource path in Windmill
1765
+ # resource_type: Resource type for creation
1766
+ def set_resource(value: Any, path: str, resource_type: str)
1767
+
1768
+ # List resources from Windmill workspace.
1769
+ #
1770
+ # Args:
1771
+ # resource_type: Optional resource type to filter by (e.g., "postgresql", "mysql", "s3")
1772
+ # page: Optional page number for pagination
1773
+ # per_page: Optional number of results per page
1774
+ #
1775
+ # Returns:
1776
+ # List of resource dictionaries
1777
+ def list_resources(resource_type: str = None, page: int = None, per_page: int = None) -> list[dict]
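+ # Example (illustrative sketch):
+ # >>> pg_resources = client.list_resources(resource_type="postgresql", per_page=10)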
1778
+
1779
+ # Set the workflow state.
1780
+ #
1781
+ # Args:
1782
+ # value: State value to set
1783
+ def set_state(value: Any)
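+ # Example (illustrative sketch of the usual trigger-script pattern, reading back via state()):
+ # >>> seen = client.state() or []
+ # >>> client.set_state(seen + ["new_item"])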
1784
+
1785
+ # Set job progress percentage (0-99).
1786
+ #
1787
+ # Args:
1788
+ # value: Progress percentage
1789
+ # job_id: Job ID (defaults to current WM_JOB_ID)
1790
+ def set_progress(value: int, job_id: Optional[str] = None)
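+ # Example (illustrative sketch; items and process() are placeholders):
+ # >>> for i, item in enumerate(items):
+ # ...     process(item)
+ # ...     client.set_progress(int(i * 99 / len(items)))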
1791
+
1792
+ # Get job progress percentage.
1793
+ #
1794
+ # Args:
1795
+ # job_id: Job ID (defaults to current WM_JOB_ID)
1796
+ #
1797
+ # Returns:
1798
+ # Progress value (0-100) or None if not set
1799
+ def get_progress(job_id: Optional[str] = None) -> Any
1800
+
1801
+ # Set the user state of a flow at a given key
1802
+ def set_flow_user_state(key: str, value: Any) -> None
1803
+
1804
+ # Get the user state of a flow at a given key
1805
+ def get_flow_user_state(key: str) -> Any
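+ # Example (illustrative sketch; only meaningful when running inside a flow):
+ # >>> client.set_flow_user_state("processed_count", 10)
+ # >>> count = client.get_flow_user_state("processed_count")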
1806
+
1807
+ # Get the Windmill server version.
1808
+ #
1809
+ # Returns:
1810
+ # Version string
1811
+ def version()
1812
+
1813
+ # Convenient helper that takes an S3 resource as input and returns the settings necessary to
1814
+ # initiate an S3 connection from DuckDB
1815
+ def get_duckdb_connection_settings(s3_resource_path: str = '') -> DuckDbConnectionSettings | None
1816
+
1817
+ # Convenient helper that takes an S3 resource as input and returns the settings necessary to
1818
+ # initiate an S3 connection from Polars
1819
+ def get_polars_connection_settings(s3_resource_path: str = '') -> PolarsConnectionSettings
1820
+
1821
+ # Convenient helper that takes an S3 resource as input and returns the settings necessary to
1822
+ # initiate an S3 connection using boto3
1823
+ def get_boto3_connection_settings(s3_resource_path: str = '') -> Boto3ConnectionSettings
1824
+
1825
+ # Load a file from the workspace S3 bucket and return its content as bytes.
1826
+ #
1827
+ # '''python
1828
+ # from wmill import S3Object
1829
+ #
1830
+ # s3_obj = S3Object(s3="/path/to/my_file.txt")
1831
+ # my_obj_content = client.load_s3_file(s3_obj)
1832
+ # file_content = my_obj_content.decode("utf-8")
1833
+ # '''
1834
+ def load_s3_file(s3object: S3Object | str, s3_resource_path: str | None) -> bytes
1835
+
1836
+ # Load a file from the workspace S3 bucket and return its content as a bytes stream.
1837
+ #
1838
+ # '''python
1839
+ # from wmill import S3Object
1840
+ #
1841
+ # s3_obj = S3Object(s3="/path/to/my_file.txt")
1842
+ # with wmill.load_s3_file_reader(s3object, s3_resource_path) as file_reader:
1843
+ # print(file_reader.read())
1844
+ # '''
1845
+ def load_s3_file_reader(s3object: S3Object | str, s3_resource_path: str | None) -> BufferedReader
1846
+
1847
+ # Write a file to the workspace S3 bucket
1848
+ #
1849
+ # '''python
1850
+ # from wmill import S3Object
1851
+ #
1852
+ # s3_obj = S3Object(s3="/path/to/my_file.txt")
1853
+ #
1854
+ # # for an in memory bytes array:
1855
+ # file_content = b'Hello Windmill!'
1856
+ # client.write_s3_file(s3_obj, file_content)
1857
+ #
1858
+ # # for a file:
1859
+ # with open("my_file.txt", "rb") as my_file:
1860
+ # client.write_s3_file(s3_obj, my_file)
1861
+ # '''
1862
+ def write_s3_file(s3object: S3Object | str | None, file_content: BufferedReader | bytes, s3_resource_path: str | None, content_type: str | None = None, content_disposition: str | None = None) -> S3Object
1863
+
1864
+ # Sign S3 objects for use by anonymous users in public apps.
1865
+ #
1866
+ # Args:
1867
+ # s3_objects: List of S3 objects to sign
1868
+ #
1869
+ # Returns:
1870
+ # List of signed S3 objects
1871
+ def sign_s3_objects(s3_objects: list[S3Object | str]) -> list[S3Object]
1872
+
1873
+ # Sign a single S3 object for use by anonymous users in public apps.
1874
+ #
1875
+ # Args:
1876
+ # s3_object: S3 object to sign
1877
+ #
1878
+ # Returns:
1879
+ # Signed S3 object
1880
+ def sign_s3_object(s3_object: S3Object | str) -> S3Object
1881
+
1882
+ # Generate presigned public URLs for an array of S3 objects.
1883
+ # If an S3 object is not signed yet, it will be signed first.
1884
+ #
1885
+ # Args:
1886
+ # s3_objects: List of S3 objects to sign
1887
+ # base_url: Optional base URL for the presigned URLs (defaults to WM_BASE_URL)
1888
+ #
1889
+ # Returns:
1890
+ # List of signed public URLs
1891
+ #
1892
+ # Example:
1893
+ # >>> s3_objs = [S3Object(s3="/path/to/file1.txt"), S3Object(s3="/path/to/file2.txt")]
1894
+ # >>> urls = client.get_presigned_s3_public_urls(s3_objs)
1895
+ def get_presigned_s3_public_urls(s3_objects: list[S3Object | str], base_url: str | None = None) -> list[str]
1896
+
1897
+ # Generate a presigned public URL for an S3 object.
1898
+ # If the S3 object is not signed yet, it will be signed first.
1899
+ #
1900
+ # Args:
1901
+ # s3_object: S3 object to sign
1902
+ # base_url: Optional base URL for the presigned URL (defaults to WM_BASE_URL)
1903
+ #
1904
+ # Returns:
1905
+ # Signed public URL
1906
+ #
1907
+ # Example:
1908
+ # >>> s3_obj = S3Object(s3="/path/to/file.txt")
1909
+ # >>> url = client.get_presigned_s3_public_url(s3_obj)
1910
+ def get_presigned_s3_public_url(s3_object: S3Object | str, base_url: str | None = None) -> str
1911
+
1912
+ # Get the current user information.
1913
+ #
1914
+ # Returns:
1915
+ # User details dictionary
1916
+ def whoami() -> dict
1917
+
1918
+ # Get the current user information (alias for whoami).
1919
+ #
1920
+ # Returns:
1921
+ # User details dictionary
1922
+ def user() -> dict
1923
+
1924
+ # Get the state resource path from environment.
1925
+ #
1926
+ # Returns:
1927
+ # State path string
1928
+ def state_path() -> str
1929
+
1930
+ # Get the workflow state.
1931
+ #
1932
+ # Returns:
1933
+ # State value or None if not set
1934
+ def state() -> Any
1935
+
1936
+ # Set the state in the shared folder using pickle
1937
+ def set_shared_state_pickle(value: Any, path: str = 'state.pickle') -> None
1938
+
1939
+ # Get the state in the shared folder using pickle
1940
+ def get_shared_state_pickle(path: str = 'state.pickle') -> Any
1941
+
1942
+ # Set the state in the shared folder using JSON
1943
+ def set_shared_state(value: Any, path: str = 'state.json') -> None
1944
+
1945
+ # Get the state in the shared folder using JSON
1946
+ def get_shared_state(path: str = 'state.json') -> None
1947
+
1948
+ # Get URLs needed for resuming a flow after suspension.
1949
+ #
1950
+ # Args:
1951
+ # approver: Optional approver name
1952
+ #
1953
+ # Returns:
1954
+ # Dictionary with approvalPage, resume, and cancel URLs
1955
+ def get_resume_urls(approver: str = None) -> dict
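+ # Example (illustrative sketch; typically called from a suspended/approval step):
+ # >>> urls = client.get_resume_urls(approver="ops_team")
+ # >>> approval_page, resume, cancel = urls["approvalPage"], urls["resume"], urls["cancel"]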
1956
+
1957
+ # Sends an interactive approval request via Slack, allowing optional customization of the message, approver, and form fields.
1958
+ #
1959
+ # **[Enterprise Edition Only]** To include form fields in the Slack approval request, use the "Advanced -> Suspend -> Form" functionality.
1960
+ # Learn more at: https://www.windmill.dev/docs/flows/flow_approval#form
1961
+ #
1962
+ # :param slack_resource_path: The path to the Slack resource in Windmill.
1963
+ # :type slack_resource_path: str
1964
+ # :param channel_id: The Slack channel ID where the approval request will be sent.
1965
+ # :type channel_id: str
1966
+ # :param message: Optional custom message to include in the Slack approval request.
1967
+ # :type message: str, optional
1968
+ # :param approver: Optional user ID or name of the approver for the request.
1969
+ # :type approver: str, optional
1970
+ # :param default_args_json: Optional dictionary defining or overriding the default arguments for form fields.
1971
+ # :type default_args_json: dict, optional
1972
+ # :param dynamic_enums_json: Optional dictionary overriding the enum default values of enum form fields.
1973
+ # :type dynamic_enums_json: dict, optional
1974
+ #
1975
+ # :raises Exception: If the function is not called within a flow or flow preview.
1976
+ # :raises Exception: If the required flow job or flow step environment variables are not set.
1977
+ #
1978
+ # :return: None
1979
+ #
1980
+ # **Usage Example:**
1981
+ # >>> client.request_interactive_slack_approval(
1982
+ # ... slack_resource_path="/u/alex/my_slack_resource",
1983
+ # ... channel_id="admins-slack-channel",
1984
+ # ... message="Please approve this request",
1985
+ # ... approver="approver123",
1986
+ # ... default_args_json={"key1": "value1", "key2": 42},
1987
+ # ... dynamic_enums_json={"foo": ["choice1", "choice2"], "bar": ["optionA", "optionB"]},
1988
+ # ... )
1989
+ #
1990
+ # **Notes:**
1991
+ # - This function must be executed within a Windmill flow or flow preview.
1992
+ # - The function checks for required environment variables (\`WM_FLOW_JOB_ID\`, \`WM_FLOW_STEP_ID\`) to ensure it is run in the appropriate context.
1993
+ def request_interactive_slack_approval(slack_resource_path: str, channel_id: str, message: str = None, approver: str = None, default_args_json: dict = None, dynamic_enums_json: dict = None) -> None
1994
+
1995
+ # Get email from workspace username
1996
+ # This method is particularly useful for apps that require the email address of the viewer.
1997
+ # Indeed, in the viewer context WM_USERNAME is set to the username of the viewer but WM_EMAIL is set to the email of the creator of the app.
1998
+ def username_to_email(username: str) -> str
1999
+
2000
+ # Send a message to a Microsoft Teams conversation with conversation_id, where success is used to style the message
2001
+ def send_teams_message(conversation_id: str, text: str, success: bool = True, card_block: dict = None)
2002
+
2003
+ # Get a DataTable client for SQL queries.
2004
+ #
2005
+ # Args:
2006
+ # name: Database name (default: "main")
2007
+ #
2008
+ # Returns:
2009
+ # DataTableClient instance
2010
+ def datatable(name: str = 'main')
2011
+
2012
+ # Get a DuckLake client for DuckDB queries.
2013
+ #
2014
+ # Args:
2015
+ # name: Database name (default: "main")
2016
+ #
2017
+ # Returns:
2018
+ # DucklakeClient instance
2019
+ def ducklake(name: str = 'main')
2020
+
2021
+ def init_global_client(f)
2022
+
2023
+ def deprecate(in_favor_of: str)
2024
+
2025
+ # Get the current workspace ID.
2026
+ #
2027
+ # Returns:
2028
+ # Workspace ID string
2029
+ def get_workspace() -> str
2030
+
2031
+ def get_version() -> str
2032
+
2033
+ # Run a script synchronously by hash and return its result.
2034
+ #
2035
+ # Args:
2036
+ # hash: Script hash
2037
+ # args: Script arguments
2038
+ # verbose: Enable verbose logging
2039
+ # assert_result_is_not_none: Raise exception if result is None
2040
+ # cleanup: Register cleanup handler to cancel job on exit
2041
+ # timeout: Maximum time to wait
2042
+ #
2043
+ # Returns:
2044
+ # Script result
2045
+ def run_script_sync(hash: str, args: Dict[str, Any] = None, verbose: bool = False, assert_result_is_not_none: bool = True, cleanup: bool = True, timeout: dt.timedelta = None) -> Any
2046
+
2047
+ # Run a script synchronously by path and return its result.
2048
+ #
2049
+ # Args:
2050
+ # path: Script path
2051
+ # args: Script arguments
2052
+ # verbose: Enable verbose logging
2053
+ # assert_result_is_not_none: Raise exception if result is None
2054
+ # cleanup: Register cleanup handler to cancel job on exit
2055
+ # timeout: Maximum time to wait
2056
+ #
2057
+ # Returns:
2058
+ # Script result
2059
+ def run_script_by_path_sync(path: str, args: Dict[str, Any] = None, verbose: bool = False, assert_result_is_not_none: bool = True, cleanup: bool = True, timeout: dt.timedelta = None) -> Any
2060
+
2061
+ # Convenient helper that takes an S3 resource as input and returns the settings necessary to
2062
+ # initiate an S3 connection from DuckDB
2063
+ def duckdb_connection_settings(s3_resource_path: str = '') -> DuckDbConnectionSettings
2064
+
2065
+ # Convenient helper that takes an S3 resource as input and returns the settings necessary to
2066
+ # initiate an S3 connection from Polars
2067
+ def polars_connection_settings(s3_resource_path: str = '') -> PolarsConnectionSettings
2068
+
2069
+ # Convenient helper that takes an S3 resource as input and returns the settings necessary to
2070
+ # initiate an S3 connection using boto3
2071
+ def boto3_connection_settings(s3_resource_path: str = '') -> Boto3ConnectionSettings
2072
+
2073
+ # Get the state
2074
+ def get_state() -> Any
2075
+
2076
+ # Get the state resource path from environment.
2077
+ #
2078
+ # Returns:
2079
+ # State path string
2080
+ def get_state_path() -> str
2081
+
2082
+ # Decorator to mark a function as a workflow task.
2083
+ #
2084
+ # When executed inside a Windmill job, the decorated function runs as a
2085
+ # separate workflow step. Outside Windmill, it executes normally.
2086
+ #
2087
+ # Args:
2088
+ # tag: Optional worker tag for execution
2089
+ #
2090
+ # Returns:
2091
+ # Decorated function
2092
+ def task(*args, **kwargs)
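+ # Example (illustrative sketch; heavy_step is a placeholder function):
+ # >>> @task()
+ # ... def heavy_step(x: int):
+ # ...     return x * 2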
2093
+
2094
+ # Parse resource syntax from string.
2095
+ def parse_resource_syntax(s: str) -> Optional[str]
2096
+
2097
+ # Parse S3 object from string or S3Object format.
2098
+ def parse_s3_object(s3_object: S3Object | str) -> S3Object
2099
+
2100
+ # Parse variable syntax from string.
2101
+ def parse_variable_syntax(s: str) -> Optional[str]
2102
+
2103
+ # Append text to the result stream.
2104
+ #
2105
+ # Args:
2106
+ # text: text to append to the result stream
2107
+ def append_to_result_stream(text: str) -> None
2108
+
2109
+ # Stream to the result stream.
2110
+ #
2111
+ # Args:
2112
+ # stream: the stream to forward to the result stream
2113
+ def stream_result(stream) -> None
2114
+
2115
+ # Execute a SQL query against the DataTable.
2116
+ #
2117
+ # Args:
2118
+ # sql: SQL query string with $1, $2, etc. placeholders
2119
+ # *args: Positional arguments to bind to query placeholders
2120
+ #
2121
+ # Returns:
2122
+ # SqlQuery instance for fetching results
2123
+ def query(sql: str, *args)
2124
+
2125
+ # Execute query and fetch results.
2126
+ #
2127
+ # Args:
2128
+ # result_collection: Optional result collection mode
2129
+ #
2130
+ # Returns:
2131
+ # Query results
2132
+ def fetch(result_collection: str | None = None)
2133
+
2134
+ # Execute query and fetch first row of results.
2135
+ #
2136
+ # Returns:
2137
+ # First row of query results
2138
+ def fetch_one()
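+ # Example (illustrative sketch; the "users" table is a placeholder):
+ # >>> dt = client.datatable()
+ # >>> rows = dt.query("SELECT name FROM users WHERE id = $1", 42).fetch()
+ # >>> count = dt.query("SELECT count(*) AS n FROM users").fetch_one()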
2139
+
2140
+ # DuckDB executor requires explicit argument types at declaration
2141
+ # These types exist in both DuckDB and Postgres
2142
+ # Check that the types exist if you plan to extend this function for other SQL engines.
2143
+ def infer_sql_type(value) -> str
2144
+
2145
+ `;
2146
+ export const FLOW_PROMPT = `# Windmill Flow Building Guide
2147
+
2148
+ The OpenFlow schema (openflow.openapi.yaml) is the source of truth for flow structure. Refer to OPENFLOW_SCHEMA for the complete type definitions.
2149
+
2150
+ ## Reserved Module IDs
2151
+
2152
+ - \`failure\` - Reserved for failure handler module
2153
+ - \`preprocessor\` - Reserved for preprocessor module
2154
+ - \`Input\` - Reserved for flow input reference
2155
+
2156
+ ## Module ID Rules
2157
+
2158
+ - Must be unique across the entire flow
2159
+ - Use underscores, not spaces (e.g., \`fetch_data\` not \`fetch data\`)
2160
+ - Use descriptive names that reflect the step's purpose
2161
+
2162
+ ## Common Mistakes to Avoid
2163
+
2164
+ - Missing \`input_transforms\` - Rawscript parameters won't receive values without them
2165
+ - Referencing future steps - \`results.step_id\` only works for steps that execute before the current one
2166
+ - Duplicate module IDs - Each module ID must be unique in the flow
2167
+
2168
+ ## Data Flow Between Steps
2169
+
2170
+ - \`flow_input.property\` - Access flow input parameters
2171
+ - \`results.step_id\` - Access output from a previous step
2172
+ - \`results.step_id.property\` - Access specific property from previous step output
2173
+ - \`flow_input.iter.value\` - Current item when inside a for-loop
2174
+ - \`flow_input.iter.index\` - Current index when inside a for-loop
2175
+
2176
+ ## Input Transforms
2177
+
2178
+ Every rawscript module needs \`input_transforms\` to map function parameters to values:
2179
+
2180
+ Static transform (fixed value):
2181
+ {"param_name": {"type": "static", "value": "fixed_string"}}
2182
+
2183
+ JavaScript transform (dynamic expression):
2184
+ {"param_name": {"type": "javascript", "expr": "results.previous_step.data"}}
2185
+
2186
+ ## Resource References
2187
+
2188
+ - For flow inputs: Use type \`"object"\` with format \`"resource-{type}"\` (e.g., \`"resource-postgresql"\`)
2189
+ - For step inputs: Use static value \`"$res:path/to/resource"\`
2190
+
2191
+ ## Failure Handler
2192
+
2193
+ Executes when any step fails. Has access to error details:
2194
+
2195
+ - \`error.message\` - Error message
2196
+ - \`error.step_id\` - ID of failed step
2197
+ - \`error.name\` - Error name
2198
+ - \`error.stack\` - Stack trace
2199
+
2200
+ ## S3 Object Operations
2201
+
2202
+ Windmill provides built-in support for S3-compatible storage operations.
2203
+
2204
+ To accept an S3 object as flow input:
2205
+
2206
+ \`\`\`json
2207
+ {
2208
+ "type": "object",
2209
+ "properties": {
2210
+ "file": {
2211
+ "type": "object",
2212
+ "format": "resource-s3_object",
2213
+ "description": "File to process"
2214
+ }
2215
+ }
2216
+ }
2217
+ \`\`\`
2218
+
2219
+ ## Using Resources in Flows
2220
+
2221
+ On Windmill, credentials and configuration are stored in resources. Resource types define the format of the resource.
2222
+
2223
+ ### As Flow Input
2224
+
2225
+ In the flow schema, set the property type to \`"object"\` with format \`"resource-{type}"\`:
2226
+
2227
+ \`\`\`json
2228
+ {
2229
+ "type": "object",
2230
+ "properties": {
2231
+ "database": {
2232
+ "type": "object",
2233
+ "format": "resource-postgresql",
2234
+ "description": "Database connection"
2235
+ }
2236
+ }
2237
+ }
2238
+ \`\`\`
2239
+
2240
+ ### As Step Input (Static Reference)
2241
+
2242
+ Reference a specific resource using \`$res:\` prefix:
2243
+
2244
+ \`\`\`json
2245
+ {
2246
+ "database": {
2247
+ "type": "static",
2248
+ "value": "$res:f/folder/my_database"
2249
+ }
2250
+ }
2251
+ \`\`\`
2252
+
2253
+
2254
+ ## OpenFlow Schema
2255
+
2256
+ {"OpenFlow":{"type":"object","description":"Top-level flow definition containing metadata, configuration, and the flow structure","properties":{"summary":{"type":"string","description":"Short description of what this flow does"},"description":{"type":"string","description":"Detailed documentation for this flow"},"value":{"$ref":"#/components/schemas/FlowValue"},"schema":{"type":"object","description":"JSON Schema for flow inputs. Use this to define input parameters, their types, defaults, and validation. For resource inputs, set type to 'object' and format to 'resource-<type>' (e.g., 'resource-stripe')"}},"required":["summary","value"]},"FlowValue":{"type":"object","description":"The flow structure containing modules and optional preprocessor/failure handlers","properties":{"modules":{"type":"array","description":"Array of steps that execute in sequence. Each step can be a script, subflow, loop, or branch","items":{"$ref":"#/components/schemas/FlowModule"}},"failure_module":{"description":"Special module that executes when the flow fails. Receives error object with message, name, stack, and step_id. Must have id 'failure'. Only supports script/rawscript types","$ref":"#/components/schemas/FlowModule"},"preprocessor_module":{"description":"Special module that runs before the first step on external triggers. Must have id 'preprocessor'. Only supports script/rawscript types. Cannot reference other step results","$ref":"#/components/schemas/FlowModule"},"same_worker":{"type":"boolean","description":"If true, all steps run on the same worker for better performance"},"concurrent_limit":{"type":"number","description":"Maximum number of concurrent executions of this flow"},"concurrency_key":{"type":"string","description":"Expression to group concurrent executions (e.g., by user ID)"},"concurrency_time_window_s":{"type":"number","description":"Time window in seconds for concurrent_limit"},"debounce_delay_s":{"type":"number","description":"Delay in seconds to debounce flow executions"},"debounce_key":{"type":"string","description":"Expression to group debounced executions"},"skip_expr":{"type":"string","description":"JavaScript expression to conditionally skip the entire flow"},"cache_ttl":{"type":"number","description":"Cache duration in seconds for flow results"},"cache_ignore_s3_path":{"type":"boolean"},"flow_env":{"type":"object","description":"Environment variables available to all steps","additionalProperties":{"type":"string"}},"priority":{"type":"number","description":"Execution priority (higher numbers run first)"},"early_return":{"type":"string","description":"JavaScript expression to return early from the flow"},"chat_input_enabled":{"type":"boolean","description":"Whether this flow accepts chat-style input"},"notes":{"type":"array","description":"Sticky notes attached to the flow","items":{"$ref":"#/components/schemas/FlowNote"}}},"required":["modules"]},"Retry":{"type":"object","description":"Retry configuration for failed module executions","properties":{"constant":{"type":"object","description":"Retry with constant delay between attempts","properties":{"attempts":{"type":"integer","description":"Number of retry attempts"},"seconds":{"type":"integer","description":"Seconds to wait between retries"}}},"exponential":{"type":"object","description":"Retry with exponential backoff (delay doubles each time)","properties":{"attempts":{"type":"integer","description":"Number of retry attempts"},"multiplier":{"type":"integer","description":"Multiplier for exponential 
backoff"},"seconds":{"type":"integer","minimum":1,"description":"Initial delay in seconds"},"random_factor":{"type":"integer","minimum":0,"maximum":100,"description":"Random jitter percentage (0-100) to avoid thundering herd"}}},"retry_if":{"$ref":"#/components/schemas/RetryIf"}}},"FlowNote":{"type":"object","description":"A sticky note attached to a flow for documentation and annotation","properties":{"id":{"type":"string","description":"Unique identifier for the note"},"text":{"type":"string","description":"Content of the note"},"position":{"type":"object","description":"Position of the note in the flow editor","properties":{"x":{"type":"number","description":"X coordinate"},"y":{"type":"number","description":"Y coordinate"}},"required":["x","y"]},"size":{"type":"object","description":"Size of the note in the flow editor","properties":{"width":{"type":"number","description":"Width in pixels"},"height":{"type":"number","description":"Height in pixels"}},"required":["width","height"]},"color":{"type":"string","description":"Color of the note (e.g., \\"yellow\\", \\"#ffff00\\")"},"type":{"type":"string","enum":["free","group"],"description":"Type of note - 'free' for standalone notes, 'group' for notes that group other nodes"},"locked":{"type":"boolean","default":false,"description":"Whether the note is locked and cannot be edited or moved"},"contained_node_ids":{"type":"array","items":{"type":"string"},"description":"For group notes, the IDs of nodes contained within this group"}},"required":["id","text","color","type"]},"RetryIf":{"type":"object","description":"Conditional retry based on error or result","properties":{"expr":{"type":"string","description":"JavaScript expression that returns true to retry. Has access to 'result' and 'error' variables"}},"required":["expr"]},"StopAfterIf":{"type":"object","description":"Early termination condition for a module","properties":{"skip_if_stopped":{"type":"boolean","description":"If true, following steps are skipped when this condition triggers"},"expr":{"type":"string","description":"JavaScript expression evaluated after the module runs. Can use 'result' (step's result) or 'flow_input'. Return true to stop"},"error_message":{"type":"string","description":"Custom error message shown when stopping"}},"required":["expr"]},"FlowModule":{"type":"object","description":"A single step in a flow. Can be a script, subflow, loop, or branch","properties":{"id":{"type":"string","description":"Unique identifier for this step. Used to reference results via 'results.step_id'. Must be a valid identifier (alphanumeric, underscore, hyphen)"},"value":{"$ref":"#/components/schemas/FlowModuleValue"},"stop_after_if":{"description":"Early termination condition evaluated after this step completes","$ref":"#/components/schemas/StopAfterIf"},"stop_after_all_iters_if":{"description":"For loops only - early termination condition evaluated after all iterations complete","$ref":"#/components/schemas/StopAfterIf"},"skip_if":{"type":"object","description":"Conditionally skip this step based on previous results or flow inputs","properties":{"expr":{"type":"string","description":"JavaScript expression that returns true to skip. 
Can use 'flow_input' or 'results.<step_id>'"}},"required":["expr"]},"sleep":{"description":"Delay before executing this step (in seconds or as expression)","$ref":"#/components/schemas/InputTransform"},"cache_ttl":{"type":"number","description":"Cache duration in seconds for this step's results"},"cache_ignore_s3_path":{"type":"boolean"},"timeout":{"description":"Maximum execution time in seconds (static value or expression)","$ref":"#/components/schemas/InputTransform"},"delete_after_use":{"type":"boolean","description":"If true, this step's result is deleted after use to save memory"},"summary":{"type":"string","description":"Short description of what this step does"},"mock":{"type":"object","description":"Mock configuration for testing without executing the actual step","properties":{"enabled":{"type":"boolean","description":"If true, return mock value instead of executing"},"return_value":{"description":"Value to return when mocked"}}},"suspend":{"type":"object","description":"Configuration for approval/resume steps that wait for user input","properties":{"required_events":{"type":"integer","description":"Number of approvals required before continuing"},"timeout":{"type":"integer","description":"Timeout in seconds before auto-continuing or canceling"},"resume_form":{"type":"object","description":"Form schema for collecting input when resuming","properties":{"schema":{"type":"object","description":"JSON Schema for the resume form"}}},"user_auth_required":{"type":"boolean","description":"If true, only authenticated users can approve"},"user_groups_required":{"description":"Expression or list of groups that can approve","$ref":"#/components/schemas/InputTransform"},"self_approval_disabled":{"type":"boolean","description":"If true, the user who started the flow cannot approve"},"hide_cancel":{"type":"boolean","description":"If true, hide the cancel button on the approval form"},"continue_on_disapprove_timeout":{"type":"boolean","description":"If true, continue flow on timeout instead of canceling"}}},"priority":{"type":"number","description":"Execution priority for this step (higher numbers run first)"},"continue_on_error":{"type":"boolean","description":"If true, flow continues even if this step fails"},"retry":{"description":"Retry configuration if this step fails","$ref":"#/components/schemas/Retry"}},"required":["value","id"]},"InputTransform":{"description":"Maps input parameters for a step. Can be a static value or a JavaScript expression that references previous results or flow inputs","oneOf":[{"$ref":"#/components/schemas/StaticTransform"},{"$ref":"#/components/schemas/JavascriptTransform"}],"discriminator":{"propertyName":"type","mapping":{"static":"#/components/schemas/StaticTransform","javascript":"#/components/schemas/JavascriptTransform"}}},"StaticTransform":{"type":"object","description":"Static value passed directly to the step. Use for hardcoded values or resource references like '$res:path/to/resource'","properties":{"value":{"description":"The static value. For resources, use format '$res:path/to/resource'"},"type":{"type":"string","enum":["static"]}},"required":["type"]},"JavascriptTransform":{"type":"object","description":"JavaScript expression evaluated at runtime. Can reference previous step results via 'results.step_id' or flow inputs via 'flow_input.property'. Inside loops, use 'flow_input.iter.value' for the current iteration value","properties":{"expr":{"type":"string","description":"JavaScript expression returning the value. 
Available variables - results (object with all previous step results), flow_input (flow inputs), flow_input.iter (in loops)"},"type":{"type":"string","enum":["javascript"]}},"required":["expr","type"]},"FlowModuleValue":{"description":"The actual implementation of a flow step. Can be a script (inline or referenced), subflow, loop, branch, or special module type","oneOf":[{"$ref":"#/components/schemas/RawScript"},{"$ref":"#/components/schemas/PathScript"},{"$ref":"#/components/schemas/PathFlow"},{"$ref":"#/components/schemas/ForloopFlow"},{"$ref":"#/components/schemas/WhileloopFlow"},{"$ref":"#/components/schemas/BranchOne"},{"$ref":"#/components/schemas/BranchAll"},{"$ref":"#/components/schemas/Identity"},{"$ref":"#/components/schemas/AiAgent"}],"discriminator":{"propertyName":"type","mapping":{"rawscript":"#/components/schemas/RawScript","script":"#/components/schemas/PathScript","flow":"#/components/schemas/PathFlow","forloopflow":"#/components/schemas/ForloopFlow","whileloopflow":"#/components/schemas/WhileloopFlow","branchone":"#/components/schemas/BranchOne","branchall":"#/components/schemas/BranchAll","identity":"#/components/schemas/Identity","aiagent":"#/components/schemas/AiAgent"}}},"RawScript":{"type":"object","description":"Inline script with code defined directly in the flow. Use 'bun' as default language if unspecified. The script receives arguments from input_transforms","properties":{"input_transforms":{"type":"object","description":"Map of parameter names to their values (static or JavaScript expressions). These become the script's input arguments","additionalProperties":{"$ref":"#/components/schemas/InputTransform"}},"content":{"type":"string","description":"The script source code. Should export a 'main' function"},"language":{"type":"string","description":"Programming language for this script","enum":["deno","bun","python3","go","bash","powershell","postgresql","mysql","bigquery","snowflake","mssql","oracledb","graphql","nativets","php"]},"path":{"type":"string","description":"Optional path for saving this script"},"lock":{"type":"string","description":"Lock file content for dependencies"},"type":{"type":"string","enum":["rawscript"]},"tag":{"type":"string","description":"Worker group tag for execution routing"},"concurrent_limit":{"type":"number","description":"Maximum concurrent executions of this script"},"concurrency_time_window_s":{"type":"number","description":"Time window for concurrent_limit"},"custom_concurrency_key":{"type":"string","description":"Custom key for grouping concurrent executions"},"is_trigger":{"type":"boolean","description":"If true, this script is a trigger that can start the flow"},"assets":{"type":"array","description":"External resources this script accesses (S3 objects, resources, etc.)","items":{"type":"object","required":["path","kind"],"properties":{"path":{"type":"string","description":"Path to the asset"},"kind":{"type":"string","description":"Type of asset","enum":["s3object","resource","ducklake","datatable"]},"access_type":{"type":"string","description":"Access level for this asset","enum":["r","w","rw"]},"alt_access_type":{"type":"string","description":"Alternative access level","enum":["r","w","rw"]}}}}},"required":["type","content","language","input_transforms"]},"PathScript":{"type":"object","description":"Reference to an existing script by path. 
Use this when calling a previously saved script instead of writing inline code","properties":{"input_transforms":{"type":"object","description":"Map of parameter names to their values (static or JavaScript expressions). These become the script's input arguments","additionalProperties":{"$ref":"#/components/schemas/InputTransform"}},"path":{"type":"string","description":"Path to the script in the workspace (e.g., 'f/scripts/send_email')"},"hash":{"type":"string","description":"Optional specific version hash of the script to use"},"type":{"type":"string","enum":["script"]},"tag_override":{"type":"string","description":"Override the script's default worker group tag"},"is_trigger":{"type":"boolean","description":"If true, this script is a trigger that can start the flow"}},"required":["type","path","input_transforms"]},"PathFlow":{"type":"object","description":"Reference to an existing flow by path. Use this to call another flow as a subflow","properties":{"input_transforms":{"type":"object","description":"Map of parameter names to their values (static or JavaScript expressions). These become the subflow's input arguments","additionalProperties":{"$ref":"#/components/schemas/InputTransform"}},"path":{"type":"string","description":"Path to the flow in the workspace (e.g., 'f/flows/process_user')"},"type":{"type":"string","enum":["flow"]}},"required":["type","path","input_transforms"]},"ForloopFlow":{"type":"object","description":"Executes nested modules in a loop over an iterator. Inside the loop, use 'flow_input.iter.value' to access the current iteration value, and 'flow_input.iter.index' for the index. Supports parallel execution for better performance on I/O-bound operations","properties":{"modules":{"type":"array","description":"Steps to execute for each iteration. These can reference the iteration value via 'flow_input.iter.value'","items":{"$ref":"#/components/schemas/FlowModule"}},"iterator":{"description":"JavaScript expression that returns an array to iterate over. Can reference 'results.step_id' or 'flow_input'","$ref":"#/components/schemas/InputTransform"},"skip_failures":{"type":"boolean","description":"If true, iteration failures don't stop the loop. Failed iterations return null"},"type":{"type":"string","enum":["forloopflow"]},"parallel":{"type":"boolean","description":"If true, iterations run concurrently (faster for I/O-bound operations). Use with parallelism to control concurrency"},"parallelism":{"description":"Maximum number of concurrent iterations when parallel=true. Limits resource usage. Can be static number or expression","$ref":"#/components/schemas/InputTransform"},"squash":{"type":"boolean"}},"required":["modules","iterator","skip_failures","type"]},"WhileloopFlow":{"type":"object","description":"Executes nested modules repeatedly while a condition is true. The loop checks the condition after each iteration. Use stop_after_if on modules to control loop termination","properties":{"modules":{"type":"array","description":"Steps to execute in each iteration. Use stop_after_if to control when the loop ends","items":{"$ref":"#/components/schemas/FlowModule"}},"skip_failures":{"type":"boolean","description":"If true, iteration failures don't stop the loop. 
Failed iterations return null"},"type":{"type":"string","enum":["whileloopflow"]},"parallel":{"type":"boolean","description":"If true, iterations run concurrently (use with caution in while loops)"},"parallelism":{"description":"Maximum number of concurrent iterations when parallel=true","$ref":"#/components/schemas/InputTransform"},"squash":{"type":"boolean"}},"required":["modules","skip_failures","type"]},"BranchOne":{"type":"object","description":"Conditional branching where only the first matching branch executes. Branches are evaluated in order, and the first one with a true expression runs. If no branches match, the default branch executes","properties":{"branches":{"type":"array","description":"Array of branches to evaluate in order. The first branch with expr evaluating to true executes","items":{"type":"object","properties":{"summary":{"type":"string","description":"Short description of this branch condition"},"expr":{"type":"string","description":"JavaScript expression that returns boolean. Can use 'results.step_id' or 'flow_input'. First true expr wins"},"modules":{"type":"array","description":"Steps to execute if this branch's expr is true","items":{"$ref":"#/components/schemas/FlowModule"}}},"required":["modules","expr"]}},"default":{"type":"array","description":"Steps to execute if no branch expressions match","items":{"$ref":"#/components/schemas/FlowModule"}},"type":{"type":"string","enum":["branchone"]}},"required":["branches","default","type"]},"BranchAll":{"type":"object","description":"Parallel branching where all branches execute simultaneously. Unlike BranchOne, all branches run regardless of conditions. Useful for executing independent tasks concurrently","properties":{"branches":{"type":"array","description":"Array of branches that all execute (either in parallel or sequentially)","items":{"type":"object","properties":{"summary":{"type":"string","description":"Short description of this branch's purpose"},"skip_failure":{"type":"boolean","description":"If true, failure in this branch doesn't fail the entire flow"},"modules":{"type":"array","description":"Steps to execute in this branch","items":{"$ref":"#/components/schemas/FlowModule"}}},"required":["modules"]}},"type":{"type":"string","enum":["branchall"]},"parallel":{"type":"boolean","description":"If true, all branches execute concurrently. If false, they execute sequentially"}},"required":["branches","type"]},"AgentTool":{"type":"object","description":"A tool available to an AI agent. Can be a flow module or an external MCP (Model Context Protocol) tool","properties":{"id":{"type":"string","description":"Unique identifier for this tool. Cannot contain spaces - use underscores instead (e.g., 'get_user_data' not 'get user data')"},"summary":{"type":"string","description":"Short description of what this tool does (shown to the AI)"},"value":{"$ref":"#/components/schemas/ToolValue"}},"required":["id","value"]},"ToolValue":{"description":"The implementation of a tool. Can be a flow module (script/flow) or an MCP tool reference","oneOf":[{"$ref":"#/components/schemas/FlowModuleTool"},{"$ref":"#/components/schemas/McpToolValue"}],"discriminator":{"propertyName":"tool_type","mapping":{"flowmodule":"#/components/schemas/FlowModuleTool","mcp":"#/components/schemas/McpToolValue"}}},"FlowModuleTool":{"description":"A tool implemented as a flow module (script, flow, etc.). 
The AI can call this like any other flow module","allOf":[{"type":"object","properties":{"tool_type":{"type":"string","enum":["flowmodule"]}},"required":["tool_type"]},{"$ref":"#/components/schemas/FlowModuleValue"}]},"McpToolValue":{"type":"object","description":"Reference to an external MCP (Model Context Protocol) tool. The AI can call tools from MCP servers","properties":{"tool_type":{"type":"string","enum":["mcp"]},"resource_path":{"type":"string","description":"Path to the MCP resource/server configuration"},"include_tools":{"type":"array","description":"Whitelist of specific tools to include from this MCP server","items":{"type":"string"}},"exclude_tools":{"type":"array","description":"Blacklist of tools to exclude from this MCP server","items":{"type":"string"}}},"required":["tool_type","resource_path"]},"AiAgent":{"type":"object","description":"AI agent step that can use tools to accomplish tasks. The agent receives inputs and can call any of its configured tools to complete the task","properties":{"input_transforms":{"type":"object","description":"Input parameters for the AI agent mapped to their values","properties":{"provider":{"$ref":"#/components/schemas/InputTransform"},"output_type":{"$ref":"#/components/schemas/InputTransform"},"user_message":{"$ref":"#/components/schemas/InputTransform"},"system_prompt":{"$ref":"#/components/schemas/InputTransform"},"streaming":{"$ref":"#/components/schemas/InputTransform"},"messages_context_length":{"$ref":"#/components/schemas/InputTransform"},"output_schema":{"$ref":"#/components/schemas/InputTransform"},"user_images":{"$ref":"#/components/schemas/InputTransform"},"max_completion_tokens":{"$ref":"#/components/schemas/InputTransform"},"temperature":{"$ref":"#/components/schemas/InputTransform"}},"required":["provider","user_message","output_type"]},"tools":{"type":"array","description":"Array of tools the agent can use. The agent decides which tools to call based on the task","items":{"$ref":"#/components/schemas/AgentTool"}},"type":{"type":"string","enum":["aiagent"]},"parallel":{"type":"boolean","description":"If true, the agent can execute multiple tool calls in parallel"}},"required":["tools","type","input_transforms"]},"Identity":{"type":"object","description":"Pass-through module that returns its input unchanged. 
Useful for flow structure or as a placeholder","properties":{"type":{"type":"string","enum":["identity"]},"flow":{"type":"boolean","description":"If true, marks this as a flow identity (special handling)"}},"required":["type"]},"FlowStatus":{"type":"object","properties":{"step":{"type":"integer"},"modules":{"type":"array","items":{"$ref":"#/components/schemas/FlowStatusModule"}},"user_states":{"additionalProperties":true},"preprocessor_module":{"allOf":[{"$ref":"#/components/schemas/FlowStatusModule"}]},"failure_module":{"allOf":[{"$ref":"#/components/schemas/FlowStatusModule"},{"type":"object","properties":{"parent_module":{"type":"string"}}}]},"retry":{"type":"object","properties":{"fail_count":{"type":"integer"},"failed_jobs":{"type":"array","items":{"type":"string","format":"uuid"}}}}},"required":["step","modules","failure_module"]},"FlowStatusModule":{"type":"object","properties":{"type":{"type":"string","enum":["WaitingForPriorSteps","WaitingForEvents","WaitingForExecutor","InProgress","Success","Failure"]},"id":{"type":"string"},"job":{"type":"string","format":"uuid"},"count":{"type":"integer"},"progress":{"type":"integer"},"iterator":{"type":"object","properties":{"index":{"type":"integer"},"itered":{"type":"array","items":{}},"args":{}}},"flow_jobs":{"type":"array","items":{"type":"string"}},"flow_jobs_success":{"type":"array","items":{"type":"boolean"}},"flow_jobs_duration":{"type":"object","properties":{"started_at":{"type":"array","items":{"type":"string"}},"duration_ms":{"type":"array","items":{"type":"integer"}}}},"branch_chosen":{"type":"object","properties":{"type":{"type":"string","enum":["branch","default"]},"branch":{"type":"integer"}},"required":["type"]},"branchall":{"type":"object","properties":{"branch":{"type":"integer"},"len":{"type":"integer"}},"required":["branch","len"]},"approvers":{"type":"array","items":{"type":"object","properties":{"resume_id":{"type":"integer"},"approver":{"type":"string"}},"required":["resume_id","approver"]}},"failed_retries":{"type":"array","items":{"type":"string","format":"uuid"}},"skipped":{"type":"boolean"},"agent_actions":{"type":"array","items":{"type":"object","oneOf":[{"type":"object","properties":{"job_id":{"type":"string","format":"uuid"},"function_name":{"type":"string"},"type":{"type":"string","enum":["tool_call"]},"module_id":{"type":"string"}},"required":["job_id","function_name","type","module_id"]},{"type":"object","properties":{"call_id":{"type":"string","format":"uuid"},"function_name":{"type":"string"},"resource_path":{"type":"string"},"type":{"type":"string","enum":["mcp_tool_call"]},"arguments":{"type":"object"}},"required":["call_id","function_name","resource_path","type"]},{"type":"object","properties":{"type":{"type":"string","enum":["message"]}},"required":["content","type"]}]}},"agent_actions_success":{"type":"array","items":{"type":"boolean"}}},"required":["type"]}}`;
2257
+ export const CLI_COMMANDS = `# Windmill CLI Commands
2258
+
2259
+ The Windmill CLI (\`wmill\`) provides commands for managing scripts, flows, apps, and other resources.
2260
+
2261
+ Current version: 1.591.2
2262
+
2263
+ ## Global Options
2264
+
2265
+ - \`--workspace <workspace:string>\` - Specify the target workspace. This overrides the default workspace.
2266
+ - \`--debug --verbose\` - Show debug/verbose logs
2267
+ - \`--show-diffs\` - Show diff information when syncing (may show sensitive information)
2268
+ - \`--token <token:string>\` - Specify an API token. This will override any stored token.
2269
+ - \`--base-url <baseUrl:string>\` - Specify the base URL of the API. If used, --token and --workspace are required and no local remote/workspace already set will be used.
2270
+ - \`--config-dir <configDir:string>\` - Specify a custom config directory. Overrides WMILL_CONFIG_DIR environment variable and default ~/.config location.
2271
+
2272
+ ## Commands
2273
+
2274
+ ### app
2275
+
2276
+ app related commands
2277
+
2278
+ **Subcommands:**
2279
+
2280
+ - \`app push <file_path:string> <remote_path:string>\` - push a local app
2281
+ - \`app generate-locks [app_folder:string]\` - re-generate the lockfiles for app runnables inline scripts that have changed
2282
+ - \`--yes\` - Skip confirmation prompt
2283
+ - \`--dry-run\` - Perform a dry run without making changes
2284
+ - \`--default-ts <runtime:string>\` - Default TypeScript runtime (bun or deno)
2285
+
2286
+ ### dependencies
2287
+
2288
+ workspace dependencies related commands
2289
+
2290
+ **Alias:** \`deps\`
2291
+
2292
+ **Subcommands:**
2293
+
2294
+ - \`dependencies push <file_path:string>\` - Push workspace dependencies from a local file
2295
+ - \`--language <language:string>\` - Programming language (python3, typescript, go, php). If not specified, will be inferred from file extension.
2296
+ - \`--name <name:string>\` - Name for the dependencies. If not specified, creates workspace default dependencies.
2297
+
2298
+ ### dev
2299
+
2300
+ Launch a dev server that will spawn a webserver with HMR
2301
+
2302
+ **Options:**
2303
+ - \`--includes <pattern...:string>\` - Filter paths given a glob pattern or path
2304
+
2305
+ ### flow
2306
+
2307
+ flow related commands
2308
+
2309
+ **Options:**
2310
+ - \`--show-archived\` - Enable archived scripts in output
2311
+
2312
+ **Subcommands:**
2313
+
2314
+ - \`flow push <file_path:string> <remote_path:string>\` - push a local flow spec. This overrides any remote versions.
2315
+ - \`flow run <path:string>\` - run a flow by path.
2316
+ - \`-d --data <data:string>\` - Inputs specified as a JSON string or a file using @<filename> or stdin using @-.
2317
+ - \`-s --silent\` - Do not output anything other than the final output. Useful for scripting.
2318
+ - \`flow generate-locks [flow:file]\` - re-generate the lock files of all inline scripts of all updated flows
2319
+ - \`--yes\` - Skip confirmation prompt
2320
+ - \`-e --excludes <patterns:file[]>\` - Comma-separated patterns to specify which files NOT to take into account.
2321
+ - \`flow bootstrap <flow_path:string>\` - create a new empty flow
2322
+ - \`--summary <summary:string>\` - script summary
2323
+ - \`--description <description:string>\` - script description
2324
+
2325
+ ### folder
2326
+
2327
+ folder related commands
2328
+
2329
+ **Subcommands:**
2330
+
2331
+ - \`folder push <file_path:string> <remote_path:string>\` - push a local folder spec. This overrides any remote versions.
2332
+
2333
+ ### gitsync-settings
2334
+
2335
+ Manage git-sync settings between local wmill.yaml and Windmill backend
2336
+
2337
+ **Subcommands:**
2338
+
2339
+ - \`gitsync-settings pull\` - Pull git-sync settings from Windmill backend to local wmill.yaml
2340
+ - \`--replace\` - Replace existing settings (non-interactive mode)
2341
+ - \`--diff\` - Show differences without applying changes
2342
+ - \`--json-output\` - Output in JSON format
2343
+ - \`--yes\` - Skip interactive prompts and use default behavior
2344
+ - \`--promotion <branch:string>\` - Use promotionOverrides from the specified branch instead of regular overrides
2345
+ - \`gitsync-settings push\` - Push git-sync settings from local wmill.yaml to Windmill backend
2346
+ - \`--diff\` - Show what would be pushed without applying changes
2347
+ - \`--json-output\` - Output in JSON format
2348
+ - \`--yes\` - Skip interactive prompts and use default behavior
2349
+ - \`--promotion <branch:string>\` - Use promotionOverrides from the specified branch instead of regular overrides
2350
+
2351
+ ### hub
2352
+
2353
+ Hub related commands. EXPERIMENTAL. INTERNAL USE ONLY.
2354
+
2355
+ **Subcommands:**
2356
+
2357
+ - \`hub pull\` - pull any supported definitions. EXPERIMENTAL.
2358
+
2359
+ ### init
2360
+
2361
+ Bootstrap a windmill project with a wmill.yaml file
2362
+
2363
+ **Options:**
2364
+ - \`--use-default\` - Use default settings without checking backend
2365
+ - \`--use-backend\` - Use backend git-sync settings if available
2366
+ - \`--repository <repo:string>\` - Specify repository path (e.g., u/user/repo) when using backend settings
2367
+ - \`--bind-profile\` - Automatically bind active workspace profile to current Git branch
2368
+ - \`--no-bind-profile\` - Skip workspace profile binding prompt
2369
+
2370
+ ### instance
2371
+
2372
+ sync local with a remote instance or the opposite (push or pull)
2373
+
2374
+ **Subcommands:**
2375
+
2376
+ - \`instance add [instance_name:string] [remote:string] [token:string]\` - Add a new instance
2377
+ - \`instance remove <instance:string:instance>\` - Remove an instance
2378
+ - \`instance switch <instance:string:instance>\` - Switch the current instance
2379
+ - \`instance pull\` - Pull instance settings, users, configs, instance groups and overwrite local
2380
+ - \`--yes\` - Pull without needing confirmation
2381
+ - \`--dry-run\` - Perform a dry run without making changes
2382
+ - \`--skip-users\` - Skip pulling users
2383
+ - \`--skip-settings\` - Skip pulling settings
2384
+ - \`--skip-configs\` - Skip pulling configs (worker groups and SMTP)
2385
+ - \`--skip-groups\` - Skip pulling instance groups
2386
+ - \`--include-workspaces\` - Also pull workspaces
2387
+ - \`--folder-per-instance\` - Create a folder per instance
2388
+ - \`--instance <instance:string>\` - Name of the instance to pull from, overrides the active instance
2389
+ - \`--prefix <prefix:string>\` - Prefix of the local workspaces to pull, used to create the folders when using --include-workspaces
2390
+ - \`--prefix-settings\` - Store instance yamls inside prefixed folders when using --prefix and --folder-per-instance
2391
+ - \`instance push\` - Push instance settings, users, configs, group and overwrite remote
2392
+ - \`--yes\` - Push without needing confirmation
2393
+ - \`--dry-run\` - Perform a dry run without making changes
2394
+ - \`--skip-users\` - Skip pushing users
2395
+ - \`--skip-settings\` - Skip pushing settings
2396
+ - \`--skip-configs\` - Skip pushing configs (worker groups and SMTP)
2397
+ - \`--skip-groups\` - Skip pushing instance groups
2398
+ - \`--include-workspaces\` - Also push workspaces
2399
+ - \`--folder-per-instance\` - Create a folder per instance
2400
+ - \`--instance <instance:string>\` - Name of the instance to push to, overrides the active instance
2401
+ - \`--prefix <prefix:string>\` - Prefix of the local workspaces folders to push
2402
+ - \`--prefix-settings\` - Store instance yamls inside prefixed folders when using --prefix and --folder-per-instance
2403
+ - \`instance whoami\` - Display information about the currently logged-in user
2404
+
2405
+ ### jobs
2406
+
2407
+ Pull completed and queued jobs from workspace
2408
+
2409
+ **Arguments:** \`[workspace:string]\`
2410
+
2411
+ **Options:**
2412
+ - \`-c, --completed-output <file:string>\` - Completed jobs output file (default: completed_jobs.json)
2413
+ - \`-q, --queued-output <file:string>\` - Queued jobs output file (default: queued_jobs.json)
2414
+ - \`--skip-worker-check\` - Skip checking for active workers before export
2415
+
2416
+ ### queues
2417
+
2418
+ List all queues with their metrics
2419
+
2420
+ **Arguments:** \`[workspace:string] the optional workspace to filter by (defaults to all workspaces)\`
2421
+
2422
+ **Options:**
2423
+ - \`--instance [instance]\` - Name of the instance to target, overrides the active instance
2424
+ - \`--base-url [baseUrl]\` - If used with --token, will be used as the base url for the instance
2425
+
2426
+ ### resource
2427
+
2428
+ resource related commands
2429
+
2430
+ **Subcommands:**
2431
+
2432
+ - \`resource push <file_path:string> <remote_path:string>\` - push a local resource spec. This overrides any remote versions.
2433
+
2434
+ ### resource-type
2435
+
2436
+ resource type related commands
2437
+
2438
+ **Subcommands:**
2439
+
2440
+ - \`resource-type list\` - list all resource types
2441
+ - \`--schema\` - Show schema in the output
2442
+ - \`resource-type push <file_path:string> <name:string>\` - push a local resource spec. This overrides any remote versions.
2443
+ - \`resource-type generate-namespace\` - Create a TypeScript definition file with the RT namespace generated from the resource types
2444
+
2445
+ ### schedule
2446
+
2447
+ schedule related commands
2448
+
2449
+ **Subcommands:**
2450
+
2451
+ - \`schedule push <file_path:string> <remote_path:string>\` - push a local schedule spec. This overrides any remote versions.
2452
+
2453
+ ### script
2454
+
2455
+ script related commands
2456
+
2457
+ **Options:**
2458
+ - \`--show-archived\` - Enable archived scripts in output
2459
+
2460
+ **Subcommands:**
2461
+
2462
+ - \`script push <path:file>\` - push a local script spec. This overrides any remote versions. Use the script file (.ts, .js, .py, .sh)
2463
+ - \`script show <path:file>\` - show a scripts content
2464
+ - \`script run <path:file>\` - run a script by path
2465
+ - \`-d --data <data:file>\` - Inputs specified as a JSON string or a file using @<filename> or stdin using @-.
2466
+ - \`-s --silent\` - Do not output anything other than the final output. Useful for scripting.
2467
+ - \`script bootstrap <path:file> <language:string>\` - create a new script
2468
+ - \`--summary <summary:string>\` - script summary
2469
+ - \`--description <description:string>\` - script description
2470
+ - \`script generate-metadata [script:file]\` - re-generate the metadata file updating the lock and the script schema (for flows, use \`wmill flow generate-locks\`)
2471
+ - \`--yes\` - Skip confirmation prompt
2472
+ - \`--dry-run\` - Perform a dry run without making changes
2473
+ - \`--lock-only\` - re-generate only the lock
2474
+ - \`--schema-only\` - re-generate only script schema
2475
+ - \`-e --excludes <patterns:file[]>\` - Comma-separated patterns to specify which files NOT to take into account.
2476
+
2477
+ ### sync
2478
+
2479
+ sync local with a remote workspace or the opposite (push or pull)
2480
+
2481
+ **Subcommands:**
2482
+
2483
+ - \`sync pull\` - Pull any remote changes and apply them locally.
2484
+ - \`--yes\` - Pull without needing confirmation
2485
+ - \`--dry-run\` - Show changes that would be pulled without actually pulling
2486
+ - \`--plain-secrets\` - Pull secrets as plain text
2487
+ - \`--json\` - Use JSON instead of YAML
2488
+ - \`--skip-variables\` - Skip syncing variables (including secrets)
2489
+ - \`--skip-secrets\` - Skip syncing secret variables only
2490
+ - \`--skip-resources\` - Skip syncing resources
2491
+ - \`--skip-resource-types\` - Skip syncing resource types
2492
+ - \`--skip-scripts\` - Skip syncing scripts
2493
+ - \`--skip-flows\` - Skip syncing flows
2494
+ - \`--skip-apps\` - Skip syncing apps
2495
+ - \`--skip-folders\` - Skip syncing folders
2496
+ - \`--skip-workspace-dependencies\` - Skip syncing workspace dependencies
2497
+ - \`--skip-scripts-metadata\` - Skip syncing script metadata, focusing solely on the logic
2498
+ - \`--include-schedules\` - Include syncing schedules
2499
+ - \`--include-triggers\` - Include syncing triggers
2500
+ - \`--include-users\` - Include syncing users
2501
+ - \`--include-groups\` - Include syncing groups
2502
+ - \`--include-settings\` - Include syncing workspace settings
2503
+ - \`--include-key\` - Include workspace encryption key
2504
+ - \`--skip-branch-validation\` - Skip git branch validation and prompts
2505
+ - \`--json-output\` - Output results in JSON format
2506
+ - \`-e --excludes <patterns:file[]>\` - Comma-separated patterns specifying which files NOT to take into account. Overrides wmill.yaml excludes
2507
+ - \`--repository <repo:string>\` - Specify repository path (e.g., u/user/repo) when multiple repositories exist
2508
+ - \`--promotion <branch:string>\` - Use promotionOverrides from the specified branch instead of regular overrides
2509
+ - \`sync push\` - Push any local changes and apply them remotely.
2510
+ - \`--yes\` - Push without needing confirmation
2511
+ - \`--dry-run\` - Show changes that would be pushed without actually pushing
2512
+ - \`--plain-secrets\` - Push secrets as plain text
2513
+ - \`--json\` - Use JSON instead of YAML
2514
+ - \`--skip-variables\` - Skip syncing variables (including secrets)
2515
+ - \`--skip-secrets\` - Skip syncing only secrets variables
2516
+ - \`--skip-resources\` - Skip syncing resources
2517
+ - \`--skip-resource-types\` - Skip syncing resource types
2518
+ - \`--skip-scripts\` - Skip syncing scripts
2519
+ - \`--skip-flows\` - Skip syncing flows
2520
+ - \`--skip-apps\` - Skip syncing apps
2521
+ - \`--skip-folders\` - Skip syncing folders
2522
+ - \`--skip-workspace-dependencies\` - Skip syncing workspace dependencies
2523
+ - \`--skip-scripts-metadata\` - Skip syncing script metadata, focusing solely on the logic
2524
+ - \`--include-schedules\` - Include syncing schedules
2525
+ - \`--include-triggers\` - Include syncing triggers
2526
+ - \`--include-users\` - Include syncing users
2527
+ - \`--include-groups\` - Include syncing groups
2528
+ - \`--include-settings\` - Include syncing workspace settings
2529
+ - \`--include-key\` - Include workspace encryption key
2530
+ - \`--skip-branch-validation\` - Skip git branch validation and prompts
2531
+ - \`--json-output\` - Output results in JSON format
2532
+ - \`-e --excludes <patterns:file[]>\` - Comma-separated patterns specifying which files NOT to take into account.
2533
+ - \`--message <message:string>\` - Include a message that will be added to all scripts/flows/apps updated during this push
2534
+ - \`--parallel <number>\` - Number of changes to process in parallel
2535
+ - \`--repository <repo:string>\` - Specify repository path (e.g., u/user/repo) when multiple repositories exist
2536
+
2537
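+ **Examples** (illustrative; the push message is a placeholder):
+ 
+ \`\`\`bash
+ wmill sync pull --dry-run --skip-variables
+ wmill sync push --yes --message "deploy from CI"
+ \`\`\`
+ 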
+ ### trigger
2538
+
2539
+ trigger related commands
2540
+
2541
+ **Subcommands:**
2542
+
2543
+ - \`trigger push <file_path:string> <remote_path:string>\` - push a local trigger spec. This overrides any remote versions.
2544
+
2545
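+ **Example** (illustrative; the local file and remote path are placeholders):
+ 
+ \`\`\`bash
+ wmill trigger push ./my_trigger.yaml f/examples/my_trigger
+ \`\`\`
+ 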
+ ### user
2546
+
2547
+ user related commands
2548
+
2549
+ **Subcommands:**
2550
+
2551
+ - \`user add <email:string> [password:string]\` - Create a user
2552
+ - \`--superadmin\` - Make the new user a superadmin.
2553
+ - \`--company <company:string>\` - Set the company of the new user.
2554
+ - \`--name <name:string>\` - Set the name of the new user.
2555
+ - \`user remove <email:string>\` - Delete a user
2556
+ - \`user create-token\`
2557
+
2558
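+ **Example** (illustrative; the email and names are placeholders):
+ 
+ \`\`\`bash
+ wmill user add alice@example.com --name "Alice" --company "Acme"
+ \`\`\`
+ 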
+ ### variable
2559
+
2560
+ variable related commands
2561
+
2562
+ **Subcommands:**
2563
+
2564
+ - \`variable push <file_path:string> <remote_path:string>\` - Push a local variable spec. This overrides any remote versions.
2565
+ - \`--plain-secrets\` - Push secrets as plain text
2566
+ - \`variable add <value:string> <remote_path:string>\` - Create a new variable on the remote. This will update the variable if it already exists.
2567
+ - \`--plain-secrets\` - Push secrets as plain text
2568
+ - \`--public\` - Legacy option, use --plain-secrets instead
2569
+
2570
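+ **Example** (illustrative; the value and remote path are placeholders):
+ 
+ \`\`\`bash
+ wmill variable add "my-value" u/alice/my_variable
+ \`\`\`
+ 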
+ ### version
2571
+
2572
+ Show version information
2573
+
2574
+ ### worker-groups
2575
+
2576
+ display worker groups, and pull and push worker group configs
2577
+
2578
+ **Subcommands:**
2579
+
2580
+ - \`worker-groups pull\` - Pull worker groups (similar to \`wmill instance pull --skip-users --skip-settings --skip-groups\`)
2581
+ - \`--instance\` - Name of the instance to pull from, overriding the active instance
2582
+ - \`--base-url\` - Base url to be passed to the instance settings instead of the local one
2583
+ - \`--yes\` - Pull without needing confirmation
2584
+ - \`worker-groups push\` - Push instance settings, users, configs, and groups, overwriting the remote
2585
+ - \`--instance [instance]\` - Name of the instance to push to, overriding the active instance
2586
+ - \`--base-url [baseUrl]\` - If used with --token, will be used as the base url for the instance
2587
+ - \`--yes\` - Push without needing confirmation
2588
+
2589
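+ **Examples** (illustrative; \`prod\` is a placeholder instance name):
+ 
+ \`\`\`bash
+ wmill worker-groups pull --yes
+ wmill worker-groups push --instance prod --yes
+ \`\`\`
+ 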
+ ### workers
2590
+
2591
+ List all workers, grouped by worker group
2592
+
2593
+ **Options:**
2594
+ - \`--instance [instance]\` - Name of the instance to target, overriding the active instance
2595
+ - \`--base-url [baseUrl]\` - If used with --token, will be used as the base url for the instance
2596
+
2597
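+ **Example** (illustrative; \`prod\` is a placeholder instance name):
+ 
+ \`\`\`bash
+ wmill workers --instance prod
+ \`\`\`
+ 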
+ ### workspace
2598
+
2599
+ workspace related commands
2600
+
2601
+ **Alias:** \`profile\`
2602
+
2603
+ **Subcommands:**
2604
+
2605
+ - \`workspace switch <workspace_name:string:workspace>\` - Switch to another workspace
2606
+ - \`workspace add [workspace_name:string] [workspace_id:string] [remote:string]\` - Add a workspace
2607
+ - \`-c --create\` - Create the workspace if it does not exist
2608
+ - \`--create-workspace-name <workspace_name:string>\` - Specify the workspace name. Ignored if --create is not specified or the workspace already exists. Will default to the workspace id.
2609
+ - \`workspace remove <workspace_name:string>\` - Remove a workspace
2610
+ - \`workspace whoami\` - Show the currently active user
2611
+ - \`workspace bind\` - Bind the current Git branch to the active workspace
2612
+ - \`--branch <branch:string>\` - Specify branch (defaults to current)
2613
+ - \`workspace unbind\` - Remove workspace binding from the current Git branch
2614
+ - \`--branch <branch:string>\` - Specify branch (defaults to current)
2615
+ - \`workspace fork [workspace_name:string] [workspace_id:string]\` - Create a forked workspace
2616
+ - \`--create-workspace-name <workspace_name:string>\` - Specify the workspace name. Ignored if --create is not specified or the workspace already exists. Will default to the workspace id.
2617
+ - \`workspace delete-fork <fork_name:string>\` - Delete a forked workspace and its git branch
2618
+ - \`-y --yes\` - Skip confirmation prompt
2619
+
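+ **Examples** (illustrative; the workspace name, id and remote URL are placeholders):
+ 
+ \`\`\`bash
+ wmill workspace add my_workspace my_workspace https://app.windmill.dev/ --create
+ wmill workspace switch my_workspace
+ \`\`\`
+ 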
2620
+ `;