windmill-cli 1.694.0 → 1.695.0

Files changed (2)
  1. package/esm/main.js +223 -29
  2. package/package.json +1 -1
package/esm/main.js CHANGED
@@ -16710,7 +16710,7 @@ var init_OpenAPI = __esm(() => {
  PASSWORD: undefined,
  TOKEN: getEnv3("WM_TOKEN"),
  USERNAME: undefined,
- VERSION: "1.694.0",
+ VERSION: "1.695.0",
  WITH_CREDENTIALS: true,
  interceptors: {
    request: new Interceptors,
@@ -79022,6 +79022,37 @@ Name the parameters by adding comments before the statement:
  -- @name2 (int64) = 0
  SELECT * FROM users WHERE name = @name1 AND age > @name2;
  \`\`\`
+
+ ## Receiving an S3Object as a script parameter
+
+ Declare the arg with type \`(s3object)\`. Windmill renders an S3 file picker for
+ it, downloads the file, and binds it as a \`STRING\` JSON parameter — Parquet/CSV
+ files are decoded server-side into a JSON array of records, JSON/JSONL pass
+ through. Consume with \`JSON_EXTRACT_ARRAY\` / \`JSON_VALUE\`:
+
+ \`\`\`sql
+ -- @file (s3object)
+ SELECT
+   CAST(JSON_VALUE(row, '$.id') AS INT64) AS id,
+   JSON_VALUE(row, '$.name') AS name
+ FROM UNNEST(JSON_EXTRACT_ARRAY(@file)) AS row;
+ \`\`\`
+
+ ## Streaming query results to S3
+
+ Add a \`-- s3\` directive at the top of the script to stream the result set to S3
+ instead of returning rows. Windmill writes the file and returns its \`S3Object\`
+ as the script result.
+
+ \`\`\`sql
+ -- s3 prefix=exports/users format=parquet
+ SELECT id, name FROM users;
+ \`\`\`
+
+ All keys are optional: \`prefix\` (object key prefix), \`storage\` (named storage —
+ omit to use the workspace default), \`format\` (\`json\` (default), \`parquet\`, or
+ \`csv\`). Use this for large result sets — rows stream directly to S3 instead of
+ being buffered, bypassing the 10000-row return cap.
  `,
  "write-script-bun": `---
  name: write-script-bun
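The `-- s3` directive documented in this hunk also accepts a `storage` key that the bundled example never exercises. A minimal sketch of a directive using all three documented keys, where `secondary_store` is a hypothetical named storage and the table is illustrative:

```sql
-- s3 prefix=exports/users storage=secondary_store format=csv
-- Rows stream to the named storage as CSV; omit storage= to use the workspace default.
SELECT id, name FROM users;
```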
@@ -79145,19 +79176,20 @@ export async function preprocessor(event: Event) {

  ## S3 Object Operations

- Windmill provides built-in support for S3-compatible storage operations.
-
- ### S3Object Type
+ Windmill provides built-in support for S3-compatible storage operations. The \`wmill.S3Object\` type covers both the \`s3://storage/key\` URI form (\`s3:///key\` for the workspace default storage) and the \`{ s3, storage? }\` record form — always use it instead of redefining your own.

- The S3Object type represents a file in S3 storage:
+ ### Receiving an S3Object as a script parameter

  \`\`\`typescript
- type S3Object = {
-   s3: string; // Path within the bucket
- };
+ import * as wmill from "windmill-client";
+
+ export async function main(file: wmill.S3Object) {
+   const content = await wmill.loadS3File(file);
+   // ...
+ }
  \`\`\`

- ## TypeScript Operations
+ ### S3 operations

  \`\`\`typescript
  import * as wmill from "windmill-client";
@@ -79169,7 +79201,7 @@ const content: Uint8Array = await wmill.loadS3File(s3object);
  const blob: Blob = await wmill.loadS3FileStream(s3object);

  // Write file to S3
- const result: S3Object = await wmill.writeS3File(
+ const result: wmill.S3Object = await wmill.writeS3File(
    s3object, // Target path (or undefined to auto-generate)
    fileContent, // string or Blob
    s3ResourcePath // Optional: specific S3 resource to use
79835
79867
 
79836
79868
  ## S3 Object Operations
79837
79869
 
79838
- Windmill provides built-in support for S3-compatible storage operations.
79839
-
79840
- ### S3Object Type
79870
+ Windmill provides built-in support for S3-compatible storage operations. The \`wmill.S3Object\` type covers both the \`s3://storage/key\` URI form (\`s3:///key\` for the workspace default storage) and the \`{ s3, storage? }\` record form — always use it instead of redefining your own.
79841
79871
 
79842
- The S3Object type represents a file in S3 storage:
79872
+ ### Receiving an S3Object as a script parameter
79843
79873
 
79844
79874
  \`\`\`typescript
79845
- type S3Object = {
79846
- s3: string; // Path within the bucket
79847
- };
79875
+ import * as wmill from "windmill-client";
79876
+
79877
+ export async function main(file: wmill.S3Object) {
79878
+ const content = await wmill.loadS3File(file);
79879
+ // ...
79880
+ }
79848
79881
  \`\`\`
79849
79882
 
79850
- ## TypeScript Operations
79883
+ ### S3 operations
79851
79884
 
79852
79885
  \`\`\`typescript
79853
79886
  import * as wmill from "windmill-client";
@@ -79859,7 +79892,7 @@ const content: Uint8Array = await wmill.loadS3File(s3object);
  const blob: Blob = await wmill.loadS3FileStream(s3object);

  // Write file to S3
- const result: S3Object = await wmill.writeS3File(
+ const result: wmill.S3Object = await wmill.writeS3File(
    s3object, // Target path (or undefined to auto-generate)
    fileContent, // string or Blob
    s3ResourcePath // Optional: specific S3 resource to use
@@ -80613,19 +80646,20 @@ export async function preprocessor(event: Event) {

  ## S3 Object Operations

- Windmill provides built-in support for S3-compatible storage operations.
+ Windmill provides built-in support for S3-compatible storage operations. The \`wmill.S3Object\` type covers both the \`s3://storage/key\` URI form (\`s3:///key\` for the workspace default storage) and the \`{ s3, storage? }\` record form — always use it instead of redefining your own.

- ### S3Object Type
-
- The S3Object type represents a file in S3 storage:
+ ### Receiving an S3Object as a script parameter

  \`\`\`typescript
- type S3Object = {
-   s3: string; // Path within the bucket
- };
+ import * as wmill from "windmill-client";
+
+ export async function main(file: wmill.S3Object) {
+   const content = await wmill.loadS3File(file);
+   // ...
+ }
  \`\`\`

- ## TypeScript Operations
+ ### S3 operations

  \`\`\`typescript
  import * as wmill from "windmill-client";
@@ -80637,7 +80671,7 @@ const content: Uint8Array = await wmill.loadS3File(s3object);
  const blob: Blob = await wmill.loadS3FileStream(s3object);

  // Write file to S3
- const result: S3Object = await wmill.writeS3File(
+ const result: wmill.S3Object = await wmill.writeS3File(
    s3object, // Target path (or undefined to auto-generate)
    fileContent, // string or Blob
    s3ResourcePath // Optional: specific S3 resource to use
@@ -81274,6 +81308,30 @@ SELECT * FROM read_parquet('s3:///path/to/file.parquet');
  -- JSON files
  SELECT * FROM read_json('s3:///path/to/file.json');
  \`\`\`
+
+ ### Receiving an S3Object as a script parameter
+
+ Declare the arg with type \`(s3object)\`. Windmill renders an S3 file picker for it
+ and binds the arg as the bare \`s3://storage/key\` URI, which DuckDB's reader
+ functions consume directly:
+
+ \`\`\`sql
+ -- $file (s3object)
+ SELECT * FROM read_parquet($file);
+ \`\`\`
+
+ Works with any DuckDB reader: \`read_csv($file)\`, \`read_json($file)\`, etc.
+
+ ### Writing query results to S3
+
+ DuckDB writes to S3 natively via \`COPY ... TO\`:
+
+ \`\`\`sql
+ COPY (SELECT * FROM users) TO 's3:///exports/users.parquet' (FORMAT PARQUET);
+ \`\`\`
+
+ Use this instead of the \`-- s3\` streaming directive supported by the other SQL
+ dialects — that directive is not available in DuckDB.
  `,
  "write-script-go": `---
  name: write-script-go
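Combining the two DuckDB additions in this hunk, a sketch of a script that reads the picked file and writes a filtered copy back to the workspace default storage; the columns, filter, and destination key are illustrative:

```sql
-- $file (s3object)
COPY (
  SELECT id, name
  FROM read_parquet($file)   -- $file binds as a bare s3:// URI per the docs above
  WHERE name IS NOT NULL
) TO 's3:///exports/filtered.parquet' (FORMAT PARQUET);
```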
@@ -81590,6 +81648,36 @@ Name the parameters by adding comments before the statement:
  -- @P2 name2 (int) = 0
  SELECT * FROM users WHERE name = @P1 AND age > @P2;
  \`\`\`
+
+ ## Receiving an S3Object as a script parameter
+
+ Declare the arg with type \`(s3object)\`. Windmill renders an S3 file picker for
+ it, downloads the file, and binds it as \`nvarchar(max)\` JSON text — Parquet/CSV
+ files are decoded server-side into a JSON array of records, JSON/JSONL pass
+ through. Consume with \`OPENJSON\`:
+
+ \`\`\`sql
+ -- @P1 file (s3object)
+ SELECT id, name
+ FROM OPENJSON(@P1)
+   WITH (id INT, name NVARCHAR(200));
+ \`\`\`
+
+ ## Streaming query results to S3
+
+ Add a \`-- s3\` directive at the top of the script to stream the result set to S3
+ instead of returning rows. Windmill writes the file and returns its \`S3Object\`
+ as the script result.
+
+ \`\`\`sql
+ -- s3 prefix=exports/users format=parquet
+ SELECT id, name FROM users;
+ \`\`\`
+
+ All keys are optional: \`prefix\` (object key prefix), \`storage\` (named storage —
+ omit to use the workspace default), \`format\` (\`json\` (default), \`parquet\`, or
+ \`csv\`). Use this for large result sets — rows stream directly to S3 instead of
+ being buffered as the script return value.
  `,
  "write-script-mysql": `---
  name: write-script-mysql
@@ -81642,6 +81730,37 @@ Name the parameters by adding comments before the statement:
  -- ? name2 (int) = 0
  SELECT * FROM users WHERE name = ? AND age > ?;
  \`\`\`
+
+ ## Receiving an S3Object as a script parameter
+
+ Declare the arg with type \`(s3object)\`. Windmill renders an S3 file picker for
+ it, downloads the file, and binds it as JSON text — Parquet/CSV files are
+ decoded server-side into a JSON array of records, JSON/JSONL pass through.
+ Consume with \`JSON_TABLE\`:
+
+ \`\`\`sql
+ -- ? file (s3object)
+ SELECT id, name
+ FROM JSON_TABLE(?, '$[*]'
+   COLUMNS (id INT PATH '$.id', name VARCHAR(200) PATH '$.name')
+ ) AS r;
+ \`\`\`
+
+ ## Streaming query results to S3
+
+ Add a \`-- s3\` directive at the top of the script to stream the result set to S3
+ instead of returning rows. Windmill writes the file and returns its \`S3Object\`
+ as the script result.
+
+ \`\`\`sql
+ -- s3 prefix=exports/users format=parquet
+ SELECT id, name FROM users;
+ \`\`\`
+
+ All keys are optional: \`prefix\` (object key prefix), \`storage\` (named storage —
+ omit to use the workspace default), \`format\` (\`json\` (default), \`parquet\`, or
+ \`csv\`). Use this for large result sets — rows stream directly to S3 instead of
+ being buffered as the script return value.
  `,
  "write-script-nativets": `---
  name: write-script-nativets
@@ -82449,6 +82568,35 @@ Name the parameters by adding comments at the beginning of the script (without s
  -- $2 name2 = default_value
  SELECT * FROM users WHERE name = $1::TEXT AND age > $2::INT;
  \`\`\`
+
+ ## Receiving an S3Object as a script parameter
+
+ Declare the arg with type \`(s3object)\`. Windmill renders an S3 file picker for
+ it, downloads the file, and binds it as a \`jsonb\` parameter — Parquet/CSV files
+ are decoded server-side into a JSON array of records, JSON/JSONL pass through.
+ Consume with \`jsonb_to_recordset\` (or any \`jsonb\` API):
+
+ \`\`\`sql
+ -- $1 file (s3object)
+ SELECT *
+ FROM jsonb_to_recordset($1::jsonb) AS r(id INT, name TEXT);
+ \`\`\`
+
+ ## Streaming query results to S3
+
+ Add a \`-- s3\` directive at the top of the script to stream the result set to S3
+ instead of returning rows. Windmill writes the file and returns its \`S3Object\`
+ as the script result.
+
+ \`\`\`sql
+ -- s3 prefix=exports/users format=parquet
+ SELECT id, name FROM users;
+ \`\`\`
+
+ All keys are optional: \`prefix\` (object key prefix), \`storage\` (named storage —
+ omit to use the workspace default), \`format\` (\`json\` (default), \`parquet\`, or
+ \`csv\`). Use this for large result sets — rows stream directly to S3 instead of
+ being buffered as the script return value.
  `,
  "write-script-powershell": `---
  name: write-script-powershell
@@ -82686,6 +82834,21 @@ def preprocessor(event: Event):

  Windmill provides built-in support for S3-compatible storage operations.

+ ### Receiving an S3Object as a script parameter
+
+ To accept a file from S3 as input to a script, type the parameter with \`S3Object\` (imported from \`wmill\`):
+
+ \`\`\`python
+ import wmill
+ from wmill import S3Object
+
+ def main(file: S3Object):
+     content = wmill.load_s3_file(file)
+     # ...
+ \`\`\`
+
+ ### S3 operations
+
  \`\`\`python
  import wmill

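The Python hunk cuts off at the start of the "S3 operations" block. Assuming the Python client mirrors the TypeScript helpers shown earlier in this diff (with `write_s3_file` as the counterpart of `writeS3File`), a round-trip sketch; the target key is illustrative:

```python
import wmill
from wmill import S3Object

def main(file: S3Object):
    # load_s3_file is documented in the hunk above
    content = wmill.load_s3_file(file)

    # write_s3_file is assumed to mirror the TypeScript writeS3File helper
    # shown earlier in this diff; "exports/copy.bin" is an illustrative key
    result = wmill.write_s3_file(S3Object(s3="exports/copy.bin"), content)
    return result
```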
@@ -83690,6 +83853,37 @@ Name the parameters by adding comments before the statement:
  -- ? name2 (number) = 0
  SELECT * FROM users WHERE name = ? AND age > ?;
  \`\`\`
+
+ ## Receiving an S3Object as a script parameter
+
+ Declare the arg with type \`(s3object)\`. Windmill renders an S3 file picker for
+ it, downloads the file, and binds it as JSON text — Parquet/CSV files are
+ decoded server-side into a JSON array of records, JSON/JSONL pass through.
+ Wrap the bind with \`PARSE_JSON(?)\` and walk it with \`LATERAL FLATTEN\`:
+
+ \`\`\`sql
+ -- ? file (s3object)
+ SELECT
+   v.value:id::NUMBER AS id,
+   v.value:name::STRING AS name
+ FROM LATERAL FLATTEN(input => PARSE_JSON(?)) v;
+ \`\`\`
+
+ ## Streaming query results to S3
+
+ Add a \`-- s3\` directive at the top of the script to stream the result set to S3
+ instead of returning rows. Windmill writes the file and returns its \`S3Object\`
+ as the script result.
+
+ \`\`\`sql
+ -- s3 prefix=exports/users format=parquet
+ SELECT id, name FROM users;
+ \`\`\`
+
+ All keys are optional: \`prefix\` (object key prefix), \`storage\` (named storage —
+ omit to use the workspace default), \`format\` (\`json\` (default), \`parquet\`, or
+ \`csv\`). Use this for large result sets — rows stream directly to S3 instead of
+ being buffered, bypassing the 10000-row return cap.
  `,
  "write-flow": `---
  name: write-flow
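Each of the `-- s3` sections above says the script returns the file's `S3Object` as its result. A sketch of a downstream TypeScript step consuming that result with the helper documented earlier in this diff; the parameter name is illustrative:

```typescript
import * as wmill from "windmill-client";

// exportResult: the S3Object returned by an upstream `-- s3` SQL step
// (the parameter name is illustrative, not a Windmill convention)
export async function main(exportResult: wmill.S3Object) {
  // loadS3File is documented in the hunks above
  const bytes: Uint8Array = await wmill.loadS3File(exportResult);
  return { size: bytes.length };
}
```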
@@ -88745,7 +88939,7 @@ var config_default = command35;

  // src/main.ts
  await init_context();
- var VERSION = "1.694.0";
+ var VERSION = "1.695.0";
  async function checkVersionSafe(cmd) {
    const mainCommand = cmd.getMainCommand();
    const upgradeCommand = mainCommand.getCommand("upgrade");
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "windmill-cli",
-   "version": "1.694.0",
+   "version": "1.695.0",
    "description": "CLI for Windmill",
    "license": "Apache 2.0",
    "type": "module",