wmill 1.555.1__tar.gz → 1.605.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: wmill
- Version: 1.555.1
+ Version: 1.605.0
  Summary: A client library for accessing Windmill server wrapping the Windmill client API
  Home-page: https://windmill.dev
  License: Apache-2.0
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "wmill"
- version = "1.555.1"
+ version = "1.605.0"
  description = "A client library for accessing Windmill server wrapping the Windmill client API"
  license = "Apache-2.0"
  homepage = "https://windmill.dev"
@@ -32,7 +32,17 @@ JobStatus = Literal["RUNNING", "WAITING", "COMPLETED"]


  class Windmill:
+ """Windmill client for interacting with the Windmill API."""
+
  def __init__(self, base_url=None, token=None, workspace=None, verify=True):
+ """Initialize the Windmill client.
+
+ Args:
+ base_url: API base URL (defaults to BASE_INTERNAL_URL or WM_BASE_URL env)
+ token: Authentication token (defaults to WM_TOKEN env)
+ workspace: Workspace ID (defaults to WM_WORKSPACE env)
+ verify: Whether to verify SSL certificates
+ """
  base = (
  base_url
  or os.environ.get("BASE_INTERNAL_URL")
@@ -75,6 +85,11 @@ class Windmill:
  return mocked_api

  def get_client(self) -> httpx.Client:
+ """Get the HTTP client instance.
+
+ Returns:
+ Configured httpx.Client for API requests
+ """
  return httpx.Client(
  base_url=self.base_url,
  headers=self.headers,
@@ -82,6 +97,16 @@ class Windmill:
  )

  def get(self, endpoint, raise_for_status=True, **kwargs) -> httpx.Response:
+ """Make an HTTP GET request to the Windmill API.
+
+ Args:
+ endpoint: API endpoint path
+ raise_for_status: Whether to raise an exception on HTTP errors
+ **kwargs: Additional arguments passed to httpx.get
+
+ Returns:
+ HTTP response object
+ """
  endpoint = endpoint.lstrip("/")
  resp = self.client.get(f"/{endpoint}", **kwargs)
  if raise_for_status:
@@ -94,6 +119,16 @@ class Windmill:
  return resp

  def post(self, endpoint, raise_for_status=True, **kwargs) -> httpx.Response:
+ """Make an HTTP POST request to the Windmill API.
+
+ Args:
+ endpoint: API endpoint path
+ raise_for_status: Whether to raise an exception on HTTP errors
+ **kwargs: Additional arguments passed to httpx.post
+
+ Returns:
+ HTTP response object
+ """
  endpoint = endpoint.lstrip("/")
  resp = self.client.post(f"/{endpoint}", **kwargs)
  if raise_for_status:
@@ -106,6 +141,14 @@ class Windmill:
  return resp

  def create_token(self, duration=dt.timedelta(days=1)) -> str:
+ """Create a new authentication token.
+
+ Args:
+ duration: Token validity duration (default: 1 day)
+
+ Returns:
+ New authentication token string
+ """
  endpoint = "/users/tokens/create"
  payload = {
  "label": f"refresh {time.time()}",
@@ -276,6 +319,21 @@ class Windmill:
  cleanup=cleanup, assert_result_is_not_none=assert_result_is_not_none
  )

+ def run_inline_script_preview(
+ self,
+ content: str,
+ language: str,
+ args: dict = None,
+ ) -> Any:
+ """Run a script on the current worker without creating a job"""
+ endpoint = f"/w/{self.workspace}/jobs/run_inline/preview"
+ body = {
+ "content": content,
+ "language": language,
+ "args": args or {},
+ }
+ return self.post(endpoint, json=body).json()
+
  def wait_job(
  self,
  job_id,
@@ -284,6 +342,22 @@ class Windmill:
  cleanup: bool = True,
  assert_result_is_not_none: bool = False,
  ):
+ """Wait for a job to complete and return its result.
+
+ Args:
+ job_id: ID of the job to wait for
+ timeout: Maximum time to wait (seconds or timedelta)
+ verbose: Enable verbose logging
+ cleanup: Register cleanup handler to cancel job on exit
+ assert_result_is_not_none: Raise exception if result is None
+
+ Returns:
+ Job result when completed
+
+ Raises:
+ TimeoutError: If timeout is reached
+ Exception: If job fails
+ """
  def cancel_job():
  logger.warning(f"cancelling job: {job_id}")
  self.post(
@@ -337,6 +411,27 @@ class Windmill:

  time.sleep(0.5)

+ def cancel_job(self, job_id: str, reason: str = None) -> str:
+ """Cancel a specific job by ID.
+
+ Args:
+ job_id: UUID of the job to cancel
+ reason: Optional reason for cancellation
+
+ Returns:
+ Response message from the cancel endpoint
+ """
+ logger.info(f"cancelling job: {job_id}")
+
+ payload = {"reason": reason or "cancelled via cancel_job method"}
+
+ response = self.post(
+ f"/w/{self.workspace}/jobs_u/queue/cancel/{job_id}",
+ json=payload,
+ )
+
+ return response.text
+
  def cancel_running(self) -> dict:
  """Cancel currently running executions of the same script."""
  logger.info("canceling running executions of this script")
@@ -371,16 +466,52 @@ class Windmill:
  return result

  def get_job(self, job_id: str) -> dict:
+ """Get job details by ID.
+
+ Args:
+ job_id: UUID of the job
+
+ Returns:
+ Job details dictionary
+ """
  return self.get(f"/w/{self.workspace}/jobs_u/get/{job_id}").json()

  def get_root_job_id(self, job_id: str | None = None) -> dict:
+ """Get the root job ID for a flow hierarchy.
+
+ Args:
+ job_id: Job ID (defaults to current WM_JOB_ID)
+
+ Returns:
+ Root job ID
+ """
  job_id = job_id or os.environ.get("WM_JOB_ID")
  return self.get(f"/w/{self.workspace}/jobs_u/get_root_job_id/{job_id}").json()

- def get_id_token(self, audience: str) -> str:
- return self.post(f"/w/{self.workspace}/oidc/token/{audience}").text
+ def get_id_token(self, audience: str, expires_in: int | None = None) -> str:
+ """Get an OIDC JWT token for authentication to external services.
+
+ Args:
+ audience: Token audience (e.g., "vault", "aws")
+ expires_in: Optional expiration time in seconds
+
+ Returns:
+ JWT token string
+ """
+ params = {}
+ if expires_in is not None:
+ params["expires_in"] = expires_in
+ return self.post(f"/w/{self.workspace}/oidc/token/{audience}", params=params).text

  def get_job_status(self, job_id: str) -> JobStatus:
+ """Get the status of a job.
+
+ Args:
+ job_id: UUID of the job
+
+ Returns:
+ Job status: "RUNNING", "WAITING", or "COMPLETED"
+ """
  job = self.get_job(job_id)
  job_type = job.get("type", "")
  assert job_type, f"{job} is not a valid job"
@@ -395,6 +526,15 @@ class Windmill:
  job_id: str,
  assert_result_is_not_none: bool = True,
  ) -> Any:
+ """Get the result of a completed job.
+
+ Args:
+ job_id: UUID of the completed job
+ assert_result_is_not_none: Raise exception if result is None
+
+ Returns:
+ Job result
+ """
  result = self.get(f"/w/{self.workspace}/jobs_u/completed/get_result/{job_id}")
  result_text = result.text
  if assert_result_is_not_none and result_text is None:
@@ -405,6 +545,14 @@ class Windmill:
  return result_text

  def get_variable(self, path: str) -> str:
+ """Get a variable value by path.
+
+ Args:
+ path: Variable path in Windmill
+
+ Returns:
+ Variable value as string
+ """
  path = parse_variable_syntax(path) or path
  if self.mocked_api is not None:
  variables = self.mocked_api["variables"]
@@ -415,17 +563,20 @@ class Windmill:
  logger.info(
  f"MockedAPI present, but variable not found at {path}, falling back to real API"
  )
-
- """Get variable from Windmill"""
  return self.get(f"/w/{self.workspace}/variables/get_value/{path}").json()

  def set_variable(self, path: str, value: str, is_secret: bool = False) -> None:
+ """Set a variable value by path, creating it if it doesn't exist.
+
+ Args:
+ path: Variable path in Windmill
+ value: Variable value to set
+ is_secret: Whether the variable should be secret (default: False)
+ """
  path = parse_variable_syntax(path) or path
  if self.mocked_api is not None:
  self.mocked_api["variables"][path] = value
  return
-
- """Set variable from Windmill"""
  # check if variable exists
  r = self.get(
  f"/w/{self.workspace}/variables/get/{path}", raise_for_status=False
@@ -453,6 +604,15 @@ class Windmill:
  path: str,
  none_if_undefined: bool = False,
  ) -> dict | None:
+ """Get a resource value by path.
+
+ Args:
+ path: Resource path in Windmill
+ none_if_undefined: Return None instead of raising if not found
+
+ Returns:
+ Resource value dictionary or None
+ """
  path = parse_resource_syntax(path) or path
  if self.mocked_api is not None:
  resources = self.mocked_api["resources"]
@@ -469,8 +629,6 @@ class Windmill:
  logger.info(
  f"MockedAPI present, but resource not found at ${path}, falling back to real API"
  )
-
- """Get resource from Windmill"""
  try:
  return self.get(
  f"/w/{self.workspace}/resources/get_value_interpolated/{path}"
@@ -487,6 +645,13 @@ class Windmill:
  path: str,
  resource_type: str,
  ):
+ """Set a resource value by path, creating it if it doesn't exist.
+
+ Args:
+ value: Resource value to set
+ path: Resource path in Windmill
+ resource_type: Resource type for creation
+ """
  path = parse_resource_syntax(path) or path
  if self.mocked_api is not None:
  self.mocked_api["resources"][path] = value
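The variable and resource helpers documented in the hunks above are thin wrappers over the workspace HTTP API. A minimal usage sketch, assuming the code runs inside a Windmill worker (so WM_TOKEN, WM_WORKSPACE and BASE_INTERNAL_URL are already set), that the Windmill class is importable from the package root, and that the paths shown are illustrative:

    import wmill

    client = wmill.Windmill()  # base_url, token and workspace come from the environment
    client.set_variable("u/alice/demo_var", "42")  # created if missing, updated otherwise
    value = client.get_variable("u/alice/demo_var")
    pg = client.get_resource("u/alice/demo_pg", none_if_undefined=True)  # None instead of raising when absent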
@@ -513,10 +678,62 @@ class Windmill:
  json={"value": value},
  )

- def set_state(self, value: Any):
- self.set_resource(value, path=self.state_path, resource_type="state")
+ def list_resources(
+ self,
+ resource_type: str = None,
+ page: int = None,
+ per_page: int = None,
+ ) -> list[dict]:
+ """List resources from Windmill workspace.
+
+ Args:
+ resource_type: Optional resource type to filter by (e.g., "postgresql", "mysql", "s3")
+ page: Optional page number for pagination
+ per_page: Optional number of results per page
+
+ Returns:
+ List of resource dictionaries
+ """
+ params = {}
+ if resource_type is not None:
+ params["resource_type"] = resource_type
+ if page is not None:
+ params["page"] = page
+ if per_page is not None:
+ params["per_page"] = per_page
+
+ return self.get(
+ f"/w/{self.workspace}/resources/list",
+ params=params if params else None,
+ ).json()
+
+ def set_state(self, value: Any, path: str | None = None) -> None:
+ """Set the workflow state.
+
+ Args:
+ value: State value to set
+ path: Optional state resource path override.
+ """
+ self.set_resource(value, path=path or self.state_path, resource_type="state")
+
+ def get_state(self, path: str | None = None) -> Any:
+ """Get the workflow state.
+
+ Args:
+ path: Optional state resource path override.
+
+ Returns:
+ State value or None if not set
+ """
+ return self.get_resource(path=path or self.state_path, none_if_undefined=True)

  def set_progress(self, value: int, job_id: Optional[str] = None):
+ """Set job progress percentage (0-99).
+
+ Args:
+ value: Progress percentage
+ job_id: Job ID (defaults to current WM_JOB_ID)
+ """
  workspace = get_workspace()
  flow_id = os.environ.get("WM_FLOW_JOB_ID")
  job_id = job_id or os.environ.get("WM_JOB_ID")
@@ -534,6 +751,14 @@ class Windmill:
  )

  def get_progress(self, job_id: Optional[str] = None) -> Any:
+ """Get job progress percentage.
+
+ Args:
+ job_id: Job ID (defaults to current WM_JOB_ID)
+
+ Returns:
+ Progress value (0-100) or None if not set
+ """
  workspace = get_workspace()
  job_id = job_id or os.environ.get("WM_JOB_ID")

@@ -572,6 +797,11 @@ class Windmill:

  @property
  def version(self):
+ """Get the Windmill server version.
+
+ Returns:
+ Version string
+ """
  return self.get("version").text

  def get_duckdb_connection_settings(
@@ -748,16 +978,107 @@ class Windmill:
  return S3Object(s3=response["file_key"])

  def sign_s3_objects(self, s3_objects: list[S3Object | str]) -> list[S3Object]:
+ """Sign S3 objects for use by anonymous users in public apps.
+
+ Args:
+ s3_objects: List of S3 objects to sign
+
+ Returns:
+ List of signed S3 objects
+ """
  return self.post(
  f"/w/{self.workspace}/apps/sign_s3_objects", json={"s3_objects": list(map(parse_s3_object, s3_objects))}
  ).json()

  def sign_s3_object(self, s3_object: S3Object | str) -> S3Object:
+ """Sign a single S3 object for use by anonymous users in public apps.
+
+ Args:
+ s3_object: S3 object to sign
+
+ Returns:
+ Signed S3 object
+ """
  return self.post(
  f"/w/{self.workspace}/apps/sign_s3_objects",
  json={"s3_objects": [s3_object]},
  ).json()[0]

+ def get_presigned_s3_public_urls(
+ self,
+ s3_objects: list[S3Object | str],
+ base_url: str | None = None,
+ ) -> list[str]:
+ """
+ Generate presigned public URLs for an array of S3 objects.
+ If an S3 object is not signed yet, it will be signed first.
+
+ Args:
+ s3_objects: List of S3 objects to sign
+ base_url: Optional base URL for the presigned URLs (defaults to WM_BASE_URL)
+
+ Returns:
+ List of signed public URLs
+
+ Example:
+ >>> s3_objs = [S3Object(s3="/path/to/file1.txt"), S3Object(s3="/path/to/file2.txt")]
+ >>> urls = client.get_presigned_s3_public_urls(s3_objs)
+ """
+ base_url = base_url or self._get_public_base_url()
+
+ s3_objs = [parse_s3_object(s3_obj) for s3_obj in s3_objects]
+
+ # Sign all S3 objects that need to be signed in one go
+ s3_objs_to_sign: list[tuple[S3Object, int]] = [
+ (s3_obj, index)
+ for index, s3_obj in enumerate(s3_objs)
+ if s3_obj.get("presigned") is None
+ ]
+
+ if s3_objs_to_sign:
+ signed_s3_objs = self.sign_s3_objects(
+ [s3_obj for s3_obj, _ in s3_objs_to_sign]
+ )
+ for i, (_, original_index) in enumerate(s3_objs_to_sign):
+ s3_objs[original_index] = parse_s3_object(signed_s3_objs[i])
+
+ signed_urls: list[str] = []
+ for s3_obj in s3_objs:
+ s3 = s3_obj.get("s3", "")
+ presigned = s3_obj.get("presigned", "")
+ storage = s3_obj.get("storage", "_default_")
+ signed_url = f"{base_url}/api/w/{self.workspace}/s3_proxy/{storage}/{s3}?{presigned}"
+ signed_urls.append(signed_url)
+
+ return signed_urls
+
+ def get_presigned_s3_public_url(
+ self,
+ s3_object: S3Object | str,
+ base_url: str | None = None,
+ ) -> str:
+ """
+ Generate a presigned public URL for an S3 object.
+ If the S3 object is not signed yet, it will be signed first.
+
+ Args:
+ s3_object: S3 object to sign
+ base_url: Optional base URL for the presigned URL (defaults to WM_BASE_URL)
+
+ Returns:
+ Signed public URL
+
+ Example:
+ >>> s3_obj = S3Object(s3="/path/to/file.txt")
+ >>> url = client.get_presigned_s3_public_url(s3_obj)
+ """
+ urls = self.get_presigned_s3_public_urls([s3_object], base_url)
+ return urls[0]
+
+ def _get_public_base_url(self) -> str:
+ """Get the public base URL from environment or default to localhost"""
+ return os.environ.get("WM_BASE_URL", "http://localhost:3000")
+
  def __boto3_connection_settings(self, s3_resource) -> Boto3ConnectionSettings:
  endpoint_url_prefix = "https://" if s3_resource["useSSL"] else "http://"
  return Boto3ConnectionSettings(
@@ -774,14 +1095,29 @@ class Windmill:
  )

  def whoami(self) -> dict:
+ """Get the current user information.
+
+ Returns:
+ User details dictionary
+ """
  return self.get("/users/whoami").json()

  @property
  def user(self) -> dict:
+ """Get the current user information (alias for whoami).
+
+ Returns:
+ User details dictionary
+ """
  return self.whoami()

  @property
  def state_path(self) -> str:
+ """Get the state resource path from environment.
+
+ Returns:
+ State path string
+ """
  state_path = os.environ.get(
  "WM_STATE_PATH_NEW", os.environ.get("WM_STATE_PATH")
  )
@@ -791,10 +1127,16 @@ class Windmill:

  @property
  def state(self) -> Any:
+ """Get the workflow state.
+
+ Returns:
+ State value or None if not set
+ """
  return self.get_resource(path=self.state_path, none_if_undefined=True)

  @state.setter
  def state(self, value: Any) -> None:
+ """Set the workflow state."""
  self.set_state(value)

  @staticmethod
@@ -838,6 +1180,14 @@ class Windmill:
  return json.load(f)

  def get_resume_urls(self, approver: str = None) -> dict:
+ """Get URLs needed for resuming a flow after suspension.
+
+ Args:
+ approver: Optional approver name
+
+ Returns:
+ Dictionary with approvalPage, resume, and cancel URLs
+ """
  nonce = random.randint(0, 1000000000)
  job_id = os.environ.get("WM_JOB_ID") or "NO_ID"
  return self.get(
@@ -950,6 +1300,29 @@ class Windmill:
  },
  )

+ def datatable(self, name: str = "main"):
+ """Get a DataTable client for SQL queries.
+
+ Args:
+ name: Database name (default: "main")
+
+ Returns:
+ DataTableClient instance
+ """
+ return DataTableClient(self, name)
+
+ def ducklake(self, name: str = "main"):
+ """Get a DuckLake client for DuckDB queries.
+
+ Args:
+ name: Database name (default: "main")
+
+ Returns:
+ DucklakeClient instance
+ """
+ return DucklakeClient(self, name)
+
+

  def init_global_client(f):
  @functools.wraps(f)
@@ -982,11 +1355,24 @@ def deprecate(in_favor_of: str):

  @init_global_client
  def get_workspace() -> str:
+ """Get the current workspace ID.
+
+ Returns:
+ Workspace ID string
+ """
  return _client.workspace


  @init_global_client
  def get_root_job_id(job_id: str | None = None) -> str:
+ """Get the root job ID for a flow hierarchy.
+
+ Args:
+ job_id: Job ID (defaults to current WM_JOB_ID)
+
+ Returns:
+ Root job ID
+ """
  return _client.get_root_job_id(job_id)


@@ -1002,6 +1388,16 @@ def run_script_async(
  args: Dict[str, Any] = None,
  scheduled_in_secs: int = None,
  ) -> str:
+ """Create a script job and return its job ID.
+
+ Args:
+ hash_or_path: Script hash or path (determined by presence of '/')
+ args: Script arguments
+ scheduled_in_secs: Delay before execution in seconds
+
+ Returns:
+ Job ID string
+ """
  is_path = "/" in hash_or_path
  hash_ = None if is_path else hash_or_path
  path = hash_or_path if is_path else None
@@ -1023,6 +1419,17 @@ def run_flow_async(
  # lead to incorrectness and failures
  do_not_track_in_parent: bool = True,
  ) -> str:
+ """Create a flow job and return its job ID.
+
+ Args:
+ path: Flow path
+ args: Flow arguments
+ scheduled_in_secs: Delay before execution in seconds
+ do_not_track_in_parent: Whether to track in parent job (default: True)
+
+ Returns:
+ Job ID string
+ """
  return _client.run_flow_async(
  path=path,
  args=args,
@@ -1040,6 +1447,19 @@ def run_script_sync(
  cleanup: bool = True,
  timeout: dt.timedelta = None,
  ) -> Any:
+ """Run a script synchronously by hash and return its result.
+
+ Args:
+ hash: Script hash
+ args: Script arguments
+ verbose: Enable verbose logging
+ assert_result_is_not_none: Raise exception if result is None
+ cleanup: Register cleanup handler to cancel job on exit
+ timeout: Maximum time to wait
+
+ Returns:
+ Script result
+ """
  return _client.run_script(
  hash_=hash,
  args=args,
@@ -1056,6 +1476,16 @@ def run_script_by_path_async(
  args: Dict[str, Any] = None,
  scheduled_in_secs: Union[None, int] = None,
  ) -> str:
+ """Create a script job by path and return its job ID.
+
+ Args:
+ path: Script path
+ args: Script arguments
+ scheduled_in_secs: Delay before execution in seconds
+
+ Returns:
+ Job ID string
+ """
  return _client.run_script_by_path_async(
  path=path,
  args=args,
@@ -1069,6 +1499,16 @@ def run_script_by_hash_async(
  args: Dict[str, Any] = None,
  scheduled_in_secs: Union[None, int] = None,
  ) -> str:
+ """Create a script job by hash and return its job ID.
+
+ Args:
+ hash_: Script hash
+ args: Script arguments
+ scheduled_in_secs: Delay before execution in seconds
+
+ Returns:
+ Job ID string
+ """
  return _client.run_script_by_hash_async(
  hash_=hash_,
  args=args,
@@ -1085,6 +1525,19 @@ def run_script_by_path_sync(
  cleanup: bool = True,
  timeout: dt.timedelta = None,
  ) -> Any:
+ """Run a script synchronously by path and return its result.
+
+ Args:
+ path: Script path
+ args: Script arguments
+ verbose: Enable verbose logging
+ assert_result_is_not_none: Raise exception if result is None
+ cleanup: Register cleanup handler to cancel job on exit
+ timeout: Maximum time to wait
+
+ Returns:
+ Script result
+ """
  return _client.run_script(
  path=path,
  args=args,
@@ -1105,11 +1558,28 @@ def get_id_token(audience: str) -> str:

  @init_global_client
  def get_job_status(job_id: str) -> JobStatus:
+ """Get the status of a job.
+
+ Args:
+ job_id: UUID of the job
+
+ Returns:
+ Job status: "RUNNING", "WAITING", or "COMPLETED"
+ """
  return _client.get_job_status(job_id)


  @init_global_client
  def get_result(job_id: str, assert_result_is_not_none=True) -> Dict[str, Any]:
+ """Get the result of a completed job.
+
+ Args:
+ job_id: UUID of the completed job
+ assert_result_is_not_none: Raise exception if result is None
+
+ Returns:
+ Job result
+ """
  return _client.get_result(
  job_id=job_id, assert_result_is_not_none=assert_result_is_not_none
  )
@@ -1208,6 +1678,56 @@ def sign_s3_object(s3_object: S3Object| str) -> S3Object:
  return _client.sign_s3_object(s3_object)


+ @init_global_client
+ def get_presigned_s3_public_urls(
+ s3_objects: list[S3Object | str],
+ base_url: str | None = None,
+ ) -> list[str]:
+ """
+ Generate presigned public URLs for an array of S3 objects.
+ If an S3 object is not signed yet, it will be signed first.
+
+ Args:
+ s3_objects: List of S3 objects to sign
+ base_url: Optional base URL for the presigned URLs (defaults to WM_BASE_URL)
+
+ Returns:
+ List of signed public URLs
+
+ Example:
+ >>> import wmill
+ >>> from wmill import S3Object
+ >>> s3_objs = [S3Object(s3="/path/to/file1.txt"), S3Object(s3="/path/to/file2.txt")]
+ >>> urls = wmill.get_presigned_s3_public_urls(s3_objs)
+ """
+ return _client.get_presigned_s3_public_urls(s3_objects, base_url)
+
+
+ @init_global_client
+ def get_presigned_s3_public_url(
+ s3_object: S3Object | str,
+ base_url: str | None = None,
+ ) -> str:
+ """
+ Generate a presigned public URL for an S3 object.
+ If the S3 object is not signed yet, it will be signed first.
+
+ Args:
+ s3_object: S3 object to sign
+ base_url: Optional base URL for the presigned URL (defaults to WM_BASE_URL)
+
+ Returns:
+ Signed public URL
+
+ Example:
+ >>> import wmill
+ >>> from wmill import S3Object
+ >>> s3_obj = S3Object(s3="/path/to/file.txt")
+ >>> url = wmill.get_presigned_s3_public_url(s3_obj)
+ """
+ return _client.get_presigned_s3_public_url(s3_object, base_url)
+
+
  @init_global_client
  def whoami() -> dict:
  """
@@ -1217,12 +1737,11 @@ def whoami() -> dict:


  @init_global_client
- @deprecate("Windmill().state")
- def get_state() -> Any:
+ def get_state(path: str | None = None) -> Any:
  """
  Get the state
  """
- return _client.state
+ return _client.get_state(path=path)


  @init_global_client
@@ -1243,11 +1762,41 @@ def set_resource(path: str, value: Any, resource_type: str = "any") -> None:


  @init_global_client
- def set_state(value: Any) -> None:
+ def list_resources(
+ resource_type: str = None,
+ page: int = None,
+ per_page: int = None,
+ ) -> list[dict]:
+ """List resources from Windmill workspace.
+
+ Args:
+ resource_type: Optional resource type to filter by (e.g., "postgresql", "mysql", "s3")
+ page: Optional page number for pagination
+ per_page: Optional number of results per page
+
+ Returns:
+ List of resource dictionaries
+
+ Example:
+ >>> # Get all resources
+ >>> all_resources = wmill.list_resources()
+
+ >>> # Get only PostgreSQL resources
+ >>> pg_resources = wmill.list_resources(resource_type="postgresql")
+ """
+ return _client.list_resources(
+ resource_type=resource_type,
+ page=page,
+ per_page=per_page,
+ )
+
+
+ @init_global_client
+ def set_state(value: Any, path: str | None = None) -> None:
  """
  Set the state
  """
- return _client.set_state(value)
+ return _client.set_state(value, path=path)


  @init_global_client
@@ -1329,11 +1878,24 @@ def set_flow_user_state(key: str, value: Any) -> None:

  @init_global_client
  def get_state_path() -> str:
+ """Get the state resource path from environment.
+
+ Returns:
+ State path string
+ """
  return _client.state_path


  @init_global_client
  def get_resume_urls(approver: str = None) -> dict:
+ """Get URLs needed for resuming a flow after suspension.
+
+ Args:
+ approver: Optional approver name
+
+ Returns:
+ Dictionary with approvalPage, resume, and cancel URLs
+ """
  return _client.get_resume_urls(approver)


@@ -1360,9 +1922,34 @@ def request_interactive_slack_approval(
  def send_teams_message(
  conversation_id: str, text: str, success: bool, card_block: dict = None
  ):
+ """Send a message to a Microsoft Teams conversation.
+
+ Args:
+ conversation_id: Teams conversation ID
+ text: Message text
+ success: Whether to style as success message
+ card_block: Optional adaptive card block
+
+ Returns:
+ HTTP response from Teams
+ """
  return _client.send_teams_message(conversation_id, text, success, card_block)


+ @init_global_client
+ def cancel_job(job_id: str, reason: str = None) -> str:
+ """Cancel a specific job by ID.
+
+ Args:
+ job_id: UUID of the job to cancel
+ reason: Optional reason for cancellation
+
+ Returns:
+ Response message from the cancel endpoint
+ """
+ return _client.cancel_job(job_id, reason)
+
+
  @init_global_client
  def cancel_running() -> dict:
  """Cancel currently running executions of the same script."""
@@ -1433,6 +2020,18 @@ def run_script_by_hash(
  timeout=timeout,
  )

+ @init_global_client
+ def run_inline_script_preview(
+ content: str,
+ language: str,
+ args: dict = None,
+ ) -> Any:
+ """Run a script on the current worker without creating a job"""
+ return _client.run_inline_script_preview(
+ content=content,
+ language=language,
+ args=args,
+ )

  @init_global_client
  def username_to_email(username: str) -> str:
@@ -1444,7 +2043,42 @@ def username_to_email(username: str) -> str:
  return _client.username_to_email(username)


+ @init_global_client
+ def datatable(name: str = "main") -> DataTableClient:
+ """Get a DataTable client for SQL queries.
+
+ Args:
+ name: Database name (default: "main")
+
+ Returns:
+ DataTableClient instance
+ """
+ return _client.datatable(name)
+
+ @init_global_client
+ def ducklake(name: str = "main") -> DucklakeClient:
+ """Get a DuckLake client for DuckDB queries.
+
+ Args:
+ name: Database name (default: "main")
+
+ Returns:
+ DucklakeClient instance
+ """
+ return _client.ducklake(name)
+
  def task(*args, **kwargs):
+ """Decorator to mark a function as a workflow task.
+
+ When executed inside a Windmill job, the decorated function runs as a
+ separate workflow step. Outside Windmill, it executes normally.
+
+ Args:
+ tag: Optional worker tag for execution
+
+ Returns:
+ Decorated function
+ """
  from inspect import signature

  def f(func, tag: str | None = None):
@@ -1541,3 +2175,161 @@ def stream_result(stream) -> None:
  """
  for text in stream:
  append_to_result_stream(text)
+
+ class DataTableClient:
+ """Client for executing SQL queries against Windmill DataTables."""
+
+ def __init__(self, client: Windmill, name: str):
+ """Initialize DataTableClient.
+
+ Args:
+ client: Windmill client instance
+ name: DataTable name
+ """
+ self.client = client
+ self.name, self.schema = parse_sql_client_name(name)
+ def query(self, sql: str, *args) -> SqlQuery:
+ """Execute a SQL query against the DataTable.
+
+ Args:
+ sql: SQL query string with $1, $2, etc. placeholders
+ *args: Positional arguments to bind to query placeholders
+
+ Returns:
+ SqlQuery instance for fetching results
+ """
+ if self.schema is not None:
+ sql = f'SET search_path TO "{self.schema}";\n' + sql
+
+ args_dict = {}
+ args_def = ""
+ for i, arg in enumerate(args):
+ args_dict[f"arg{i+1}"] = arg
+ args_def += f"-- ${i+1} arg{i+1}\n"
+ sql = args_def + sql
+ return SqlQuery(
+ sql,
+ lambda sql: self.client.run_inline_script_preview(
+ content=sql,
+ language="postgresql",
+ args={"database": f"datatable://{self.name}", **args_dict},
+ )
+ )
+
+ class DucklakeClient:
+ """Client for executing DuckDB queries against Windmill DuckLake."""
+
+ def __init__(self, client: Windmill, name: str):
+ """Initialize DucklakeClient.
+
+ Args:
+ client: Windmill client instance
+ name: DuckLake database name
+ """
+ self.client = client
+ self.name = name
+
+ def query(self, sql: str, **kwargs):
+ """Execute a DuckDB query against the DuckLake database.
+
+ Args:
+ sql: SQL query string with $name placeholders
+ **kwargs: Named arguments to bind to query placeholders
+
+ Returns:
+ SqlQuery instance for fetching results
+ """
+ args_dict = {}
+ args_def = ""
+ for key, value in kwargs.items():
+ args_dict[key] = value
+ args_def += f"-- ${key} ({infer_sql_type(value)})\n"
+ attach = f"ATTACH 'ducklake://{self.name}' AS dl;USE dl;\n"
+ sql = args_def + attach + sql
+ return SqlQuery(
+ sql,
+ lambda sql: self.client.run_inline_script_preview(
+ content=sql,
+ language="duckdb",
+ args=args_dict,
+ )
+ )
+
+ class SqlQuery:
+ """Query result handler for DataTable and DuckLake queries."""
+
+ def __init__(self, sql: str, fetch_fn):
+ """Initialize SqlQuery.
+
+ Args:
+ sql: SQL query string
+ fetch_fn: Function to execute the query
+ """
+ self.sql = sql
+ self.fetch_fn = fetch_fn
+
+ def fetch(self, result_collection: str | None = None):
+ """Execute query and fetch results.
+
+ Args:
+ result_collection: Optional result collection mode
+
+ Returns:
+ Query results
+ """
+ sql = self.sql
+ if result_collection is not None:
+ sql = f'-- result_collection={result_collection}\n{sql}'
+ return self.fetch_fn(sql)
+
+ def fetch_one(self):
+ """Execute query and fetch first row of results.
+
+ Returns:
+ First row of query results
+ """
+ return self.fetch(result_collection="last_statement_first_row")
+
+ def fetch_one_scalar(self):
+ """Execute query and fetch first row of results. Return result as a scalar value.
+
+ Returns:
+ First row of query result as a scalar value
+ """
+ return self.fetch(result_collection="last_statement_first_row_scalar")
+
+ def execute(self):
+ """Execute query and don't return any results.
+ """
+ self.fetch_one()
+
+ def infer_sql_type(value) -> str:
+ """
+ DuckDB executor requires explicit argument types at declaration
+ These types exist in both DuckDB and Postgres
+ Check that the types exist if you plan to extend this function for other SQL engines.
+ """
+ if isinstance(value, bool):
+ # Check bool before int since bool is a subclass of int in Python
+ return "BOOLEAN"
+ elif isinstance(value, int):
+ return "BIGINT"
+ elif isinstance(value, float):
+ return "DOUBLE PRECISION"
+ elif value is None:
+ return "TEXT"
+ elif isinstance(value, str):
+ return "TEXT"
+ elif isinstance(value, dict) or isinstance(value, list):
+ return "JSON"
+ else:
+ return "TEXT"
+
+ def parse_sql_client_name(name: str) -> tuple[str, Optional[str]]:
+ name = name
+ schema = None
+ if ":" in name:
+ name, schema = name.split(":", 1)
+ if not name:
+ name = "main"
+ return name, schema
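The DataTableClient, DucklakeClient and SqlQuery classes above route queries through run_inline_script_preview, and the module-level datatable() and ducklake() wrappers added in this release expose the same flow. A rough sketch of how the new job, resource and query helpers combine (the table names, resource filter and job UUID are illustrative, and the snippet assumes it runs inside a Windmill job):

    import wmill

    pg_resources = wmill.list_resources(resource_type="postgresql")  # new workspace listing helper

    # DataTable queries bind positional arguments to $1, $2, ... placeholders
    rows = wmill.datatable("main").query("SELECT * FROM users WHERE id = $1", 42).fetch()

    # DuckLake queries take named parameters, typed via infer_sql_type
    total = wmill.ducklake("main").query("SELECT count(*) FROM events WHERE day = $day", day="2024-01-01").fetch_one_scalar()

    wmill.cancel_job("00000000-0000-0000-0000-000000000000", reason="superseded by a newer run")  # new cancel-by-id helper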
@@ -5,6 +5,15 @@ import httpx


  class S3BufferedReader(BufferedReader):
+ """Streaming buffered reader for S3 files via Windmill's S3 proxy.
+
+ Args:
+ workspace: Windmill workspace ID
+ windmill_client: HTTP client for Windmill API
+ file_key: S3 file key/path
+ s3_resource_path: Optional path to S3 resource configuration
+ storage: Optional storage backend identifier
+ """
  def __init__(self, workspace: str, windmill_client: httpx.Client, file_key: str, s3_resource_path: Optional[str], storage: Optional[str]):
  params = {
  "file_key": file_key,
@@ -62,6 +71,14 @@ class S3BufferedReader(BufferedReader):


  def bytes_generator(buffered_reader: Union[BufferedReader, BytesIO]):
+ """Yield 50KB chunks from a buffered reader.
+
+ Args:
+ buffered_reader: File-like object to read from
+
+ Yields:
+ Bytes chunks of up to 50KB
+ """
  while True:
  byte = buffered_reader.read(50 * 1024)
  if not byte:
@@ -2,6 +2,7 @@ from typing import Optional


  class S3Object(dict):
+ """S3 file reference with file key, optional storage identifier, and presigned token."""
  s3: str
  storage: Optional[str]
  presigned: Optional[str]
@@ -11,6 +12,7 @@ class S3Object(dict):


  class S3FsClientKwargs(dict):
+ """S3FS client keyword arguments for region configuration."""
  region_name: str

  def __getattr__(self, attr):
@@ -18,6 +20,7 @@ class S3FsClientKwargs(dict):


  class S3FsArgs(dict):
+ """S3FS connection arguments including endpoint, credentials, and client settings."""
  endpoint_url: str
  key: str
  secret: str
@@ -30,6 +33,7 @@ class S3FsArgs(dict):


  class StorageOptions(dict):
+ """Storage options for Polars S3 connectivity with AWS credentials and endpoint."""
  aws_endpoint_url: str
  aws_access_key_id: str
  aws_secret_access_key: str
@@ -41,6 +45,7 @@ class StorageOptions(dict):


  class PolarsConnectionSettings(dict):
+ """Polars S3 connection settings containing S3FS args and storage options."""
  s3fs_args: S3FsArgs
  storage_options: StorageOptions

@@ -49,6 +54,7 @@ class PolarsConnectionSettings(dict):


  class Boto3ConnectionSettings(dict):
+ """Boto3 S3 connection settings with endpoint, region, and AWS credentials."""
  endpoint_url: str
  region_name: str
  use_ssl: bool
@@ -60,6 +66,7 @@ class Boto3ConnectionSettings(dict):


  class DuckDbConnectionSettings(dict):
+ """DuckDB S3 connection settings as a configuration string."""
  connection_settings_str: str

  def __getattr__(self, attr):
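For the S3 additions, the intended flow is to wrap a file key in an S3Object, let the server sign it, and then build a shareable URL with the new presigned-URL helpers. A short sketch (the file key is illustrative, and signing assumes workspace storage is configured):

    import wmill
    from wmill import S3Object

    obj = S3Object(s3="reports/2024/summary.pdf")
    signed = wmill.sign_s3_object(obj)  # fills in the "presigned" token
    url = wmill.get_presigned_s3_public_url(signed)  # .../api/w/<workspace>/s3_proxy/<storage>/<key>?<token>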
3 files without changes