wmill 1.583.0__tar.gz → 1.609.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of wmill might be problematic; consult the registry's advisory page for details.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: wmill
3
- Version: 1.583.0
3
+ Version: 1.609.0
4
4
  Summary: A client library for accessing Windmill server wrapping the Windmill client API
5
5
  Home-page: https://windmill.dev
6
6
  License: Apache-2.0
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "wmill"
3
- version = "1.583.0"
3
+ version = "1.609.0"
4
4
  description = "A client library for accessing Windmill server wrapping the Windmill client API"
5
5
  license = "Apache-2.0"
6
6
  homepage = "https://windmill.dev"
@@ -32,7 +32,17 @@ JobStatus = Literal["RUNNING", "WAITING", "COMPLETED"]
32
32
 
33
33
 
34
34
  class Windmill:
35
+ """Windmill client for interacting with the Windmill API."""
36
+
35
37
  def __init__(self, base_url=None, token=None, workspace=None, verify=True):
38
+ """Initialize the Windmill client.
39
+
40
+ Args:
41
+ base_url: API base URL (defaults to BASE_INTERNAL_URL or WM_BASE_URL env)
42
+ token: Authentication token (defaults to WM_TOKEN env)
43
+ workspace: Workspace ID (defaults to WM_WORKSPACE env)
44
+ verify: Whether to verify SSL certificates
45
+ """
36
46
  base = (
37
47
  base_url
38
48
  or os.environ.get("BASE_INTERNAL_URL")
@@ -75,6 +85,11 @@ class Windmill:
75
85
  return mocked_api
76
86
 
77
87
  def get_client(self) -> httpx.Client:
88
+ """Get the HTTP client instance.
89
+
90
+ Returns:
91
+ Configured httpx.Client for API requests
92
+ """
78
93
  return httpx.Client(
79
94
  base_url=self.base_url,
80
95
  headers=self.headers,
@@ -82,6 +97,16 @@ class Windmill:
82
97
  )
83
98
 
84
99
  def get(self, endpoint, raise_for_status=True, **kwargs) -> httpx.Response:
100
+ """Make an HTTP GET request to the Windmill API.
101
+
102
+ Args:
103
+ endpoint: API endpoint path
104
+ raise_for_status: Whether to raise an exception on HTTP errors
105
+ **kwargs: Additional arguments passed to httpx.get
106
+
107
+ Returns:
108
+ HTTP response object
109
+ """
85
110
  endpoint = endpoint.lstrip("/")
86
111
  resp = self.client.get(f"/{endpoint}", **kwargs)
87
112
  if raise_for_status:
@@ -94,6 +119,16 @@ class Windmill:
94
119
  return resp
95
120
 
96
121
  def post(self, endpoint, raise_for_status=True, **kwargs) -> httpx.Response:
122
+ """Make an HTTP POST request to the Windmill API.
123
+
124
+ Args:
125
+ endpoint: API endpoint path
126
+ raise_for_status: Whether to raise an exception on HTTP errors
127
+ **kwargs: Additional arguments passed to httpx.post
128
+
129
+ Returns:
130
+ HTTP response object
131
+ """
97
132
  endpoint = endpoint.lstrip("/")
98
133
  resp = self.client.post(f"/{endpoint}", **kwargs)
99
134
  if raise_for_status:
@@ -106,6 +141,14 @@ class Windmill:
106
141
  return resp
107
142
 
108
143
  def create_token(self, duration=dt.timedelta(days=1)) -> str:
144
+ """Create a new authentication token.
145
+
146
+ Args:
147
+ duration: Token validity duration (default: 1 day)
148
+
149
+ Returns:
150
+ New authentication token string
151
+ """
109
152
  endpoint = "/users/tokens/create"
110
153
  payload = {
111
154
  "label": f"refresh {time.time()}",
@@ -276,6 +319,21 @@ class Windmill:
276
319
  cleanup=cleanup, assert_result_is_not_none=assert_result_is_not_none
277
320
  )
278
321
 
322
+ def run_inline_script_preview(
323
+ self,
324
+ content: str,
325
+ language: str,
326
+ args: dict = None,
327
+ ) -> Any:
328
+ """Run a script on the current worker without creating a job"""
329
+ endpoint = f"/w/{self.workspace}/jobs/run_inline/preview"
330
+ body = {
331
+ "content": content,
332
+ "language": language,
333
+ "args": args or {},
334
+ }
335
+ return self.post(endpoint, json=body).json()
336
+
279
337
  def wait_job(
280
338
  self,
281
339
  job_id,
@@ -284,6 +342,22 @@ class Windmill:
284
342
  cleanup: bool = True,
285
343
  assert_result_is_not_none: bool = False,
286
344
  ):
345
+ """Wait for a job to complete and return its result.
346
+
347
+ Args:
348
+ job_id: ID of the job to wait for
349
+ timeout: Maximum time to wait (seconds or timedelta)
350
+ verbose: Enable verbose logging
351
+ cleanup: Register cleanup handler to cancel job on exit
352
+ assert_result_is_not_none: Raise exception if result is None
353
+
354
+ Returns:
355
+ Job result when completed
356
+
357
+ Raises:
358
+ TimeoutError: If timeout is reached
359
+ Exception: If job fails
360
+ """
287
361
  def cancel_job():
288
362
  logger.warning(f"cancelling job: {job_id}")
289
363
  self.post(
@@ -392,16 +466,52 @@ class Windmill:
392
466
  return result
393
467
 
394
468
  def get_job(self, job_id: str) -> dict:
469
+ """Get job details by ID.
470
+
471
+ Args:
472
+ job_id: UUID of the job
473
+
474
+ Returns:
475
+ Job details dictionary
476
+ """
395
477
  return self.get(f"/w/{self.workspace}/jobs_u/get/{job_id}").json()
396
478
 
397
479
  def get_root_job_id(self, job_id: str | None = None) -> dict:
480
+ """Get the root job ID for a flow hierarchy.
481
+
482
+ Args:
483
+ job_id: Job ID (defaults to current WM_JOB_ID)
484
+
485
+ Returns:
486
+ Root job ID
487
+ """
398
488
  job_id = job_id or os.environ.get("WM_JOB_ID")
399
489
  return self.get(f"/w/{self.workspace}/jobs_u/get_root_job_id/{job_id}").json()
400
490
 
401
- def get_id_token(self, audience: str) -> str:
402
- return self.post(f"/w/{self.workspace}/oidc/token/{audience}").text
491
+ def get_id_token(self, audience: str, expires_in: int | None = None) -> str:
492
+ """Get an OIDC JWT token for authentication to external services.
493
+
494
+ Args:
495
+ audience: Token audience (e.g., "vault", "aws")
496
+ expires_in: Optional expiration time in seconds
497
+
498
+ Returns:
499
+ JWT token string
500
+ """
501
+ params = {}
502
+ if expires_in is not None:
503
+ params["expires_in"] = expires_in
504
+ return self.post(f"/w/{self.workspace}/oidc/token/{audience}", params=params).text
403
505
 
404
506
  def get_job_status(self, job_id: str) -> JobStatus:
507
+ """Get the status of a job.
508
+
509
+ Args:
510
+ job_id: UUID of the job
511
+
512
+ Returns:
513
+ Job status: "RUNNING", "WAITING", or "COMPLETED"
514
+ """
405
515
  job = self.get_job(job_id)
406
516
  job_type = job.get("type", "")
407
517
  assert job_type, f"{job} is not a valid job"
@@ -416,6 +526,15 @@ class Windmill:
416
526
  job_id: str,
417
527
  assert_result_is_not_none: bool = True,
418
528
  ) -> Any:
529
+ """Get the result of a completed job.
530
+
531
+ Args:
532
+ job_id: UUID of the completed job
533
+ assert_result_is_not_none: Raise exception if result is None
534
+
535
+ Returns:
536
+ Job result
537
+ """
419
538
  result = self.get(f"/w/{self.workspace}/jobs_u/completed/get_result/{job_id}")
420
539
  result_text = result.text
421
540
  if assert_result_is_not_none and result_text is None:
@@ -426,6 +545,14 @@ class Windmill:
426
545
  return result_text
427
546
 
428
547
  def get_variable(self, path: str) -> str:
548
+ """Get a variable value by path.
549
+
550
+ Args:
551
+ path: Variable path in Windmill
552
+
553
+ Returns:
554
+ Variable value as string
555
+ """
429
556
  path = parse_variable_syntax(path) or path
430
557
  if self.mocked_api is not None:
431
558
  variables = self.mocked_api["variables"]
@@ -436,17 +563,20 @@ class Windmill:
436
563
  logger.info(
437
564
  f"MockedAPI present, but variable not found at {path}, falling back to real API"
438
565
  )
439
-
440
- """Get variable from Windmill"""
441
566
  return self.get(f"/w/{self.workspace}/variables/get_value/{path}").json()
442
567
 
443
568
  def set_variable(self, path: str, value: str, is_secret: bool = False) -> None:
569
+ """Set a variable value by path, creating it if it doesn't exist.
570
+
571
+ Args:
572
+ path: Variable path in Windmill
573
+ value: Variable value to set
574
+ is_secret: Whether the variable should be secret (default: False)
575
+ """
444
576
  path = parse_variable_syntax(path) or path
445
577
  if self.mocked_api is not None:
446
578
  self.mocked_api["variables"][path] = value
447
579
  return
448
-
449
- """Set variable from Windmill"""
450
580
  # check if variable exists
451
581
  r = self.get(
452
582
  f"/w/{self.workspace}/variables/get/{path}", raise_for_status=False
@@ -474,6 +604,15 @@ class Windmill:
474
604
  path: str,
475
605
  none_if_undefined: bool = False,
476
606
  ) -> dict | None:
607
+ """Get a resource value by path.
608
+
609
+ Args:
610
+ path: Resource path in Windmill
611
+ none_if_undefined: Return None instead of raising if not found
612
+
613
+ Returns:
614
+ Resource value dictionary or None
615
+ """
477
616
  path = parse_resource_syntax(path) or path
478
617
  if self.mocked_api is not None:
479
618
  resources = self.mocked_api["resources"]
@@ -490,8 +629,6 @@ class Windmill:
490
629
  logger.info(
491
630
  f"MockedAPI present, but resource not found at ${path}, falling back to real API"
492
631
  )
493
-
494
- """Get resource from Windmill"""
495
632
  try:
496
633
  return self.get(
497
634
  f"/w/{self.workspace}/resources/get_value_interpolated/{path}"
@@ -508,6 +645,13 @@ class Windmill:
508
645
  path: str,
509
646
  resource_type: str,
510
647
  ):
648
+ """Set a resource value by path, creating it if it doesn't exist.
649
+
650
+ Args:
651
+ value: Resource value to set
652
+ path: Resource path in Windmill
653
+ resource_type: Resource type for creation
654
+ """
511
655
  path = parse_resource_syntax(path) or path
512
656
  if self.mocked_api is not None:
513
657
  self.mocked_api["resources"][path] = value
@@ -563,10 +707,33 @@ class Windmill:
563
707
  params=params if params else None,
564
708
  ).json()
565
709
 
566
- def set_state(self, value: Any):
567
- self.set_resource(value, path=self.state_path, resource_type="state")
710
+ def set_state(self, value: Any, path: str | None = None) -> None:
711
+ """Set the workflow state.
712
+
713
+ Args:
714
+ value: State value to set
715
+ path: Optional state resource path override.
716
+ """
717
+ self.set_resource(value, path=path or self.state_path, resource_type="state")
718
+
719
+ def get_state(self, path: str | None = None) -> Any:
720
+ """Get the workflow state.
721
+
722
+ Args:
723
+ path: Optional state resource path override.
724
+
725
+ Returns:
726
+ State value or None if not set
727
+ """
728
+ return self.get_resource(path=path or self.state_path, none_if_undefined=True)
568
729
 
569
730
  def set_progress(self, value: int, job_id: Optional[str] = None):
731
+ """Set job progress percentage (0-99).
732
+
733
+ Args:
734
+ value: Progress percentage
735
+ job_id: Job ID (defaults to current WM_JOB_ID)
736
+ """
570
737
  workspace = get_workspace()
571
738
  flow_id = os.environ.get("WM_FLOW_JOB_ID")
572
739
  job_id = job_id or os.environ.get("WM_JOB_ID")
@@ -584,6 +751,14 @@ class Windmill:
584
751
  )
585
752
 
586
753
  def get_progress(self, job_id: Optional[str] = None) -> Any:
754
+ """Get job progress percentage.
755
+
756
+ Args:
757
+ job_id: Job ID (defaults to current WM_JOB_ID)
758
+
759
+ Returns:
760
+ Progress value (0-100) or None if not set
761
+ """
587
762
  workspace = get_workspace()
588
763
  job_id = job_id or os.environ.get("WM_JOB_ID")
589
764
 
@@ -622,6 +797,11 @@ class Windmill:
622
797
 
623
798
  @property
624
799
  def version(self):
800
+ """Get the Windmill server version.
801
+
802
+ Returns:
803
+ Version string
804
+ """
625
805
  return self.get("version").text
626
806
 
627
807
  def get_duckdb_connection_settings(
@@ -795,19 +975,110 @@ class Windmill:
795
975
  ).json()
796
976
  except Exception as e:
797
977
  raise Exception("Could not write file to S3") from e
798
- return S3Object(s3=response["file_key"])
978
+ return S3Object(s3=response["file_key"], storage=s3object["storage"])
799
979
 
800
980
  def sign_s3_objects(self, s3_objects: list[S3Object | str]) -> list[S3Object]:
981
+ """Sign S3 objects for use by anonymous users in public apps.
982
+
983
+ Args:
984
+ s3_objects: List of S3 objects to sign
985
+
986
+ Returns:
987
+ List of signed S3 objects
988
+ """
801
989
  return self.post(
802
990
  f"/w/{self.workspace}/apps/sign_s3_objects", json={"s3_objects": list(map(parse_s3_object, s3_objects))}
803
991
  ).json()
804
992
 
805
993
  def sign_s3_object(self, s3_object: S3Object | str) -> S3Object:
994
+ """Sign a single S3 object for use by anonymous users in public apps.
995
+
996
+ Args:
997
+ s3_object: S3 object to sign
998
+
999
+ Returns:
1000
+ Signed S3 object
1001
+ """
806
1002
  return self.post(
807
1003
  f"/w/{self.workspace}/apps/sign_s3_objects",
808
1004
  json={"s3_objects": [s3_object]},
809
1005
  ).json()[0]
810
1006
 
1007
+ def get_presigned_s3_public_urls(
1008
+ self,
1009
+ s3_objects: list[S3Object | str],
1010
+ base_url: str | None = None,
1011
+ ) -> list[str]:
1012
+ """
1013
+ Generate presigned public URLs for an array of S3 objects.
1014
+ If an S3 object is not signed yet, it will be signed first.
1015
+
1016
+ Args:
1017
+ s3_objects: List of S3 objects to sign
1018
+ base_url: Optional base URL for the presigned URLs (defaults to WM_BASE_URL)
1019
+
1020
+ Returns:
1021
+ List of signed public URLs
1022
+
1023
+ Example:
1024
+ >>> s3_objs = [S3Object(s3="/path/to/file1.txt"), S3Object(s3="/path/to/file2.txt")]
1025
+ >>> urls = client.get_presigned_s3_public_urls(s3_objs)
1026
+ """
1027
+ base_url = base_url or self._get_public_base_url()
1028
+
1029
+ s3_objs = [parse_s3_object(s3_obj) for s3_obj in s3_objects]
1030
+
1031
+ # Sign all S3 objects that need to be signed in one go
1032
+ s3_objs_to_sign: list[tuple[S3Object, int]] = [
1033
+ (s3_obj, index)
1034
+ for index, s3_obj in enumerate(s3_objs)
1035
+ if s3_obj.get("presigned") is None
1036
+ ]
1037
+
1038
+ if s3_objs_to_sign:
1039
+ signed_s3_objs = self.sign_s3_objects(
1040
+ [s3_obj for s3_obj, _ in s3_objs_to_sign]
1041
+ )
1042
+ for i, (_, original_index) in enumerate(s3_objs_to_sign):
1043
+ s3_objs[original_index] = parse_s3_object(signed_s3_objs[i])
1044
+
1045
+ signed_urls: list[str] = []
1046
+ for s3_obj in s3_objs:
1047
+ s3 = s3_obj.get("s3", "")
1048
+ presigned = s3_obj.get("presigned", "")
1049
+ storage = s3_obj.get("storage", "_default_")
1050
+ signed_url = f"{base_url}/api/w/{self.workspace}/s3_proxy/{storage}/{s3}?{presigned}"
1051
+ signed_urls.append(signed_url)
1052
+
1053
+ return signed_urls
1054
+
1055
+ def get_presigned_s3_public_url(
1056
+ self,
1057
+ s3_object: S3Object | str,
1058
+ base_url: str | None = None,
1059
+ ) -> str:
1060
+ """
1061
+ Generate a presigned public URL for an S3 object.
1062
+ If the S3 object is not signed yet, it will be signed first.
1063
+
1064
+ Args:
1065
+ s3_object: S3 object to sign
1066
+ base_url: Optional base URL for the presigned URL (defaults to WM_BASE_URL)
1067
+
1068
+ Returns:
1069
+ Signed public URL
1070
+
1071
+ Example:
1072
+ >>> s3_obj = S3Object(s3="/path/to/file.txt")
1073
+ >>> url = client.get_presigned_s3_public_url(s3_obj)
1074
+ """
1075
+ urls = self.get_presigned_s3_public_urls([s3_object], base_url)
1076
+ return urls[0]
1077
+
1078
+ def _get_public_base_url(self) -> str:
1079
+ """Get the public base URL from environment or default to localhost"""
1080
+ return os.environ.get("WM_BASE_URL", "http://localhost:3000")
1081
+
811
1082
  def __boto3_connection_settings(self, s3_resource) -> Boto3ConnectionSettings:
812
1083
  endpoint_url_prefix = "https://" if s3_resource["useSSL"] else "http://"
813
1084
  return Boto3ConnectionSettings(
@@ -824,14 +1095,29 @@ class Windmill:
824
1095
  )
825
1096
 
826
1097
  def whoami(self) -> dict:
1098
+ """Get the current user information.
1099
+
1100
+ Returns:
1101
+ User details dictionary
1102
+ """
827
1103
  return self.get("/users/whoami").json()
828
1104
 
829
1105
  @property
830
1106
  def user(self) -> dict:
1107
+ """Get the current user information (alias for whoami).
1108
+
1109
+ Returns:
1110
+ User details dictionary
1111
+ """
831
1112
  return self.whoami()
832
1113
 
833
1114
  @property
834
1115
  def state_path(self) -> str:
1116
+ """Get the state resource path from environment.
1117
+
1118
+ Returns:
1119
+ State path string
1120
+ """
835
1121
  state_path = os.environ.get(
836
1122
  "WM_STATE_PATH_NEW", os.environ.get("WM_STATE_PATH")
837
1123
  )
@@ -841,10 +1127,16 @@ class Windmill:
841
1127
 
842
1128
  @property
843
1129
  def state(self) -> Any:
1130
+ """Get the workflow state.
1131
+
1132
+ Returns:
1133
+ State value or None if not set
1134
+ """
844
1135
  return self.get_resource(path=self.state_path, none_if_undefined=True)
845
1136
 
846
1137
  @state.setter
847
1138
  def state(self, value: Any) -> None:
1139
+ """Set the workflow state."""
848
1140
  self.set_state(value)
849
1141
 
850
1142
  @staticmethod
@@ -887,12 +1179,26 @@ class Windmill:
887
1179
  with open(f"/shared/{path}", "r", encoding="utf-8") as f:
888
1180
  return json.load(f)
889
1181
 
890
- def get_resume_urls(self, approver: str = None) -> dict:
1182
+ def get_resume_urls(self, approver: str = None, flow_level: bool = None) -> dict:
1183
+ """Get URLs needed for resuming a flow after suspension.
1184
+
1185
+ Args:
1186
+ approver: Optional approver name
1187
+ flow_level: If True, generate resume URLs for the parent flow instead of the
1188
+ specific step. This allows pre-approvals that can be consumed by any later
1189
+ suspend step in the same flow.
1190
+
1191
+ Returns:
1192
+ Dictionary with approvalPage, resume, and cancel URLs
1193
+ """
891
1194
  nonce = random.randint(0, 1000000000)
892
1195
  job_id = os.environ.get("WM_JOB_ID") or "NO_ID"
1196
+ params = {"approver": approver}
1197
+ if flow_level is not None:
1198
+ params["flow_level"] = flow_level
893
1199
  return self.get(
894
1200
  f"/w/{self.workspace}/jobs/resume_urls/{job_id}/{nonce}",
895
- params={"approver": approver},
1201
+ params=params,
896
1202
  ).json()
897
1203
 
898
1204
  def request_interactive_slack_approval(
@@ -1000,6 +1306,29 @@ class Windmill:
1000
1306
  },
1001
1307
  )
1002
1308
 
1309
+ def datatable(self, name: str = "main"):
1310
+ """Get a DataTable client for SQL queries.
1311
+
1312
+ Args:
1313
+ name: Database name (default: "main")
1314
+
1315
+ Returns:
1316
+ DataTableClient instance
1317
+ """
1318
+ return DataTableClient(self, name)
1319
+
1320
+ def ducklake(self, name: str = "main"):
1321
+ """Get a DuckLake client for DuckDB queries.
1322
+
1323
+ Args:
1324
+ name: Database name (default: "main")
1325
+
1326
+ Returns:
1327
+ DucklakeClient instance
1328
+ """
1329
+ return DucklakeClient(self, name)
1330
+
1331
+
1003
1332
 
1004
1333
  def init_global_client(f):
1005
1334
  @functools.wraps(f)
@@ -1032,11 +1361,24 @@ def deprecate(in_favor_of: str):
1032
1361
 
1033
1362
  @init_global_client
1034
1363
  def get_workspace() -> str:
1364
+ """Get the current workspace ID.
1365
+
1366
+ Returns:
1367
+ Workspace ID string
1368
+ """
1035
1369
  return _client.workspace
1036
1370
 
1037
1371
 
1038
1372
  @init_global_client
1039
1373
  def get_root_job_id(job_id: str | None = None) -> str:
1374
+ """Get the root job ID for a flow hierarchy.
1375
+
1376
+ Args:
1377
+ job_id: Job ID (defaults to current WM_JOB_ID)
1378
+
1379
+ Returns:
1380
+ Root job ID
1381
+ """
1040
1382
  return _client.get_root_job_id(job_id)
1041
1383
 
1042
1384
 
@@ -1052,6 +1394,16 @@ def run_script_async(
1052
1394
  args: Dict[str, Any] = None,
1053
1395
  scheduled_in_secs: int = None,
1054
1396
  ) -> str:
1397
+ """Create a script job and return its job ID.
1398
+
1399
+ Args:
1400
+ hash_or_path: Script hash or path (determined by presence of '/')
1401
+ args: Script arguments
1402
+ scheduled_in_secs: Delay before execution in seconds
1403
+
1404
+ Returns:
1405
+ Job ID string
1406
+ """
1055
1407
  is_path = "/" in hash_or_path
1056
1408
  hash_ = None if is_path else hash_or_path
1057
1409
  path = hash_or_path if is_path else None
@@ -1073,6 +1425,17 @@ def run_flow_async(
1073
1425
  # lead to incorrectness and failures
1074
1426
  do_not_track_in_parent: bool = True,
1075
1427
  ) -> str:
1428
+ """Create a flow job and return its job ID.
1429
+
1430
+ Args:
1431
+ path: Flow path
1432
+ args: Flow arguments
1433
+ scheduled_in_secs: Delay before execution in seconds
1434
+ do_not_track_in_parent: Whether to track in parent job (default: True)
1435
+
1436
+ Returns:
1437
+ Job ID string
1438
+ """
1076
1439
  return _client.run_flow_async(
1077
1440
  path=path,
1078
1441
  args=args,
@@ -1090,6 +1453,19 @@ def run_script_sync(
1090
1453
  cleanup: bool = True,
1091
1454
  timeout: dt.timedelta = None,
1092
1455
  ) -> Any:
1456
+ """Run a script synchronously by hash and return its result.
1457
+
1458
+ Args:
1459
+ hash: Script hash
1460
+ args: Script arguments
1461
+ verbose: Enable verbose logging
1462
+ assert_result_is_not_none: Raise exception if result is None
1463
+ cleanup: Register cleanup handler to cancel job on exit
1464
+ timeout: Maximum time to wait
1465
+
1466
+ Returns:
1467
+ Script result
1468
+ """
1093
1469
  return _client.run_script(
1094
1470
  hash_=hash,
1095
1471
  args=args,
@@ -1106,6 +1482,16 @@ def run_script_by_path_async(
1106
1482
  args: Dict[str, Any] = None,
1107
1483
  scheduled_in_secs: Union[None, int] = None,
1108
1484
  ) -> str:
1485
+ """Create a script job by path and return its job ID.
1486
+
1487
+ Args:
1488
+ path: Script path
1489
+ args: Script arguments
1490
+ scheduled_in_secs: Delay before execution in seconds
1491
+
1492
+ Returns:
1493
+ Job ID string
1494
+ """
1109
1495
  return _client.run_script_by_path_async(
1110
1496
  path=path,
1111
1497
  args=args,
@@ -1119,6 +1505,16 @@ def run_script_by_hash_async(
1119
1505
  args: Dict[str, Any] = None,
1120
1506
  scheduled_in_secs: Union[None, int] = None,
1121
1507
  ) -> str:
1508
+ """Create a script job by hash and return its job ID.
1509
+
1510
+ Args:
1511
+ hash_: Script hash
1512
+ args: Script arguments
1513
+ scheduled_in_secs: Delay before execution in seconds
1514
+
1515
+ Returns:
1516
+ Job ID string
1517
+ """
1122
1518
  return _client.run_script_by_hash_async(
1123
1519
  hash_=hash_,
1124
1520
  args=args,
@@ -1135,6 +1531,19 @@ def run_script_by_path_sync(
1135
1531
  cleanup: bool = True,
1136
1532
  timeout: dt.timedelta = None,
1137
1533
  ) -> Any:
1534
+ """Run a script synchronously by path and return its result.
1535
+
1536
+ Args:
1537
+ path: Script path
1538
+ args: Script arguments
1539
+ verbose: Enable verbose logging
1540
+ assert_result_is_not_none: Raise exception if result is None
1541
+ cleanup: Register cleanup handler to cancel job on exit
1542
+ timeout: Maximum time to wait
1543
+
1544
+ Returns:
1545
+ Script result
1546
+ """
1138
1547
  return _client.run_script(
1139
1548
  path=path,
1140
1549
  args=args,
@@ -1155,11 +1564,28 @@ def get_id_token(audience: str) -> str:
1155
1564
 
1156
1565
  @init_global_client
1157
1566
  def get_job_status(job_id: str) -> JobStatus:
1567
+ """Get the status of a job.
1568
+
1569
+ Args:
1570
+ job_id: UUID of the job
1571
+
1572
+ Returns:
1573
+ Job status: "RUNNING", "WAITING", or "COMPLETED"
1574
+ """
1158
1575
  return _client.get_job_status(job_id)
1159
1576
 
1160
1577
 
1161
1578
  @init_global_client
1162
1579
  def get_result(job_id: str, assert_result_is_not_none=True) -> Dict[str, Any]:
1580
+ """Get the result of a completed job.
1581
+
1582
+ Args:
1583
+ job_id: UUID of the completed job
1584
+ assert_result_is_not_none: Raise exception if result is None
1585
+
1586
+ Returns:
1587
+ Job result
1588
+ """
1163
1589
  return _client.get_result(
1164
1590
  job_id=job_id, assert_result_is_not_none=assert_result_is_not_none
1165
1591
  )
@@ -1258,6 +1684,56 @@ def sign_s3_object(s3_object: S3Object| str) -> S3Object:
1258
1684
  return _client.sign_s3_object(s3_object)
1259
1685
 
1260
1686
 
1687
+ @init_global_client
1688
+ def get_presigned_s3_public_urls(
1689
+ s3_objects: list[S3Object | str],
1690
+ base_url: str | None = None,
1691
+ ) -> list[str]:
1692
+ """
1693
+ Generate presigned public URLs for an array of S3 objects.
1694
+ If an S3 object is not signed yet, it will be signed first.
1695
+
1696
+ Args:
1697
+ s3_objects: List of S3 objects to sign
1698
+ base_url: Optional base URL for the presigned URLs (defaults to WM_BASE_URL)
1699
+
1700
+ Returns:
1701
+ List of signed public URLs
1702
+
1703
+ Example:
1704
+ >>> import wmill
1705
+ >>> from wmill import S3Object
1706
+ >>> s3_objs = [S3Object(s3="/path/to/file1.txt"), S3Object(s3="/path/to/file2.txt")]
1707
+ >>> urls = wmill.get_presigned_s3_public_urls(s3_objs)
1708
+ """
1709
+ return _client.get_presigned_s3_public_urls(s3_objects, base_url)
1710
+
1711
+
1712
+ @init_global_client
1713
+ def get_presigned_s3_public_url(
1714
+ s3_object: S3Object | str,
1715
+ base_url: str | None = None,
1716
+ ) -> str:
1717
+ """
1718
+ Generate a presigned public URL for an S3 object.
1719
+ If the S3 object is not signed yet, it will be signed first.
1720
+
1721
+ Args:
1722
+ s3_object: S3 object to sign
1723
+ base_url: Optional base URL for the presigned URL (defaults to WM_BASE_URL)
1724
+
1725
+ Returns:
1726
+ Signed public URL
1727
+
1728
+ Example:
1729
+ >>> import wmill
1730
+ >>> from wmill import S3Object
1731
+ >>> s3_obj = S3Object(s3="/path/to/file.txt")
1732
+ >>> url = wmill.get_presigned_s3_public_url(s3_obj)
1733
+ """
1734
+ return _client.get_presigned_s3_public_url(s3_object, base_url)
1735
+
1736
+
1261
1737
  @init_global_client
1262
1738
  def whoami() -> dict:
1263
1739
  """
@@ -1267,12 +1743,11 @@ def whoami() -> dict:
1267
1743
 
1268
1744
 
1269
1745
  @init_global_client
1270
- @deprecate("Windmill().state")
1271
- def get_state() -> Any:
1746
+ def get_state(path: str | None = None) -> Any:
1272
1747
  """
1273
1748
  Get the state
1274
1749
  """
1275
- return _client.state
1750
+ return _client.get_state(path=path)
1276
1751
 
1277
1752
 
1278
1753
  @init_global_client
@@ -1323,11 +1798,11 @@ def list_resources(
1323
1798
 
1324
1799
 
1325
1800
  @init_global_client
1326
- def set_state(value: Any) -> None:
1801
+ def set_state(value: Any, path: str | None = None) -> None:
1327
1802
  """
1328
1803
  Set the state
1329
1804
  """
1330
- return _client.set_state(value)
1805
+ return _client.set_state(value, path=path)
1331
1806
 
1332
1807
 
1333
1808
  @init_global_client
@@ -1409,12 +1884,28 @@ def set_flow_user_state(key: str, value: Any) -> None:
1409
1884
 
1410
1885
  @init_global_client
1411
1886
  def get_state_path() -> str:
1887
+ """Get the state resource path from environment.
1888
+
1889
+ Returns:
1890
+ State path string
1891
+ """
1412
1892
  return _client.state_path
1413
1893
 
1414
1894
 
1415
1895
  @init_global_client
1416
- def get_resume_urls(approver: str = None) -> dict:
1417
- return _client.get_resume_urls(approver)
1896
+ def get_resume_urls(approver: str = None, flow_level: bool = None) -> dict:
1897
+ """Get URLs needed for resuming a flow after suspension.
1898
+
1899
+ Args:
1900
+ approver: Optional approver name
1901
+ flow_level: If True, generate resume URLs for the parent flow instead of the
1902
+ specific step. This allows pre-approvals that can be consumed by any later
1903
+ suspend step in the same flow.
1904
+
1905
+ Returns:
1906
+ Dictionary with approvalPage, resume, and cancel URLs
1907
+ """
1908
+ return _client.get_resume_urls(approver, flow_level)
1418
1909
 
1419
1910
 
1420
1911
  @init_global_client
@@ -1440,6 +1931,17 @@ def request_interactive_slack_approval(
1440
1931
  def send_teams_message(
1441
1932
  conversation_id: str, text: str, success: bool, card_block: dict = None
1442
1933
  ):
1934
+ """Send a message to a Microsoft Teams conversation.
1935
+
1936
+ Args:
1937
+ conversation_id: Teams conversation ID
1938
+ text: Message text
1939
+ success: Whether to style as success message
1940
+ card_block: Optional adaptive card block
1941
+
1942
+ Returns:
1943
+ HTTP response from Teams
1944
+ """
1443
1945
  return _client.send_teams_message(conversation_id, text, success, card_block)
1444
1946
 
1445
1947
 
@@ -1527,6 +2029,18 @@ def run_script_by_hash(
1527
2029
  timeout=timeout,
1528
2030
  )
1529
2031
 
2032
+ @init_global_client
2033
+ def run_inline_script_preview(
2034
+ content: str,
2035
+ language: str,
2036
+ args: dict = None,
2037
+ ) -> Any:
2038
+ """Run a script on the current worker without creating a job"""
2039
+ return _client.run_inline_script_preview(
2040
+ content=content,
2041
+ language=language,
2042
+ args=args,
2043
+ )
1530
2044
 
1531
2045
  @init_global_client
1532
2046
  def username_to_email(username: str) -> str:
@@ -1538,7 +2052,42 @@ def username_to_email(username: str) -> str:
1538
2052
  return _client.username_to_email(username)
1539
2053
 
1540
2054
 
2055
+ @init_global_client
2056
+ def datatable(name: str = "main") -> DataTableClient:
2057
+ """Get a DataTable client for SQL queries.
2058
+
2059
+ Args:
2060
+ name: Database name (default: "main")
2061
+
2062
+ Returns:
2063
+ DataTableClient instance
2064
+ """
2065
+ return _client.datatable(name)
2066
+
2067
+ @init_global_client
2068
+ def ducklake(name: str = "main") -> DucklakeClient:
2069
+ """Get a DuckLake client for DuckDB queries.
2070
+
2071
+ Args:
2072
+ name: Database name (default: "main")
2073
+
2074
+ Returns:
2075
+ DucklakeClient instance
2076
+ """
2077
+ return _client.ducklake(name)
2078
+
1541
2079
  def task(*args, **kwargs):
2080
+ """Decorator to mark a function as a workflow task.
2081
+
2082
+ When executed inside a Windmill job, the decorated function runs as a
2083
+ separate workflow step. Outside Windmill, it executes normally.
2084
+
2085
+ Args:
2086
+ tag: Optional worker tag for execution
2087
+
2088
+ Returns:
2089
+ Decorated function
2090
+ """
1542
2091
  from inspect import signature
1543
2092
 
1544
2093
  def f(func, tag: str | None = None):
@@ -1635,3 +2184,161 @@ def stream_result(stream) -> None:
1635
2184
  """
1636
2185
  for text in stream:
1637
2186
  append_to_result_stream(text)
2187
+
2188
+ class DataTableClient:
2189
+ """Client for executing SQL queries against Windmill DataTables."""
2190
+
2191
+ def __init__(self, client: Windmill, name: str):
2192
+ """Initialize DataTableClient.
2193
+
2194
+ Args:
2195
+ client: Windmill client instance
2196
+ name: DataTable name
2197
+ """
2198
+ self.client = client
2199
+ self.name, self.schema = parse_sql_client_name(name)
2200
+ def query(self, sql: str, *args) -> SqlQuery:
2201
+ """Execute a SQL query against the DataTable.
2202
+
2203
+ Args:
2204
+ sql: SQL query string with $1, $2, etc. placeholders
2205
+ *args: Positional arguments to bind to query placeholders
2206
+
2207
+ Returns:
2208
+ SqlQuery instance for fetching results
2209
+ """
2210
+ if self.schema is not None:
2211
+ sql = f'SET search_path TO "{self.schema}";\n' + sql
2212
+
2213
+ args_dict = {}
2214
+ args_def = ""
2215
+ for i, arg in enumerate(args):
2216
+ args_dict[f"arg{i+1}"] = arg
2217
+ args_def += f"-- ${i+1} arg{i+1}\n"
2218
+ sql = args_def + sql
2219
+ return SqlQuery(
2220
+ sql,
2221
+ lambda sql: self.client.run_inline_script_preview(
2222
+ content=sql,
2223
+ language="postgresql",
2224
+ args={"database": f"datatable://{self.name}", **args_dict},
2225
+ )
2226
+ )
2227
+
2228
+ class DucklakeClient:
2229
+ """Client for executing DuckDB queries against Windmill DuckLake."""
2230
+
2231
+ def __init__(self, client: Windmill, name: str):
2232
+ """Initialize DucklakeClient.
2233
+
2234
+ Args:
2235
+ client: Windmill client instance
2236
+ name: DuckLake database name
2237
+ """
2238
+ self.client = client
2239
+ self.name = name
2240
+
2241
+ def query(self, sql: str, **kwargs):
2242
+ """Execute a DuckDB query against the DuckLake database.
2243
+
2244
+ Args:
2245
+ sql: SQL query string with $name placeholders
2246
+ **kwargs: Named arguments to bind to query placeholders
2247
+
2248
+ Returns:
2249
+ SqlQuery instance for fetching results
2250
+ """
2251
+ args_dict = {}
2252
+ args_def = ""
2253
+ for key, value in kwargs.items():
2254
+ args_dict[key] = value
2255
+ args_def += f"-- ${key} ({infer_sql_type(value)})\n"
2256
+ attach = f"ATTACH 'ducklake://{self.name}' AS dl;USE dl;\n"
2257
+ sql = args_def + attach + sql
2258
+ return SqlQuery(
2259
+ sql,
2260
+ lambda sql: self.client.run_inline_script_preview(
2261
+ content=sql,
2262
+ language="duckdb",
2263
+ args=args_dict,
2264
+ )
2265
+ )
2266
+
2267
+ class SqlQuery:
2268
+ """Query result handler for DataTable and DuckLake queries."""
2269
+
2270
+ def __init__(self, sql: str, fetch_fn):
2271
+ """Initialize SqlQuery.
2272
+
2273
+ Args:
2274
+ sql: SQL query string
2275
+ fetch_fn: Function to execute the query
2276
+ """
2277
+ self.sql = sql
2278
+ self.fetch_fn = fetch_fn
2279
+
2280
+ def fetch(self, result_collection: str | None = None):
2281
+ """Execute query and fetch results.
2282
+
2283
+ Args:
2284
+ result_collection: Optional result collection mode
2285
+
2286
+ Returns:
2287
+ Query results
2288
+ """
2289
+ sql = self.sql
2290
+ if result_collection is not None:
2291
+ sql = f'-- result_collection={result_collection}\n{sql}'
2292
+ return self.fetch_fn(sql)
2293
+
2294
+ def fetch_one(self):
2295
+ """Execute query and fetch first row of results.
2296
+
2297
+ Returns:
2298
+ First row of query results
2299
+ """
2300
+ return self.fetch(result_collection="last_statement_first_row")
2301
+
2302
+ def fetch_one_scalar(self):
2303
+ """Execute query and fetch first row of results. Return result as a scalar value.
2304
+
2305
+ Returns:
2306
+ First row of query result as a scalar value
2307
+ """
2308
+ return self.fetch(result_collection="last_statement_first_row_scalar")
2309
+
2310
+ def execute(self):
2311
+ """Execute query and don't return any results.
2312
+ """
2313
+ self.fetch_one()
2314
+
2315
+ def infer_sql_type(value) -> str:
2316
+ """
2317
+ DuckDB executor requires explicit argument types at declaration
2318
+ These types exist in both DuckDB and Postgres
2319
+ Check that the types exist if you plan to extend this function for other SQL engines.
2320
+ """
2321
+ if isinstance(value, bool):
2322
+ # Check bool before int since bool is a subclass of int in Python
2323
+ return "BOOLEAN"
2324
+ elif isinstance(value, int):
2325
+ return "BIGINT"
2326
+ elif isinstance(value, float):
2327
+ return "DOUBLE PRECISION"
2328
+ elif value is None:
2329
+ return "TEXT"
2330
+ elif isinstance(value, str):
2331
+ return "TEXT"
2332
+ elif isinstance(value, dict) or isinstance(value, list):
2333
+ return "JSON"
2334
+ else:
2335
+ return "TEXT"
2336
+
2337
+ def parse_sql_client_name(name: str) -> tuple[str, Optional[str]]:
2338
+ name = name
2339
+ schema = None
2340
+ if ":" in name:
2341
+ name, schema = name.split(":", 1)
2342
+ if not name:
2343
+ name = "main"
2344
+ return name, schema
@@ -5,6 +5,15 @@ import httpx
5
5
 
6
6
 
7
7
  class S3BufferedReader(BufferedReader):
8
+ """Streaming buffered reader for S3 files via Windmill's S3 proxy.
9
+
10
+ Args:
11
+ workspace: Windmill workspace ID
12
+ windmill_client: HTTP client for Windmill API
13
+ file_key: S3 file key/path
14
+ s3_resource_path: Optional path to S3 resource configuration
15
+ storage: Optional storage backend identifier
16
+ """
8
17
  def __init__(self, workspace: str, windmill_client: httpx.Client, file_key: str, s3_resource_path: Optional[str], storage: Optional[str]):
9
18
  params = {
10
19
  "file_key": file_key,
@@ -62,6 +71,14 @@ class S3BufferedReader(BufferedReader):
62
71
 
63
72
 
64
73
  def bytes_generator(buffered_reader: Union[BufferedReader, BytesIO]):
74
+ """Yield 50KB chunks from a buffered reader.
75
+
76
+ Args:
77
+ buffered_reader: File-like object to read from
78
+
79
+ Yields:
80
+ Bytes chunks of up to 50KB
81
+ """
65
82
  while True:
66
83
  byte = buffered_reader.read(50 * 1024)
67
84
  if not byte:
@@ -2,6 +2,7 @@ from typing import Optional
2
2
 
3
3
 
4
4
  class S3Object(dict):
5
+ """S3 file reference with file key, optional storage identifier, and presigned token."""
5
6
  s3: str
6
7
  storage: Optional[str]
7
8
  presigned: Optional[str]
@@ -11,6 +12,7 @@ class S3Object(dict):
11
12
 
12
13
 
13
14
  class S3FsClientKwargs(dict):
15
+ """S3FS client keyword arguments for region configuration."""
14
16
  region_name: str
15
17
 
16
18
  def __getattr__(self, attr):
@@ -18,6 +20,7 @@ class S3FsClientKwargs(dict):
18
20
 
19
21
 
20
22
  class S3FsArgs(dict):
23
+ """S3FS connection arguments including endpoint, credentials, and client settings."""
21
24
  endpoint_url: str
22
25
  key: str
23
26
  secret: str
@@ -30,6 +33,7 @@ class S3FsArgs(dict):
30
33
 
31
34
 
32
35
  class StorageOptions(dict):
36
+ """Storage options for Polars S3 connectivity with AWS credentials and endpoint."""
33
37
  aws_endpoint_url: str
34
38
  aws_access_key_id: str
35
39
  aws_secret_access_key: str
@@ -41,6 +45,7 @@ class StorageOptions(dict):
41
45
 
42
46
 
43
47
  class PolarsConnectionSettings(dict):
48
+ """Polars S3 connection settings containing S3FS args and storage options."""
44
49
  s3fs_args: S3FsArgs
45
50
  storage_options: StorageOptions
46
51
 
@@ -49,6 +54,7 @@ class PolarsConnectionSettings(dict):
49
54
 
50
55
 
51
56
  class Boto3ConnectionSettings(dict):
57
+ """Boto3 S3 connection settings with endpoint, region, and AWS credentials."""
52
58
  endpoint_url: str
53
59
  region_name: str
54
60
  use_ssl: bool
@@ -60,6 +66,7 @@ class Boto3ConnectionSettings(dict):
60
66
 
61
67
 
62
68
  class DuckDbConnectionSettings(dict):
69
+ """DuckDB S3 connection settings as a configuration string."""
63
70
  connection_settings_str: str
64
71
 
65
72
  def __getattr__(self, attr):
File without changes
File without changes
File without changes