wmill 1.568.0-py3-none-any.whl → 1.598.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
wmill/client.py CHANGED
@@ -32,7 +32,17 @@ JobStatus = Literal["RUNNING", "WAITING", "COMPLETED"]
32
32
 
33
33
 
34
34
  class Windmill:
35
+ """Windmill client for interacting with the Windmill API."""
36
+
35
37
  def __init__(self, base_url=None, token=None, workspace=None, verify=True):
38
+ """Initialize the Windmill client.
39
+
40
+ Args:
41
+ base_url: API base URL (defaults to BASE_INTERNAL_URL or WM_BASE_URL env)
42
+ token: Authentication token (defaults to WM_TOKEN env)
43
+ workspace: Workspace ID (defaults to WM_WORKSPACE env)
44
+ verify: Whether to verify SSL certificates
45
+ """
36
46
  base = (
37
47
  base_url
38
48
  or os.environ.get("BASE_INTERNAL_URL")
@@ -75,6 +85,11 @@ class Windmill:
75
85
  return mocked_api
76
86
 
77
87
  def get_client(self) -> httpx.Client:
88
+ """Get the HTTP client instance.
89
+
90
+ Returns:
91
+ Configured httpx.Client for API requests
92
+ """
78
93
  return httpx.Client(
79
94
  base_url=self.base_url,
80
95
  headers=self.headers,
@@ -82,6 +97,16 @@ class Windmill:
82
97
  )
83
98
 
84
99
  def get(self, endpoint, raise_for_status=True, **kwargs) -> httpx.Response:
100
+ """Make an HTTP GET request to the Windmill API.
101
+
102
+ Args:
103
+ endpoint: API endpoint path
104
+ raise_for_status: Whether to raise an exception on HTTP errors
105
+ **kwargs: Additional arguments passed to httpx.get
106
+
107
+ Returns:
108
+ HTTP response object
109
+ """
85
110
  endpoint = endpoint.lstrip("/")
86
111
  resp = self.client.get(f"/{endpoint}", **kwargs)
87
112
  if raise_for_status:
@@ -94,6 +119,16 @@ class Windmill:
94
119
  return resp
95
120
 
96
121
  def post(self, endpoint, raise_for_status=True, **kwargs) -> httpx.Response:
122
+ """Make an HTTP POST request to the Windmill API.
123
+
124
+ Args:
125
+ endpoint: API endpoint path
126
+ raise_for_status: Whether to raise an exception on HTTP errors
127
+ **kwargs: Additional arguments passed to httpx.post
128
+
129
+ Returns:
130
+ HTTP response object
131
+ """
97
132
  endpoint = endpoint.lstrip("/")
98
133
  resp = self.client.post(f"/{endpoint}", **kwargs)
99
134
  if raise_for_status:
@@ -106,6 +141,14 @@ class Windmill:
106
141
  return resp
107
142
 
108
143
  def create_token(self, duration=dt.timedelta(days=1)) -> str:
144
+ """Create a new authentication token.
145
+
146
+ Args:
147
+ duration: Token validity duration (default: 1 day)
148
+
149
+ Returns:
150
+ New authentication token string
151
+ """
109
152
  endpoint = "/users/tokens/create"
110
153
  payload = {
111
154
  "label": f"refresh {time.time()}",
@@ -276,6 +319,21 @@ class Windmill:
276
319
  cleanup=cleanup, assert_result_is_not_none=assert_result_is_not_none
277
320
  )
278
321
 
322
+ def run_inline_script_preview(
323
+ self,
324
+ content: str,
325
+ language: str,
326
+ args: dict = None,
327
+ ) -> Any:
328
+ """Run a script on the current worker without creating a job"""
329
+ endpoint = f"/w/{self.workspace}/jobs/run_inline/preview"
330
+ body = {
331
+ "content": content,
332
+ "language": language,
333
+ "args": args or {},
334
+ }
335
+ return self.post(endpoint, json=body).json()
336
+
279
337
  def wait_job(
280
338
  self,
281
339
  job_id,
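
A rough usage sketch for the new run_inline_script_preview helper. The script body is arbitrary and the "python3" language identifier is an assumption; the diff itself only shows "postgresql" and "duckdb" being passed by the SQL clients further down:

    # Hypothetical inline preview run on the current worker.
    result = client.run_inline_script_preview(
        content="def main(x: int):\n    return x * 2",
        language="python3",  # assumed language identifier
        args={"x": 21},
    )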
@@ -284,6 +342,22 @@ class Windmill:
284
342
  cleanup: bool = True,
285
343
  assert_result_is_not_none: bool = False,
286
344
  ):
345
+ """Wait for a job to complete and return its result.
346
+
347
+ Args:
348
+ job_id: ID of the job to wait for
349
+ timeout: Maximum time to wait (seconds or timedelta)
350
+ verbose: Enable verbose logging
351
+ cleanup: Register cleanup handler to cancel job on exit
352
+ assert_result_is_not_none: Raise exception if result is None
353
+
354
+ Returns:
355
+ Job result when completed
356
+
357
+ Raises:
358
+ TimeoutError: If timeout is reached
359
+ Exception: If job fails
360
+ """
287
361
  def cancel_job():
288
362
  logger.warning(f"cancelling job: {job_id}")
289
363
  self.post(
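
wait_job pairs naturally with the async run helpers; an illustrative sketch in which the script path is a placeholder:

    # Start a job without blocking, then wait up to 60 seconds for its result.
    job_id = client.run_script_by_path_async("f/examples/hello", args={"name": "world"})
    result = client.wait_job(job_id, timeout=60, verbose=True)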
@@ -392,16 +466,52 @@ class Windmill:
392
466
  return result
393
467
 
394
468
  def get_job(self, job_id: str) -> dict:
469
+ """Get job details by ID.
470
+
471
+ Args:
472
+ job_id: UUID of the job
473
+
474
+ Returns:
475
+ Job details dictionary
476
+ """
395
477
  return self.get(f"/w/{self.workspace}/jobs_u/get/{job_id}").json()
396
478
 
397
479
  def get_root_job_id(self, job_id: str | None = None) -> dict:
480
+ """Get the root job ID for a flow hierarchy.
481
+
482
+ Args:
483
+ job_id: Job ID (defaults to current WM_JOB_ID)
484
+
485
+ Returns:
486
+ Root job ID
487
+ """
398
488
  job_id = job_id or os.environ.get("WM_JOB_ID")
399
489
  return self.get(f"/w/{self.workspace}/jobs_u/get_root_job_id/{job_id}").json()
400
490
 
401
- def get_id_token(self, audience: str) -> str:
402
- return self.post(f"/w/{self.workspace}/oidc/token/{audience}").text
491
+ def get_id_token(self, audience: str, expires_in: int | None = None) -> str:
492
+ """Get an OIDC JWT token for authentication to external services.
493
+
494
+ Args:
495
+ audience: Token audience (e.g., "vault", "aws")
496
+ expires_in: Optional expiration time in seconds
497
+
498
+ Returns:
499
+ JWT token string
500
+ """
501
+ params = {}
502
+ if expires_in is not None:
503
+ params["expires_in"] = expires_in
504
+ return self.post(f"/w/{self.workspace}/oidc/token/{audience}", params=params).text
403
505
 
404
506
  def get_job_status(self, job_id: str) -> JobStatus:
507
+ """Get the status of a job.
508
+
509
+ Args:
510
+ job_id: UUID of the job
511
+
512
+ Returns:
513
+ Job status: "RUNNING", "WAITING", or "COMPLETED"
514
+ """
405
515
  job = self.get_job(job_id)
406
516
  job_type = job.get("type", "")
407
517
  assert job_type, f"{job} is not a valid job"
@@ -416,6 +526,15 @@ class Windmill:
416
526
  job_id: str,
417
527
  assert_result_is_not_none: bool = True,
418
528
  ) -> Any:
529
+ """Get the result of a completed job.
530
+
531
+ Args:
532
+ job_id: UUID of the completed job
533
+ assert_result_is_not_none: Raise exception if result is None
534
+
535
+ Returns:
536
+ Job result
537
+ """
419
538
  result = self.get(f"/w/{self.workspace}/jobs_u/completed/get_result/{job_id}")
420
539
  result_text = result.text
421
540
  if assert_result_is_not_none and result_text is None:
@@ -426,6 +545,14 @@ class Windmill:
426
545
  return result_text
427
546
 
428
547
  def get_variable(self, path: str) -> str:
548
+ """Get a variable value by path.
549
+
550
+ Args:
551
+ path: Variable path in Windmill
552
+
553
+ Returns:
554
+ Variable value as string
555
+ """
429
556
  path = parse_variable_syntax(path) or path
430
557
  if self.mocked_api is not None:
431
558
  variables = self.mocked_api["variables"]
@@ -436,17 +563,20 @@ class Windmill:
436
563
  logger.info(
437
564
  f"MockedAPI present, but variable not found at {path}, falling back to real API"
438
565
  )
439
-
440
- """Get variable from Windmill"""
441
566
  return self.get(f"/w/{self.workspace}/variables/get_value/{path}").json()
442
567
 
443
568
  def set_variable(self, path: str, value: str, is_secret: bool = False) -> None:
569
+ """Set a variable value by path, creating it if it doesn't exist.
570
+
571
+ Args:
572
+ path: Variable path in Windmill
573
+ value: Variable value to set
574
+ is_secret: Whether the variable should be secret (default: False)
575
+ """
444
576
  path = parse_variable_syntax(path) or path
445
577
  if self.mocked_api is not None:
446
578
  self.mocked_api["variables"][path] = value
447
579
  return
448
-
449
- """Set variable from Windmill"""
450
580
  # check if variable exists
451
581
  r = self.get(
452
582
  f"/w/{self.workspace}/variables/get/{path}", raise_for_status=False
@@ -474,6 +604,15 @@ class Windmill:
474
604
  path: str,
475
605
  none_if_undefined: bool = False,
476
606
  ) -> dict | None:
607
+ """Get a resource value by path.
608
+
609
+ Args:
610
+ path: Resource path in Windmill
611
+ none_if_undefined: Return None instead of raising if not found
612
+
613
+ Returns:
614
+ Resource value dictionary or None
615
+ """
477
616
  path = parse_resource_syntax(path) or path
478
617
  if self.mocked_api is not None:
479
618
  resources = self.mocked_api["resources"]
@@ -490,8 +629,6 @@ class Windmill:
490
629
  logger.info(
491
630
  f"MockedAPI present, but resource not found at ${path}, falling back to real API"
492
631
  )
493
-
494
- """Get resource from Windmill"""
495
632
  try:
496
633
  return self.get(
497
634
  f"/w/{self.workspace}/resources/get_value_interpolated/{path}"
@@ -508,6 +645,13 @@ class Windmill:
508
645
  path: str,
509
646
  resource_type: str,
510
647
  ):
648
+ """Set a resource value by path, creating it if it doesn't exist.
649
+
650
+ Args:
651
+ value: Resource value to set
652
+ path: Resource path in Windmill
653
+ resource_type: Resource type for creation
654
+ """
511
655
  path = parse_resource_syntax(path) or path
512
656
  if self.mocked_api is not None:
513
657
  self.mocked_api["resources"][path] = value
@@ -534,10 +678,50 @@ class Windmill:
534
678
  json={"value": value},
535
679
  )
536
680
 
681
+ def list_resources(
682
+ self,
683
+ resource_type: str = None,
684
+ page: int = None,
685
+ per_page: int = None,
686
+ ) -> list[dict]:
687
+ """List resources from Windmill workspace.
688
+
689
+ Args:
690
+ resource_type: Optional resource type to filter by (e.g., "postgresql", "mysql", "s3")
691
+ page: Optional page number for pagination
692
+ per_page: Optional number of results per page
693
+
694
+ Returns:
695
+ List of resource dictionaries
696
+ """
697
+ params = {}
698
+ if resource_type is not None:
699
+ params["resource_type"] = resource_type
700
+ if page is not None:
701
+ params["page"] = page
702
+ if per_page is not None:
703
+ params["per_page"] = per_page
704
+
705
+ return self.get(
706
+ f"/w/{self.workspace}/resources/list",
707
+ params=params if params else None,
708
+ ).json()
709
+
537
710
  def set_state(self, value: Any):
711
+ """Set the workflow state.
712
+
713
+ Args:
714
+ value: State value to set
715
+ """
538
716
  self.set_resource(value, path=self.state_path, resource_type="state")
539
717
 
540
718
  def set_progress(self, value: int, job_id: Optional[str] = None):
719
+ """Set job progress percentage (0-99).
720
+
721
+ Args:
722
+ value: Progress percentage
723
+ job_id: Job ID (defaults to current WM_JOB_ID)
724
+ """
541
725
  workspace = get_workspace()
542
726
  flow_id = os.environ.get("WM_FLOW_JOB_ID")
543
727
  job_id = job_id or os.environ.get("WM_JOB_ID")
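
The new list_resources method mirrors the module-level helper added further down; a paginated call might look like this (the filter value and page sizes are illustrative):

    # First 20 PostgreSQL resources in the workspace.
    pg_resources = client.list_resources(resource_type="postgresql", page=1, per_page=20)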
@@ -555,6 +739,14 @@ class Windmill:
555
739
  )
556
740
 
557
741
  def get_progress(self, job_id: Optional[str] = None) -> Any:
742
+ """Get job progress percentage.
743
+
744
+ Args:
745
+ job_id: Job ID (defaults to current WM_JOB_ID)
746
+
747
+ Returns:
748
+ Progress value (0-100) or None if not set
749
+ """
558
750
  workspace = get_workspace()
559
751
  job_id = job_id or os.environ.get("WM_JOB_ID")
560
752
 
@@ -593,6 +785,11 @@ class Windmill:
593
785
 
594
786
  @property
595
787
  def version(self):
788
+ """Get the Windmill server version.
789
+
790
+ Returns:
791
+ Version string
792
+ """
596
793
  return self.get("version").text
597
794
 
598
795
  def get_duckdb_connection_settings(
@@ -769,16 +966,107 @@ class Windmill:
769
966
  return S3Object(s3=response["file_key"])
770
967
 
771
968
  def sign_s3_objects(self, s3_objects: list[S3Object | str]) -> list[S3Object]:
969
+ """Sign S3 objects for use by anonymous users in public apps.
970
+
971
+ Args:
972
+ s3_objects: List of S3 objects to sign
973
+
974
+ Returns:
975
+ List of signed S3 objects
976
+ """
772
977
  return self.post(
773
978
  f"/w/{self.workspace}/apps/sign_s3_objects", json={"s3_objects": list(map(parse_s3_object, s3_objects))}
774
979
  ).json()
775
980
 
776
981
  def sign_s3_object(self, s3_object: S3Object | str) -> S3Object:
982
+ """Sign a single S3 object for use by anonymous users in public apps.
983
+
984
+ Args:
985
+ s3_object: S3 object to sign
986
+
987
+ Returns:
988
+ Signed S3 object
989
+ """
777
990
  return self.post(
778
991
  f"/w/{self.workspace}/apps/sign_s3_objects",
779
992
  json={"s3_objects": [s3_object]},
780
993
  ).json()[0]
781
994
 
995
+ def get_presigned_s3_public_urls(
996
+ self,
997
+ s3_objects: list[S3Object | str],
998
+ base_url: str | None = None,
999
+ ) -> list[str]:
1000
+ """
1001
+ Generate presigned public URLs for an array of S3 objects.
1002
+ If an S3 object is not signed yet, it will be signed first.
1003
+
1004
+ Args:
1005
+ s3_objects: List of S3 objects to sign
1006
+ base_url: Optional base URL for the presigned URLs (defaults to WM_BASE_URL)
1007
+
1008
+ Returns:
1009
+ List of signed public URLs
1010
+
1011
+ Example:
1012
+ >>> s3_objs = [S3Object(s3="/path/to/file1.txt"), S3Object(s3="/path/to/file2.txt")]
1013
+ >>> urls = client.get_presigned_s3_public_urls(s3_objs)
1014
+ """
1015
+ base_url = base_url or self._get_public_base_url()
1016
+
1017
+ s3_objs = [parse_s3_object(s3_obj) for s3_obj in s3_objects]
1018
+
1019
+ # Sign all S3 objects that need to be signed in one go
1020
+ s3_objs_to_sign: list[tuple[S3Object, int]] = [
1021
+ (s3_obj, index)
1022
+ for index, s3_obj in enumerate(s3_objs)
1023
+ if s3_obj.get("presigned") is None
1024
+ ]
1025
+
1026
+ if s3_objs_to_sign:
1027
+ signed_s3_objs = self.sign_s3_objects(
1028
+ [s3_obj for s3_obj, _ in s3_objs_to_sign]
1029
+ )
1030
+ for i, (_, original_index) in enumerate(s3_objs_to_sign):
1031
+ s3_objs[original_index] = parse_s3_object(signed_s3_objs[i])
1032
+
1033
+ signed_urls: list[str] = []
1034
+ for s3_obj in s3_objs:
1035
+ s3 = s3_obj.get("s3", "")
1036
+ presigned = s3_obj.get("presigned", "")
1037
+ storage = s3_obj.get("storage", "_default_")
1038
+ signed_url = f"{base_url}/api/w/{self.workspace}/s3_proxy/{storage}/{s3}?{presigned}"
1039
+ signed_urls.append(signed_url)
1040
+
1041
+ return signed_urls
1042
+
1043
+ def get_presigned_s3_public_url(
1044
+ self,
1045
+ s3_object: S3Object | str,
1046
+ base_url: str | None = None,
1047
+ ) -> str:
1048
+ """
1049
+ Generate a presigned public URL for an S3 object.
1050
+ If the S3 object is not signed yet, it will be signed first.
1051
+
1052
+ Args:
1053
+ s3_object: S3 object to sign
1054
+ base_url: Optional base URL for the presigned URL (defaults to WM_BASE_URL)
1055
+
1056
+ Returns:
1057
+ Signed public URL
1058
+
1059
+ Example:
1060
+ >>> s3_obj = S3Object(s3="/path/to/file.txt")
1061
+ >>> url = client.get_presigned_s3_public_url(s3_obj)
1062
+ """
1063
+ urls = self.get_presigned_s3_public_urls([s3_object], base_url)
1064
+ return urls[0]
1065
+
1066
+ def _get_public_base_url(self) -> str:
1067
+ """Get the public base URL from environment or default to localhost"""
1068
+ return os.environ.get("WM_BASE_URL", "http://localhost:3000")
1069
+
782
1070
  def __boto3_connection_settings(self, s3_resource) -> Boto3ConnectionSettings:
783
1071
  endpoint_url_prefix = "https://" if s3_resource["useSSL"] else "http://"
784
1072
  return Boto3ConnectionSettings(
@@ -795,14 +1083,29 @@ class Windmill:
795
1083
  )
796
1084
 
797
1085
  def whoami(self) -> dict:
1086
+ """Get the current user information.
1087
+
1088
+ Returns:
1089
+ User details dictionary
1090
+ """
798
1091
  return self.get("/users/whoami").json()
799
1092
 
800
1093
  @property
801
1094
  def user(self) -> dict:
1095
+ """Get the current user information (alias for whoami).
1096
+
1097
+ Returns:
1098
+ User details dictionary
1099
+ """
802
1100
  return self.whoami()
803
1101
 
804
1102
  @property
805
1103
  def state_path(self) -> str:
1104
+ """Get the state resource path from environment.
1105
+
1106
+ Returns:
1107
+ State path string
1108
+ """
806
1109
  state_path = os.environ.get(
807
1110
  "WM_STATE_PATH_NEW", os.environ.get("WM_STATE_PATH")
808
1111
  )
@@ -812,10 +1115,16 @@ class Windmill:
812
1115
 
813
1116
  @property
814
1117
  def state(self) -> Any:
1118
+ """Get the workflow state.
1119
+
1120
+ Returns:
1121
+ State value or None if not set
1122
+ """
815
1123
  return self.get_resource(path=self.state_path, none_if_undefined=True)
816
1124
 
817
1125
  @state.setter
818
1126
  def state(self, value: Any) -> None:
1127
+ """Set the workflow state."""
819
1128
  self.set_state(value)
820
1129
 
821
1130
  @staticmethod
@@ -859,6 +1168,14 @@ class Windmill:
859
1168
  return json.load(f)
860
1169
 
861
1170
  def get_resume_urls(self, approver: str = None) -> dict:
1171
+ """Get URLs needed for resuming a flow after suspension.
1172
+
1173
+ Args:
1174
+ approver: Optional approver name
1175
+
1176
+ Returns:
1177
+ Dictionary with approvalPage, resume, and cancel URLs
1178
+ """
862
1179
  nonce = random.randint(0, 1000000000)
863
1180
  job_id = os.environ.get("WM_JOB_ID") or "NO_ID"
864
1181
  return self.get(
@@ -971,6 +1288,29 @@ class Windmill:
971
1288
  },
972
1289
  )
973
1290
 
1291
+ def datatable(self, name: str = "main"):
1292
+ """Get a DataTable client for SQL queries.
1293
+
1294
+ Args:
1295
+ name: Database name (default: "main")
1296
+
1297
+ Returns:
1298
+ DataTableClient instance
1299
+ """
1300
+ return DataTableClient(self, name)
1301
+
1302
+ def ducklake(self, name: str = "main"):
1303
+ """Get a DuckLake client for DuckDB queries.
1304
+
1305
+ Args:
1306
+ name: Database name (default: "main")
1307
+
1308
+ Returns:
1309
+ DucklakeClient instance
1310
+ """
1311
+ return DucklakeClient(self, name)
1312
+
1313
+
974
1314
 
975
1315
  def init_global_client(f):
976
1316
  @functools.wraps(f)
@@ -1003,11 +1343,24 @@ def deprecate(in_favor_of: str):
1003
1343
 
1004
1344
  @init_global_client
1005
1345
  def get_workspace() -> str:
1346
+ """Get the current workspace ID.
1347
+
1348
+ Returns:
1349
+ Workspace ID string
1350
+ """
1006
1351
  return _client.workspace
1007
1352
 
1008
1353
 
1009
1354
  @init_global_client
1010
1355
  def get_root_job_id(job_id: str | None = None) -> str:
1356
+ """Get the root job ID for a flow hierarchy.
1357
+
1358
+ Args:
1359
+ job_id: Job ID (defaults to current WM_JOB_ID)
1360
+
1361
+ Returns:
1362
+ Root job ID
1363
+ """
1011
1364
  return _client.get_root_job_id(job_id)
1012
1365
 
1013
1366
 
@@ -1023,6 +1376,16 @@ def run_script_async(
1023
1376
  args: Dict[str, Any] = None,
1024
1377
  scheduled_in_secs: int = None,
1025
1378
  ) -> str:
1379
+ """Create a script job and return its job ID.
1380
+
1381
+ Args:
1382
+ hash_or_path: Script hash or path (determined by presence of '/')
1383
+ args: Script arguments
1384
+ scheduled_in_secs: Delay before execution in seconds
1385
+
1386
+ Returns:
1387
+ Job ID string
1388
+ """
1026
1389
  is_path = "/" in hash_or_path
1027
1390
  hash_ = None if is_path else hash_or_path
1028
1391
  path = hash_or_path if is_path else None
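
run_script_async dispatches on the presence of a "/" in its first argument; a sketch with placeholder identifiers:

    import wmill

    # Contains "/", so it is treated as a path.
    job_by_path = wmill.run_script_async("f/examples/hello", args={"name": "world"})
    # No "/", so it is treated as a script hash (the value is a placeholder).
    job_by_hash = wmill.run_script_async("a1b2c3d4e5f6", args={}, scheduled_in_secs=30)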
@@ -1044,6 +1407,17 @@ def run_flow_async(
1044
1407
  # lead to incorrectness and failures
1045
1408
  do_not_track_in_parent: bool = True,
1046
1409
  ) -> str:
1410
+ """Create a flow job and return its job ID.
1411
+
1412
+ Args:
1413
+ path: Flow path
1414
+ args: Flow arguments
1415
+ scheduled_in_secs: Delay before execution in seconds
1416
+ do_not_track_in_parent: Whether to avoid tracking this job in the parent job (default: True)
1417
+
1418
+ Returns:
1419
+ Job ID string
1420
+ """
1047
1421
  return _client.run_flow_async(
1048
1422
  path=path,
1049
1423
  args=args,
@@ -1061,6 +1435,19 @@ def run_script_sync(
1061
1435
  cleanup: bool = True,
1062
1436
  timeout: dt.timedelta = None,
1063
1437
  ) -> Any:
1438
+ """Run a script synchronously by hash and return its result.
1439
+
1440
+ Args:
1441
+ hash: Script hash
1442
+ args: Script arguments
1443
+ verbose: Enable verbose logging
1444
+ assert_result_is_not_none: Raise exception if result is None
1445
+ cleanup: Register cleanup handler to cancel job on exit
1446
+ timeout: Maximum time to wait
1447
+
1448
+ Returns:
1449
+ Script result
1450
+ """
1064
1451
  return _client.run_script(
1065
1452
  hash_=hash,
1066
1453
  args=args,
@@ -1077,6 +1464,16 @@ def run_script_by_path_async(
1077
1464
  args: Dict[str, Any] = None,
1078
1465
  scheduled_in_secs: Union[None, int] = None,
1079
1466
  ) -> str:
1467
+ """Create a script job by path and return its job ID.
1468
+
1469
+ Args:
1470
+ path: Script path
1471
+ args: Script arguments
1472
+ scheduled_in_secs: Delay before execution in seconds
1473
+
1474
+ Returns:
1475
+ Job ID string
1476
+ """
1080
1477
  return _client.run_script_by_path_async(
1081
1478
  path=path,
1082
1479
  args=args,
@@ -1090,6 +1487,16 @@ def run_script_by_hash_async(
1090
1487
  args: Dict[str, Any] = None,
1091
1488
  scheduled_in_secs: Union[None, int] = None,
1092
1489
  ) -> str:
1490
+ """Create a script job by hash and return its job ID.
1491
+
1492
+ Args:
1493
+ hash_: Script hash
1494
+ args: Script arguments
1495
+ scheduled_in_secs: Delay before execution in seconds
1496
+
1497
+ Returns:
1498
+ Job ID string
1499
+ """
1093
1500
  return _client.run_script_by_hash_async(
1094
1501
  hash_=hash_,
1095
1502
  args=args,
@@ -1106,6 +1513,19 @@ def run_script_by_path_sync(
1106
1513
  cleanup: bool = True,
1107
1514
  timeout: dt.timedelta = None,
1108
1515
  ) -> Any:
1516
+ """Run a script synchronously by path and return its result.
1517
+
1518
+ Args:
1519
+ path: Script path
1520
+ args: Script arguments
1521
+ verbose: Enable verbose logging
1522
+ assert_result_is_not_none: Raise exception if result is None
1523
+ cleanup: Register cleanup handler to cancel job on exit
1524
+ timeout: Maximum time to wait
1525
+
1526
+ Returns:
1527
+ Script result
1528
+ """
1109
1529
  return _client.run_script(
1110
1530
  path=path,
1111
1531
  args=args,
@@ -1126,11 +1546,28 @@ def get_id_token(audience: str) -> str:
1126
1546
 
1127
1547
  @init_global_client
1128
1548
  def get_job_status(job_id: str) -> JobStatus:
1549
+ """Get the status of a job.
1550
+
1551
+ Args:
1552
+ job_id: UUID of the job
1553
+
1554
+ Returns:
1555
+ Job status: "RUNNING", "WAITING", or "COMPLETED"
1556
+ """
1129
1557
  return _client.get_job_status(job_id)
1130
1558
 
1131
1559
 
1132
1560
  @init_global_client
1133
1561
  def get_result(job_id: str, assert_result_is_not_none=True) -> Dict[str, Any]:
1562
+ """Get the result of a completed job.
1563
+
1564
+ Args:
1565
+ job_id: UUID of the completed job
1566
+ assert_result_is_not_none: Raise exception if result is None
1567
+
1568
+ Returns:
1569
+ Job result
1570
+ """
1134
1571
  return _client.get_result(
1135
1572
  job_id=job_id, assert_result_is_not_none=assert_result_is_not_none
1136
1573
  )
@@ -1229,6 +1666,56 @@ def sign_s3_object(s3_object: S3Object| str) -> S3Object:
1229
1666
  return _client.sign_s3_object(s3_object)
1230
1667
 
1231
1668
 
1669
+ @init_global_client
1670
+ def get_presigned_s3_public_urls(
1671
+ s3_objects: list[S3Object | str],
1672
+ base_url: str | None = None,
1673
+ ) -> list[str]:
1674
+ """
1675
+ Generate presigned public URLs for an array of S3 objects.
1676
+ If an S3 object is not signed yet, it will be signed first.
1677
+
1678
+ Args:
1679
+ s3_objects: List of S3 objects to sign
1680
+ base_url: Optional base URL for the presigned URLs (defaults to WM_BASE_URL)
1681
+
1682
+ Returns:
1683
+ List of signed public URLs
1684
+
1685
+ Example:
1686
+ >>> import wmill
1687
+ >>> from wmill import S3Object
1688
+ >>> s3_objs = [S3Object(s3="/path/to/file1.txt"), S3Object(s3="/path/to/file2.txt")]
1689
+ >>> urls = wmill.get_presigned_s3_public_urls(s3_objs)
1690
+ """
1691
+ return _client.get_presigned_s3_public_urls(s3_objects, base_url)
1692
+
1693
+
1694
+ @init_global_client
1695
+ def get_presigned_s3_public_url(
1696
+ s3_object: S3Object | str,
1697
+ base_url: str | None = None,
1698
+ ) -> str:
1699
+ """
1700
+ Generate a presigned public URL for an S3 object.
1701
+ If the S3 object is not signed yet, it will be signed first.
1702
+
1703
+ Args:
1704
+ s3_object: S3 object to sign
1705
+ base_url: Optional base URL for the presigned URL (defaults to WM_BASE_URL)
1706
+
1707
+ Returns:
1708
+ Signed public URL
1709
+
1710
+ Example:
1711
+ >>> import wmill
1712
+ >>> from wmill import S3Object
1713
+ >>> s3_obj = S3Object(s3="/path/to/file.txt")
1714
+ >>> url = wmill.get_presigned_s3_public_url(s3_obj)
1715
+ """
1716
+ return _client.get_presigned_s3_public_url(s3_object, base_url)
1717
+
1718
+
1232
1719
  @init_global_client
1233
1720
  def whoami() -> dict:
1234
1721
  """
@@ -1263,6 +1750,36 @@ def set_resource(path: str, value: Any, resource_type: str = "any") -> None:
1263
1750
  return _client.set_resource(value=value, path=path, resource_type=resource_type)
1264
1751
 
1265
1752
 
1753
+ @init_global_client
1754
+ def list_resources(
1755
+ resource_type: str = None,
1756
+ page: int = None,
1757
+ per_page: int = None,
1758
+ ) -> list[dict]:
1759
+ """List resources from Windmill workspace.
1760
+
1761
+ Args:
1762
+ resource_type: Optional resource type to filter by (e.g., "postgresql", "mysql", "s3")
1763
+ page: Optional page number for pagination
1764
+ per_page: Optional number of results per page
1765
+
1766
+ Returns:
1767
+ List of resource dictionaries
1768
+
1769
+ Example:
1770
+ >>> # Get all resources
1771
+ >>> all_resources = wmill.list_resources()
1772
+
1773
+ >>> # Get only PostgreSQL resources
1774
+ >>> pg_resources = wmill.list_resources(resource_type="postgresql")
1775
+ """
1776
+ return _client.list_resources(
1777
+ resource_type=resource_type,
1778
+ page=page,
1779
+ per_page=per_page,
1780
+ )
1781
+
1782
+
1266
1783
  @init_global_client
1267
1784
  def set_state(value: Any) -> None:
1268
1785
  """
@@ -1350,11 +1867,24 @@ def set_flow_user_state(key: str, value: Any) -> None:
1350
1867
 
1351
1868
  @init_global_client
1352
1869
  def get_state_path() -> str:
1870
+ """Get the state resource path from environment.
1871
+
1872
+ Returns:
1873
+ State path string
1874
+ """
1353
1875
  return _client.state_path
1354
1876
 
1355
1877
 
1356
1878
  @init_global_client
1357
1879
  def get_resume_urls(approver: str = None) -> dict:
1880
+ """Get URLs needed for resuming a flow after suspension.
1881
+
1882
+ Args:
1883
+ approver: Optional approver name
1884
+
1885
+ Returns:
1886
+ Dictionary with approvalPage, resume, and cancel URLs
1887
+ """
1358
1888
  return _client.get_resume_urls(approver)
1359
1889
 
1360
1890
 
@@ -1381,6 +1911,17 @@ def request_interactive_slack_approval(
1381
1911
  def send_teams_message(
1382
1912
  conversation_id: str, text: str, success: bool, card_block: dict = None
1383
1913
  ):
1914
+ """Send a message to a Microsoft Teams conversation.
1915
+
1916
+ Args:
1917
+ conversation_id: Teams conversation ID
1918
+ text: Message text
1919
+ success: Whether to style as success message
1920
+ card_block: Optional adaptive card block
1921
+
1922
+ Returns:
1923
+ HTTP response from Teams
1924
+ """
1384
1925
  return _client.send_teams_message(conversation_id, text, success, card_block)
1385
1926
 
1386
1927
 
@@ -1468,6 +2009,18 @@ def run_script_by_hash(
1468
2009
  timeout=timeout,
1469
2010
  )
1470
2011
 
2012
+ @init_global_client
2013
+ def run_inline_script_preview(
2014
+ content: str,
2015
+ language: str,
2016
+ args: dict = None,
2017
+ ) -> Any:
2018
+ """Run a script on the current worker without creating a job"""
2019
+ return _client.run_inline_script_preview(
2020
+ content=content,
2021
+ language=language,
2022
+ args=args,
2023
+ )
1471
2024
 
1472
2025
  @init_global_client
1473
2026
  def username_to_email(username: str) -> str:
@@ -1479,7 +2032,42 @@ def username_to_email(username: str) -> str:
1479
2032
  return _client.username_to_email(username)
1480
2033
 
1481
2034
 
2035
+ @init_global_client
2036
+ def datatable(name: str = "main") -> DataTableClient:
2037
+ """Get a DataTable client for SQL queries.
2038
+
2039
+ Args:
2040
+ name: Database name (default: "main")
2041
+
2042
+ Returns:
2043
+ DataTableClient instance
2044
+ """
2045
+ return _client.datatable(name)
2046
+
2047
+ @init_global_client
2048
+ def ducklake(name: str = "main") -> DucklakeClient:
2049
+ """Get a DuckLake client for DuckDB queries.
2050
+
2051
+ Args:
2052
+ name: Database name (default: "main")
2053
+
2054
+ Returns:
2055
+ DucklakeClient instance
2056
+ """
2057
+ return _client.ducklake(name)
2058
+
1482
2059
  def task(*args, **kwargs):
2060
+ """Decorator to mark a function as a workflow task.
2061
+
2062
+ When executed inside a Windmill job, the decorated function runs as a
2063
+ separate workflow step. Outside Windmill, it executes normally.
2064
+
2065
+ Args:
2066
+ tag: Optional worker tag for execution
2067
+
2068
+ Returns:
2069
+ Decorated function
2070
+ """
1483
2071
  from inspect import signature
1484
2072
 
1485
2073
  def f(func, tag: str | None = None):
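
An illustrative use of the task decorator documented above, assuming task is re-exported at the package root and accepts the tag keyword shown in the inner f(func, tag=None) signature; both points are assumptions, not confirmed by this diff:

    import wmill

    # Assumed usage: inside a Windmill job this runs as a separate step,
    # outside Windmill it is an ordinary function call.
    @wmill.task(tag="highmem")  # tag value is a placeholder
    def heavy_step(n: int) -> int:
        return sum(range(n))

    total = heavy_step(1_000_000)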
@@ -1576,3 +2164,161 @@ def stream_result(stream) -> None:
1576
2164
  """
1577
2165
  for text in stream:
1578
2166
  append_to_result_stream(text)
2167
+
2168
+ class DataTableClient:
2169
+ """Client for executing SQL queries against Windmill DataTables."""
2170
+
2171
+ def __init__(self, client: Windmill, name: str):
2172
+ """Initialize DataTableClient.
2173
+
2174
+ Args:
2175
+ client: Windmill client instance
2176
+ name: DataTable name
2177
+ """
2178
+ self.client = client
2179
+ self.name, self.schema = parse_sql_client_name(name)
2180
+ def query(self, sql: str, *args) -> SqlQuery:
2181
+ """Execute a SQL query against the DataTable.
2182
+
2183
+ Args:
2184
+ sql: SQL query string with $1, $2, etc. placeholders
2185
+ *args: Positional arguments to bind to query placeholders
2186
+
2187
+ Returns:
2188
+ SqlQuery instance for fetching results
2189
+ """
2190
+ if self.schema is not None:
2191
+ sql = f'SET search_path TO "{self.schema}";\n' + sql
2192
+
2193
+ args_dict = {}
2194
+ args_def = ""
2195
+ for i, arg in enumerate(args):
2196
+ args_dict[f"arg{i+1}"] = arg
2197
+ args_def += f"-- ${i+1} arg{i+1}\n"
2198
+ sql = args_def + sql
2199
+ return SqlQuery(
2200
+ sql,
2201
+ lambda sql: self.client.run_inline_script_preview(
2202
+ content=sql,
2203
+ language="postgresql",
2204
+ args={"database": f"datatable://{self.name}", **args_dict},
2205
+ )
2206
+ )
2207
+
2208
+ class DucklakeClient:
2209
+ """Client for executing DuckDB queries against Windmill DuckLake."""
2210
+
2211
+ def __init__(self, client: Windmill, name: str):
2212
+ """Initialize DucklakeClient.
2213
+
2214
+ Args:
2215
+ client: Windmill client instance
2216
+ name: DuckLake database name
2217
+ """
2218
+ self.client = client
2219
+ self.name = name
2220
+
2221
+ def query(self, sql: str, **kwargs):
2222
+ """Execute a DuckDB query against the DuckLake database.
2223
+
2224
+ Args:
2225
+ sql: SQL query string with $name placeholders
2226
+ **kwargs: Named arguments to bind to query placeholders
2227
+
2228
+ Returns:
2229
+ SqlQuery instance for fetching results
2230
+ """
2231
+ args_dict = {}
2232
+ args_def = ""
2233
+ for key, value in kwargs.items():
2234
+ args_dict[key] = value
2235
+ args_def += f"-- ${key} ({infer_sql_type(value)})\n"
2236
+ attach = f"ATTACH 'ducklake://{self.name}' AS dl;USE dl;\n"
2237
+ sql = args_def + attach + sql
2238
+ return SqlQuery(
2239
+ sql,
2240
+ lambda sql: self.client.run_inline_script_preview(
2241
+ content=sql,
2242
+ language="duckdb",
2243
+ args=args_dict,
2244
+ )
2245
+ )
2246
+
2247
+ class SqlQuery:
2248
+ """Query result handler for DataTable and DuckLake queries."""
2249
+
2250
+ def __init__(self, sql: str, fetch_fn):
2251
+ """Initialize SqlQuery.
2252
+
2253
+ Args:
2254
+ sql: SQL query string
2255
+ fetch_fn: Function to execute the query
2256
+ """
2257
+ self.sql = sql
2258
+ self.fetch_fn = fetch_fn
2259
+
2260
+ def fetch(self, result_collection: str | None = None):
2261
+ """Execute query and fetch results.
2262
+
2263
+ Args:
2264
+ result_collection: Optional result collection mode
2265
+
2266
+ Returns:
2267
+ Query results
2268
+ """
2269
+ sql = self.sql
2270
+ if result_collection is not None:
2271
+ sql = f'-- result_collection={result_collection}\n{sql}'
2272
+ return self.fetch_fn(sql)
2273
+
2274
+ def fetch_one(self):
2275
+ """Execute query and fetch first row of results.
2276
+
2277
+ Returns:
2278
+ First row of query results
2279
+ """
2280
+ return self.fetch(result_collection="last_statement_first_row")
2281
+
2282
+ def fetch_one_scalar(self):
2283
+ """Execute query and fetch first row of results. Return result as a scalar value.
2284
+
2285
+ Returns:
2286
+ First row of query result as a scalar value
2287
+ """
2288
+ return self.fetch(result_collection="last_statement_first_row_scalar")
2289
+
2290
+ def execute(self):
2291
+ """Execute query and don't return any results.
2292
+ """
2293
+ self.fetch_one()
2294
+
2295
+ def infer_sql_type(value) -> str:
2296
+ """
2297
+ The DuckDB executor requires explicit argument types at declaration.
2298
+ These types exist in both DuckDB and Postgres.
2299
+ Check that these types exist if you plan to extend this function to other SQL engines.
2300
+ """
2301
+ if isinstance(value, bool):
2302
+ # Check bool before int since bool is a subclass of int in Python
2303
+ return "BOOLEAN"
2304
+ elif isinstance(value, int):
2305
+ return "BIGINT"
2306
+ elif isinstance(value, float):
2307
+ return "DOUBLE PRECISION"
2308
+ elif value is None:
2309
+ return "TEXT"
2310
+ elif isinstance(value, str):
2311
+ return "TEXT"
2312
+ elif isinstance(value, dict) or isinstance(value, list):
2313
+ return "JSON"
2314
+ else:
2315
+ return "TEXT"
2316
+
2317
+ def parse_sql_client_name(name: str) -> tuple[str, Optional[str]]:
2318
+ name = name
2319
+ schema = None
2320
+ if ":" in name:
2321
+ name, schema = name.split(":", 1)
2322
+ if not name:
2323
+ name = "main"
2324
+ return name, schema
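
Putting the new classes together, a usage sketch; the table names and argument values are illustrative, the "main:analytics" form exercises the schema syntax handled by parse_sql_client_name, and the datatable/ducklake entry points are assumed to be reachable as module-level wrappers as added above:

    import wmill

    # DataTable: positional args bind to $1, $2, ...; "main:analytics" selects
    # the "analytics" schema via parse_sql_client_name.
    rows = wmill.datatable("main:analytics").query(
        "SELECT id, name FROM users WHERE id = $1", 42
    ).fetch()

    # DuckLake: keyword args bind to $day etc., with types inferred by infer_sql_type.
    count = wmill.ducklake("main").query(
        "SELECT count(*) FROM events WHERE day = $day", day="2024-01-01"
    ).fetch_one_scalar()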
wmill/s3_reader.py CHANGED
@@ -5,6 +5,15 @@ import httpx
5
5
 
6
6
 
7
7
  class S3BufferedReader(BufferedReader):
8
+ """Streaming buffered reader for S3 files via Windmill's S3 proxy.
9
+
10
+ Args:
11
+ workspace: Windmill workspace ID
12
+ windmill_client: HTTP client for Windmill API
13
+ file_key: S3 file key/path
14
+ s3_resource_path: Optional path to S3 resource configuration
15
+ storage: Optional storage backend identifier
16
+ """
8
17
  def __init__(self, workspace: str, windmill_client: httpx.Client, file_key: str, s3_resource_path: Optional[str], storage: Optional[str]):
9
18
  params = {
10
19
  "file_key": file_key,
@@ -62,6 +71,14 @@ class S3BufferedReader(BufferedReader):
62
71
 
63
72
 
64
73
  def bytes_generator(buffered_reader: Union[BufferedReader, BytesIO]):
74
+ """Yield 50KB chunks from a buffered reader.
75
+
76
+ Args:
77
+ buffered_reader: File-like object to read from
78
+
79
+ Yields:
80
+ Bytes chunks of up to 50KB
81
+ """
65
82
  while True:
66
83
  byte = buffered_reader.read(50 * 1024)
67
84
  if not byte:
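
bytes_generator works on any file-like object; a self-contained sketch:

    from io import BytesIO
    from wmill.s3_reader import bytes_generator

    # Streams the buffer in chunks of at most 50 KB.
    for chunk in bytes_generator(BytesIO(b"hello world")):
        print(len(chunk))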
wmill/s3_types.py CHANGED
@@ -2,6 +2,7 @@ from typing import Optional
2
2
 
3
3
 
4
4
  class S3Object(dict):
5
+ """S3 file reference with file key, optional storage identifier, and presigned token."""
5
6
  s3: str
6
7
  storage: Optional[str]
7
8
  presigned: Optional[str]
@@ -11,6 +12,7 @@ class S3Object(dict):
11
12
 
12
13
 
13
14
  class S3FsClientKwargs(dict):
15
+ """S3FS client keyword arguments for region configuration."""
14
16
  region_name: str
15
17
 
16
18
  def __getattr__(self, attr):
@@ -18,6 +20,7 @@ class S3FsClientKwargs(dict):
18
20
 
19
21
 
20
22
  class S3FsArgs(dict):
23
+ """S3FS connection arguments including endpoint, credentials, and client settings."""
21
24
  endpoint_url: str
22
25
  key: str
23
26
  secret: str
@@ -30,6 +33,7 @@ class S3FsArgs(dict):
30
33
 
31
34
 
32
35
  class StorageOptions(dict):
36
+ """Storage options for Polars S3 connectivity with AWS credentials and endpoint."""
33
37
  aws_endpoint_url: str
34
38
  aws_access_key_id: str
35
39
  aws_secret_access_key: str
@@ -41,6 +45,7 @@ class StorageOptions(dict):
41
45
 
42
46
 
43
47
  class PolarsConnectionSettings(dict):
48
+ """Polars S3 connection settings containing S3FS args and storage options."""
44
49
  s3fs_args: S3FsArgs
45
50
  storage_options: StorageOptions
46
51
 
@@ -49,6 +54,7 @@ class PolarsConnectionSettings(dict):
49
54
 
50
55
 
51
56
  class Boto3ConnectionSettings(dict):
57
+ """Boto3 S3 connection settings with endpoint, region, and AWS credentials."""
52
58
  endpoint_url: str
53
59
  region_name: str
54
60
  use_ssl: bool
@@ -60,6 +66,7 @@ class Boto3ConnectionSettings(dict):
60
66
 
61
67
 
62
68
  class DuckDbConnectionSettings(dict):
69
+ """DuckDB S3 connection settings as a configuration string."""
63
70
  connection_settings_str: str
64
71
 
65
72
  def __getattr__(self, attr):
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: wmill
3
- Version: 1.568.0
3
+ Version: 1.598.0
4
4
  Summary: A client library for accessing Windmill server wrapping the Windmill client API
5
5
  Home-page: https://windmill.dev
6
6
  License: Apache-2.0
@@ -0,0 +1,8 @@
1
+ wmill/__init__.py,sha256=nGZnQPezTdrBnBW1D0JqUtm75Gdf_xi3tAcPGwHRZ5A,46
2
+ wmill/client.py,sha256=QhJbL2osMHO-v_ngjvB1_r8gtasS43_CwzOTsH6e5ek,72774
3
+ wmill/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26
4
+ wmill/s3_reader.py,sha256=ySd1R2F9krbyhFU_-ogGDZAWQsF2CkPgq7K7xCEvhDU,2662
5
+ wmill/s3_types.py,sha256=gQZRdQoDRp7IRS9MTgGAiHZr1MIdtrbWeN-NJ28kIkA,1804
6
+ wmill-1.598.0.dist-info/METADATA,sha256=xkigMdAvREyO7Va2dZ2WBGn4bvX_wVUdNI27o8yKlzI,2693
7
+ wmill-1.598.0.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
8
+ wmill-1.598.0.dist-info/RECORD,,
@@ -1,8 +0,0 @@
1
- wmill/__init__.py,sha256=nGZnQPezTdrBnBW1D0JqUtm75Gdf_xi3tAcPGwHRZ5A,46
2
- wmill/client.py,sha256=N-HehJJ8raRaz8z8gxHoWlbui1VnFcYm4_6vlQgzowM,51335
3
- wmill/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26
4
- wmill/s3_reader.py,sha256=Rq6m9T_SxReIi0OJA28FXSddHQRk2wUJCfbcw_FQ8Ao,2149
5
- wmill/s3_types.py,sha256=D4W1miV41Sa0YC_p7Jqr8j8Sp4Z2GgFAVsFR0ZYIrVM,1235
6
- wmill-1.568.0.dist-info/METADATA,sha256=4mDV2JKHrto6igyJliGrwBbJFNTmViYdE8cL98vMKSE,2693
7
- wmill-1.568.0.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
8
- wmill-1.568.0.dist-info/RECORD,,