wmill 1.258.4__tar.gz → 1.259.1__tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of wmill might be problematic.

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: wmill
-Version: 1.258.4
+Version: 1.259.1
 Summary: A client library for accessing Windmill server wrapping the Windmill client API
 Home-page: https://windmill.dev
 License: Apache-2.0
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "wmill"
-version = "1.258.4"
+version = "1.259.1"
 description = "A client library for accessing Windmill server wrapping the Windmill client API"
 license = "Apache-2.0"
 homepage = "https://windmill.dev"
@@ -386,7 +386,8 @@ class Windmill:
         file_content = my_obj_content.decode("utf-8")
         '''
         """
-        return self.load_s3_file_reader(s3object, s3_resource_path).read()
+        with self.load_s3_file_reader(s3object, s3_resource_path) as file_reader:
+            return file_reader.read()
 
     def load_s3_file_reader(self, s3object: S3Object, s3_resource_path: str | None) -> BufferedReader:
         """
@@ -396,24 +397,17 @@ class Windmill:
         from wmill import S3Object
 
         s3_obj = S3Object(s3="/path/to/my_file.txt")
-        my_obj_content_reader = client.load_s3_file_reader(s3_obj)
-        file_content = my_obj_content_reader.read().decode("utf-8")
+        with wmill.load_s3_file(s3object, s3_resource_path) as file_reader:
+            print(file_reader.read())
         '''
         """
-
-        result = S3BufferedReader(
-            workspace=f"{self.workspace}",
-            windmill_client=self.client,
-            file_key=s3object["s3"],
-            s3_resource_path=s3_resource_path,
-        )
-        return result
+        reader = S3BufferedReader(f"{self.workspace}", self.client, s3object["s3"], s3_resource_path)
+        return reader
 
     def write_s3_file(
         self,
         s3object: S3Object | None,
         file_content: BufferedReader | bytes,
-        file_expiration: dt.datetime | None,
         s3_resource_path: str | None,
     ) -> S3Object:
         """
@@ -441,38 +435,25 @@ class Windmill:
         else:
             raise Exception("Type of file_content not supported")
 
-        file_key = s3object["s3"] if s3object is not None else None
-        parts = []
-        upload_id = None
-        chunk = content_reader.read(5 * 1024 * 1024)
-        if len(chunk) == 0:
-            raise Exception("File content is empty, nothing to upload")
-        while True:
-            chunk_2 = content_reader.read(5 * 1024 * 1024)
-            reader_done = len(chunk_2) == 0
-            try:
-                response = self.post(
-                    f"/w/{self.workspace}/job_helpers/multipart_upload_s3_file",
-                    json={
-                        "file_key": file_key,
-                        "part_content": [b for b in chunk],
-                        "upload_id": upload_id,
-                        "parts": parts,
-                        "is_final": reader_done,
-                        "cancel_upload": False,
-                        "s3_resource_path": s3_resource_path,
-                        "file_expiration": file_expiration.isoformat() if file_expiration else None,
-                    },
-                ).json()
-            except Exception as e:
-                raise Exception("Could not write file to S3") from e
-            parts = response["parts"]
-            upload_id = response["upload_id"]
-            file_key = response["file_key"]
-            if response["is_done"]:
-                break
-            chunk = chunk_2
-        return S3Object(s3=file_key)
+        query_params = {}
+        if s3object is not None and s3object["s3"] != "":
+            query_params["file_key"] = s3object["s3"]
+        if s3_resource_path is not None and s3_resource_path != "":
+            query_params["s3_resource_path"] = s3_resource_path
+
+        try:
+            # need a vanilla client b/c content-type is not application/json here
+            response = httpx.post(
+                f"{self.base_url}/w/{self.workspace}/job_helpers/upload_s3_file",
+                headers={"Authorization": f"Bearer {self.token}", "Content-Type": "application/octet-stream"},
+                params=query_params,
+                content=content_reader,
+                verify=self.verify,
+                timeout=None,
+            ).json()
+        except Exception as e:
+            raise Exception("Could not write file to S3") from e
+        return S3Object(s3=response["file_key"])
 
     def __boto3_connection_settings(self, s3_resource) -> Boto3ConnectionSettings:
         endpoint_url_prefix = "https://" if s3_resource["useSSL"] else "http://"
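
Note: the chunked `multipart_upload_s3_file` protocol is replaced by a single streaming POST to `upload_s3_file` with an `application/octet-stream` body; passing a file-like object as `content` lets `httpx` stream it instead of buffering the whole file in memory. A sketch of the equivalent raw request; the base URL, workspace, token, and file key are hypothetical stand-ins for values the SDK reads from its own configuration:

    import os
    import httpx

    base_url = "https://app.windmill.dev/api"   # hypothetical
    workspace = "demo"                          # hypothetical
    token = os.environ["WM_TOKEN"]              # hypothetical

    with open("report.csv", "rb") as f:  # streamed, never fully in memory
        response = httpx.post(
            f"{base_url}/w/{workspace}/job_helpers/upload_s3_file",
            headers={
                "Authorization": f"Bearer {token}",
                "Content-Type": "application/octet-stream",
            },
            params={"file_key": "f/exports/report.csv"},
            content=f,
            timeout=None,
        )
    print(response.json()["file_key"])
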
@@ -727,7 +708,7 @@ def boto3_connection_settings(s3_resource_path: str = "") -> Boto3ConnectionSettings:
 @init_global_client
 def load_s3_file(s3object: S3Object, s3_resource_path: str = "") -> bytes:
     """
-    Load the entire content of a file stored in S3
+    Load the entire content of a file stored in S3 as bytes
     """
     return _client.load_s3_file(s3object, s3_resource_path if s3_resource_path != "" else None)
 
@@ -735,7 +716,7 @@ def load_s3_file(s3object: S3Object, s3_resource_path: str = "") -> bytes:
 @init_global_client
 def load_s3_file_reader(s3object: S3Object, s3_resource_path: str = "") -> BufferedReader:
     """
-    Load the content of a file stored in S3 as a buffered reader
+    Load the content of a file stored in S3
     """
     return _client.load_s3_file_reader(s3object, s3_resource_path if s3_resource_path != "" else None)
 
@@ -744,15 +725,12 @@ def load_s3_file_reader(s3object: S3Object, s3_resource_path: str = "") -> BufferedReader:
 def write_s3_file(
     s3object: S3Object | None,
     file_content: BufferedReader | bytes,
-    file_expiration: dt.datetime | None = None,
     s3_resource_path: str = "",
 ) -> S3Object:
     """
     Upload a file to S3
     """
-    return _client.write_s3_file(
-        s3object, file_content, file_expiration, s3_resource_path if s3_resource_path != "" else None
-    )
+    return _client.write_s3_file(s3object, file_content, s3_resource_path if s3_resource_path != "" else None)
 
 
 @init_global_client
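
Note: `file_expiration` is removed from the public `write_s3_file` signature as well, so existing callers passing it must drop the argument. A minimal call under the new signature (object key hypothetical):

    import wmill
    from wmill import S3Object

    result = wmill.write_s3_file(
        S3Object(s3="f/exports/report.csv"),  # or None to let the server pick a key
        b"col_a,col_b\n1,2\n",
    )
    print(result["s3"])  # final file key returned by the server
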
@@ -0,0 +1,48 @@
+from io import BufferedReader
+from json import JSONDecodeError
+
+import httpx
+
+
+class S3BufferedReader(BufferedReader):
+    def __init__(self, workspace: str, windmill_client: httpx.Client, file_key: str, s3_resource_path: str | None):
+        params = {
+            "file_key": file_key,
+        }
+        if s3_resource_path is not None:
+            params["s3_resource_path"] = s3_resource_path
+        self._context_manager = windmill_client.stream(
+            "GET",
+            f"/w/{workspace}/job_helpers/download_s3_file",
+            params=params,
+            timeout=None,
+        )
+
+    def __enter__(self):
+        reader = self._context_manager.__enter__()
+        self._iterator = reader.iter_bytes()
+        return self
+
+    def peek(self, size=0):
+        raise Exception("Not implemented, use read() instead")
+
+    def read(self, size=-1):
+        read_result = []
+        if size < 0:
+            for b in self._iterator:
+                read_result.append(b)
+        else:
+            for i in range(size):
+                try:
+                    b = self._iterator.__next__()
+                except StopIteration:
+                    break
+                read_result.append(b)
+
+        return b''.join(read_result)
+
+    def read1(self, size=-1):
+        return self.read(size)
+
+    def __exit__(self, *args):
+        self._context_manager.__exit__(*args)
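
Note: the rewritten reader defers all work to `httpx.Client.stream`; the GET request is only opened in `__enter__` and closed in `__exit__`, which is why calling `read()` outside a `with` block would fail (the byte iterator does not exist yet). A direct-usage sketch, assuming an authenticated `httpx.Client` and hypothetical workspace, key, and import path:

    import httpx
    # from wmill.s3_reader import S3BufferedReader  # hypothetical import path

    client = httpx.Client(
        base_url="https://app.windmill.dev/api",      # hypothetical
        headers={"Authorization": "Bearer <token>"},  # hypothetical
    )

    reader = S3BufferedReader("demo", client, "f/raw/data.txt", None)
    with reader:  # opens the streaming GET; response closed on exit
        data = reader.read()
    print(len(data))
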
@@ -1,149 +0,0 @@
-from io import BufferedReader
-from json import JSONDecodeError
-
-import httpx
-
-
-class S3BufferedReader(BufferedReader):
-    def __init__(self, workspace: str, windmill_client: httpx.Client, file_key: str, s3_resource_path: str | None):
-        self._workspace = workspace
-        self._client = windmill_client
-        self._file_key = file_key
-        self._s3_resource_path = s3_resource_path
-        self._file_size: int | None = None
-
-        self._part_number: int | None = 0
-        self._current_chunk: list[int] = []
-        self._position_in_chunk = 0
-
-    def peek(self, size=0):
-        read_result = []
-
-        if size > 0 or (
-            len(self._current_chunk) > self._position_in_chunk
-            and len(self._current_chunk) > self._position_in_chunk + size
-        ):
-            payload_to_return = self._current_chunk[self._position_in_chunk : (self._position_in_chunk + size)]
-            read_result += payload_to_return
-            return bytes(read_result)
-
-        if self._position_in_chunk < len(self._current_chunk):
-            payload_to_return = self._current_chunk[self._position_in_chunk :]
-            read_result += bytes(payload_to_return)
-
-        previous_chunk = self._current_chunk
-        previous_part_number = self._part_number
-        previous_position_in_chunk = self._position_in_chunk
-        try:
-            while len(read_result) < size or self._part_number is not None:
-                self._download_new_chunk()
-                if size > 0 and size - len(read_result) < len(self._current_chunk):
-                    payload_to_return = self._current_chunk[: (size - len(read_result))]
-                    self._position_in_chunk = size - len(read_result)
-                    read_result += bytes(payload_to_return)
-                    break
-
-                read_result += bytes(self._current_chunk)
-                if self._part_number is None:
-                    break
-        finally:
-            # always roll back the changes to the stream state
-            self._current_chunk = previous_chunk
-            self._part_number = previous_part_number
-            self._position_in_chunk = previous_position_in_chunk
-        return read_result
-
-    def read(self, size=-1):
-        read_result = []
-
-        if size > 0 and (
-            len(self._current_chunk) > self._position_in_chunk
-            and len(self._current_chunk) > self._position_in_chunk + size
-        ):
-            payload_to_return = self._current_chunk[self._position_in_chunk : (self._position_in_chunk + size)]
-            self._position_in_chunk += size
-            read_result += payload_to_return
-            return bytes(read_result)
-
-        if self._position_in_chunk < len(self._current_chunk):
-            payload_to_return = self._current_chunk[self._position_in_chunk :]
-            self._position_in_chunk = len(self._current_chunk)
-            read_result += payload_to_return
-
-        previous_chunk = self._current_chunk
-        previous_part_number = self._part_number
-        previous_position_in_chunk = self._position_in_chunk
-        try:
-            while len(read_result) < size or self._part_number is not None:
-                self._download_new_chunk()
-                if size > 0 and size - len(read_result) < len(self._current_chunk):
-                    payload_to_return = self._current_chunk[: (size - len(read_result))]
-                    self._position_in_chunk = size - len(read_result)
-                    read_result += payload_to_return
-                    break
-
-                read_result += self._current_chunk
-                if self._part_number is None:
-                    break
-        except Exception as e:
-            # roll back the changes to the stream state
-            self._current_chunk = previous_chunk
-            self._part_number = previous_part_number
-            self._position_in_chunk = previous_position_in_chunk
-            raise e
-        return bytes(read_result)
-
-    def read1(self, size=-1):
-        read_result = []
-
-        if size < 0:
-            payload_to_return = self._current_chunk[self._position_in_chunk :]
-            self._position_in_chunk = len(self._current_chunk)
-            read_result += payload_to_return
-            return bytes(read_result)
-
-        if size > 0 and len(self._current_chunk) > self._position_in_chunk:
-            end_byte = min(self._position_in_chunk + size, len(self._current_chunk))
-            payload_to_return = self._current_chunk[self._position_in_chunk : end_byte]
-            self._position_in_chunk = end_byte
-            read_result += payload_to_return
-            return bytes(read_result)
-
-        # no bytes in current buffer, load a new chunk
-        self._download_new_chunk()
-        end_byte = min(size, len(self._current_chunk))
-        payload_to_return = self._current_chunk[:end_byte]
-        self._position_in_chunk = end_byte
-        read_result += payload_to_return
-        return bytes(read_result)
-
-    def close(self):
-        self._part_number = 0
-        self._current_chunk = []
-        self._position_in_chunk = 0
-
-    def _download_new_chunk(
-        self,
-    ):
-        try:
-            raw_response = self._client.post(
-                f"/w/{self._workspace}/job_helpers/multipart_download_s3_file",
-                json={
-                    "file_key": self._file_key,
-                    "part_number": self._part_number,
-                    "file_size": self._file_size,
-                    "s3_resource_path": self._s3_resource_path,
-                },
-            )
-            try:
-                raw_response.raise_for_status()
-            except httpx.HTTPStatusError as err:
-                raise Exception(f"{err.request.url}: {err.response.status_code}, {err.response.text}")
-            response = raw_response.json()
-        except JSONDecodeError as e:
-            raise Exception("Could not generate download S3 file part") from e
-
-        self._current_chunk = response["part_content"]
-        self._part_number = response["next_part_number"]
-        self._file_size = response["file_size"]
-        self._position_in_chunk = 0