wmill 1.253.2__tar.gz → 1.253.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of wmill might be problematic.
- {wmill-1.253.2 → wmill-1.253.7}/PKG-INFO +1 -1
- {wmill-1.253.2 → wmill-1.253.7}/pyproject.toml +1 -1
- {wmill-1.253.2 → wmill-1.253.7}/wmill/client.py +103 -30
- {wmill-1.253.2 → wmill-1.253.7}/README.md +0 -0
- {wmill-1.253.2 → wmill-1.253.7}/wmill/__init__.py +0 -0
- {wmill-1.253.2 → wmill-1.253.7}/wmill/py.typed +0 -0
- {wmill-1.253.2 → wmill-1.253.7}/wmill/s3_types.py +0 -0
--- wmill-1.253.2/wmill/client.py
+++ wmill-1.253.7/wmill/client.py
@@ -3,6 +3,7 @@ from __future__ import annotations
 import atexit
 import datetime as dt
 import functools
+from io import BufferedReader, BytesIO
 import logging
 import os
 import random
@@ -372,7 +373,7 @@ class Windmill:
         except JSONDecodeError as e:
             raise Exception("Could not generate Boto3 S3 connection settings from the provided resource") from e

-    def load_s3_file(self, s3object: S3Object, s3_resource_path: str
+    def load_s3_file(self, s3object: S3Object, s3_resource_path: str | None) -> bytes:
         """
         Load a file from the workspace s3 bucket and returns the bytes stream.

@@ -384,22 +385,39 @@ class Windmill:
         file_content = my_obj["Body"].read().decode("utf-8")
         '''
         """
-        try:
-            s3_resource = self.post(
-                f"/w/{self.workspace}/job_helpers/v2/s3_resource_info",
-                json={} if s3_resource_path == "" else {"s3_resource_path": s3_resource_path},
-            ).json()
-        except JSONDecodeError as e:
-            raise Exception("Could not generate Boto3 S3 connection settings from the provided resource") from e
-
-        import boto3

-
-
-
-
-
-
+        part_number = 0
+        file_total_size = None
+        file_content: list[int] = []
+        while True:
+            if part_number is None:
+                break
+            try:
+                part_response = self.post(
+                    f"/w/{self.workspace}/job_helpers/multipart_download_s3_file",
+                    json={
+                        "file_key": s3object["s3"],
+                        "part_number": part_number,
+                        "file_size": file_total_size,
+                        "s3_resource_path": s3_resource_path,
+                    },
+                ).json()
+            except JSONDecodeError as e:
+                raise Exception("Could not generate download S3 file part") from e
+
+            if len(part_response["part_content"]) > 0:
+                file_content = file_content + part_response["part_content"]
+            part_number = part_response["next_part_number"]
+            file_total_size = part_response["file_size"]
+        return bytes(file_content)
+
+    def write_s3_file(
+        self,
+        s3object: S3Object | None,
+        file_content: BufferedReader | bytes,
+        file_expiration: dt.datetime | None,
+        s3_resource_path: str | None,
+    ) -> S3Object:
         """
         Write a file to the workspace S3 bucket

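For orientation, a minimal usage sketch of the new download path. The client construction and the object key are illustrative, and importing Windmill from the package root is an assumption (the diff's own docstrings only confirm `from wmill import S3Object`); the loop over `multipart_download_s3_file` happens inside `load_s3_file` itself.

    from wmill import S3Object, Windmill  # Windmill export at package root is assumed

    # Hypothetical setup: a client that picks up its base URL, token and
    # workspace from the usual Windmill environment variables.
    client = Windmill()

    # load_s3_file now fetches the object part by part via the
    # job_helpers/multipart_download_s3_file endpoint and returns the
    # concatenated bytes; None appears to select the workspace default S3 resource.
    content: bytes = client.load_s3_file(S3Object(s3="path/to/my_file.txt"), None)
    print(content.decode("utf-8"))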
@@ -407,24 +425,56 @@ class Windmill:
         from wmill import S3Object

         s3_obj = S3Object(s3="/path/to/my_file.txt")
+
+        # for an in memory bytes array:
         file_content = b'Hello Windmill!'
         client.write_s3_file(s3_obj, file_content)
+
+        # for a file:
+        with open("my_file.txt", "rb") as my_file:
+            client.write_s3_file(s3_obj, my_file)
         '''
         """
-
-
-
-
-
-
-        raise Exception("
-
-
-
-
-
-
-
+        content_reader: BufferedReader | BytesIO
+        if isinstance(file_content, BufferedReader):
+            content_reader = file_content
+        elif isinstance(file_content, bytes):
+            content_reader = BytesIO(file_content)
+        else:
+            raise Exception("Type of file_content not supported")
+
+        file_key = s3object["s3"] if s3object is not None else None
+        parts = []
+        upload_id = None
+        chunk = content_reader.read(5 * 1024 * 1024)
+        if len(chunk) == 0:
+            raise Exception("File content is empty, nothing to upload")
+        while True:
+            chunk_2 = content_reader.read(5 * 1024 * 1024)
+            reader_done = len(chunk_2) == 0
+            try:
+                response = self.post(
+                    f"/w/{self.workspace}/job_helpers/multipart_upload_s3_file",
+                    json={
+                        "file_key": file_key,
+                        "part_content": [b for b in chunk],
+                        "upload_id": upload_id,
+                        "parts": parts,
+                        "is_final": reader_done,
+                        "cancel_upload": False,
+                        "s3_resource_path": s3_resource_path,
+                        "file_expiration": file_expiration.isoformat() if file_expiration else None,
+                    },
+                ).json()
+            except Exception as e:
+                raise Exception("Could not write file to S3") from e
+            parts = response["parts"]
+            upload_id = response["upload_id"]
+            file_key = response["file_key"]
+            if response["is_done"]:
+                break
+            chunk = chunk_2
+        return S3Object(s3=file_key)

     def __boto3_connection_settings(self, s3_resource) -> Boto3ConnectionSettings:
         endpoint_url_prefix = "https://" if s3_resource["useSSL"] else "http://"
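Following the docstring added in the hunk above, a hedged sketch of calling the new method with both supported `file_content` types. The client instance, file names, keys and expiration value are illustrative; the method uploads in 5 MiB parts through the `multipart_upload_s3_file` job helper.

    import datetime as dt

    from wmill import S3Object, Windmill  # Windmill export at package root is assumed

    client = Windmill()  # hypothetical: assumes configuration from the environment

    # In-memory bytes are wrapped in BytesIO and uploaded in 5 MiB parts.
    # file_expiration and s3_resource_path have no defaults on the method itself.
    obj = client.write_s3_file(
        S3Object(s3="path/to/upload.txt"),
        b"Hello Windmill!",
        dt.datetime.now() + dt.timedelta(days=1),  # optional expiration, or None
        None,  # None appears to select the workspace default S3 resource
    )

    # An open file handle (BufferedReader) is streamed chunk by chunk without
    # reading the whole file into memory. With s3object=None the file_key
    # returned by the backend is used for the resulting S3Object.
    with open("my_file.txt", "rb") as fh:
        obj = client.write_s3_file(None, fh, None, None)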
@@ -676,6 +726,29 @@ def boto3_connection_settings(s3_resource_path: str = "") -> Boto3ConnectionSett
     return _client.get_boto3_connection_settings(s3_resource_path)


+@init_global_client
+def load_s3_file(s3object: S3Object, s3_resource_path: str = "") -> bytes:
+    """
+    Load the content of a file stored in S3
+    """
+    return _client.load_s3_file(s3object, s3_resource_path if s3_resource_path != "" else None)
+
+
+@init_global_client
+def write_s3_file(
+    s3object: S3Object | None,
+    file_content: BufferedReader | bytes,
+    file_expiration: dt.datetime | None = None,
+    s3_resource_path: str = "",
+) -> S3Object:
+    """
+    Upload a file to S3
+    """
+    return _client.write_s3_file(
+        s3object, file_content, file_expiration, s3_resource_path if s3_resource_path != "" else None
+    )
+
+
 @init_global_client
 def whoami() -> dict:
     """
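These module-level wrappers expose the new S3 helpers for direct use inside Windmill scripts, defaulting to the workspace S3 resource when `s3_resource_path` is left empty. A short hedged sketch of how a script might use them (object keys and payload are illustrative, and it assumes the package root re-exports these helpers like the existing ones such as whoami):

    import wmill
    from wmill import S3Object


    def main():
        # Upload: file_expiration defaults to None and s3_resource_path to "",
        # which is translated to None (workspace default resource) internally.
        written = wmill.write_s3_file(S3Object(s3="reports/output.csv"), b"col_a,col_b\n1,2\n")

        # Download the bytes back and decode them.
        data = wmill.load_s3_file(written)
        return data.decode("utf-8")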
Files without changes: README.md, wmill/__init__.py, wmill/py.typed, wmill/s3_types.py