uipath 2.1.122__py3-none-any.whl → 2.1.123__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of uipath might be problematic. Click here for more details.
- uipath/_cli/_runtime/_contracts.py +1 -0
- uipath/_services/buckets_service.py +879 -48
- uipath/models/__init__.py +2 -1
- uipath/models/buckets.py +50 -1
- {uipath-2.1.122.dist-info → uipath-2.1.123.dist-info}/METADATA +1 -1
- {uipath-2.1.122.dist-info → uipath-2.1.123.dist-info}/RECORD +9 -9
- {uipath-2.1.122.dist-info → uipath-2.1.123.dist-info}/WHEEL +0 -0
- {uipath-2.1.122.dist-info → uipath-2.1.123.dist-info}/entry_points.txt +0 -0
- {uipath-2.1.122.dist-info → uipath-2.1.123.dist-info}/licenses/LICENSE +0 -0
|
@@ -96,6 +96,7 @@ class UiPathErrorCategory(str, Enum):
|
|
|
96
96
|
class UiPathErrorCode(str, Enum):
|
|
97
97
|
"""Standard error codes for UiPath runtime errors."""
|
|
98
98
|
|
|
99
|
+
AUTHENTICATION_REQUIRED = "AUTHENTICATION_REQUIRED"
|
|
99
100
|
# Entrypoint related errors
|
|
100
101
|
ENTRYPOINT_MISSING = "ENTRYPOINT_MISSING"
|
|
101
102
|
ENTRYPOINT_NOT_FOUND = "ENTRYPOINT_NOT_FOUND"
|
|
@@ -1,5 +1,8 @@
|
|
|
1
|
+
import asyncio
|
|
1
2
|
import mimetypes
|
|
2
|
-
|
|
3
|
+
import uuid
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Any, AsyncIterator, Dict, Iterator, Optional, Union
|
|
3
6
|
|
|
4
7
|
import httpx
|
|
5
8
|
|
|
@@ -8,7 +11,7 @@ from .._execution_context import ExecutionContext
|
|
|
8
11
|
from .._folder_context import FolderContext
|
|
9
12
|
from .._utils import Endpoint, RequestSpec, header_folder, infer_bindings
|
|
10
13
|
from .._utils._ssl_context import get_httpx_client_kwargs
|
|
11
|
-
from ..models import Bucket
|
|
14
|
+
from ..models import Bucket, BucketFile
|
|
12
15
|
from ..tracing._traced import traced
|
|
13
16
|
from ._base_service import BaseService
|
|
14
17
|
|
|
@@ -20,11 +23,241 @@ class BucketsService(FolderContext, BaseService):
|
|
|
20
23
|
used by automation processes.
|
|
21
24
|
"""
|
|
22
25
|
|
|
26
|
+
_GET_FILES_PAGE_SIZE = 500 # OData GetFiles pagination page size
|
|
27
|
+
|
|
23
28
|
def __init__(self, config: Config, execution_context: ExecutionContext) -> None:
|
|
24
29
|
super().__init__(config=config, execution_context=execution_context)
|
|
25
30
|
self.custom_client = httpx.Client(**get_httpx_client_kwargs())
|
|
26
31
|
self.custom_client_async = httpx.AsyncClient(**get_httpx_client_kwargs())
|
|
27
32
|
|
|
33
|
+
@traced(name="buckets_list", run_type="uipath")
|
|
34
|
+
def list(
|
|
35
|
+
self,
|
|
36
|
+
*,
|
|
37
|
+
folder_path: Optional[str] = None,
|
|
38
|
+
folder_key: Optional[str] = None,
|
|
39
|
+
name: Optional[str] = None,
|
|
40
|
+
) -> Iterator[Bucket]:
|
|
41
|
+
"""List buckets with auto-pagination.
|
|
42
|
+
|
|
43
|
+
Args:
|
|
44
|
+
folder_path: Folder path to filter buckets
|
|
45
|
+
folder_key: Folder key (mutually exclusive with folder_path)
|
|
46
|
+
name: Filter by bucket name (contains match)
|
|
47
|
+
|
|
48
|
+
Yields:
|
|
49
|
+
Bucket: Bucket resource instances
|
|
50
|
+
|
|
51
|
+
Examples:
|
|
52
|
+
>>> # List all buckets
|
|
53
|
+
>>> for bucket in sdk.buckets.list():
|
|
54
|
+
... print(bucket.name)
|
|
55
|
+
>>>
|
|
56
|
+
>>> # Filter by folder
|
|
57
|
+
>>> for bucket in sdk.buckets.list(folder_path="Production"):
|
|
58
|
+
... print(bucket.name)
|
|
59
|
+
>>>
|
|
60
|
+
>>> # Filter by name
|
|
61
|
+
>>> for bucket in sdk.buckets.list(name="invoice"):
|
|
62
|
+
... print(bucket.name)
|
|
63
|
+
"""
|
|
64
|
+
skip = 0
|
|
65
|
+
top = 100
|
|
66
|
+
|
|
67
|
+
while True:
|
|
68
|
+
spec = self._list_spec(
|
|
69
|
+
folder_path=folder_path,
|
|
70
|
+
folder_key=folder_key,
|
|
71
|
+
name=name,
|
|
72
|
+
skip=skip,
|
|
73
|
+
top=top,
|
|
74
|
+
)
|
|
75
|
+
response = self.request(
|
|
76
|
+
spec.method,
|
|
77
|
+
url=spec.endpoint,
|
|
78
|
+
params=spec.params,
|
|
79
|
+
headers=spec.headers,
|
|
80
|
+
).json()
|
|
81
|
+
|
|
82
|
+
items = response.get("value", [])
|
|
83
|
+
if not items:
|
|
84
|
+
break
|
|
85
|
+
|
|
86
|
+
for item in items:
|
|
87
|
+
bucket = Bucket.model_validate(item)
|
|
88
|
+
yield bucket
|
|
89
|
+
|
|
90
|
+
if len(items) < top:
|
|
91
|
+
break
|
|
92
|
+
|
|
93
|
+
skip += top
|
|
94
|
+
|
|
95
|
+
@traced(name="buckets_list", run_type="uipath")
|
|
96
|
+
async def list_async(
|
|
97
|
+
self,
|
|
98
|
+
*,
|
|
99
|
+
folder_path: Optional[str] = None,
|
|
100
|
+
folder_key: Optional[str] = None,
|
|
101
|
+
name: Optional[str] = None,
|
|
102
|
+
) -> AsyncIterator[Bucket]:
|
|
103
|
+
"""Async version of list() with auto-pagination."""
|
|
104
|
+
skip = 0
|
|
105
|
+
top = 50
|
|
106
|
+
|
|
107
|
+
while True:
|
|
108
|
+
spec = self._list_spec(
|
|
109
|
+
folder_path=folder_path,
|
|
110
|
+
folder_key=folder_key,
|
|
111
|
+
name=name,
|
|
112
|
+
skip=skip,
|
|
113
|
+
top=top,
|
|
114
|
+
)
|
|
115
|
+
response = (
|
|
116
|
+
await self.request_async(
|
|
117
|
+
spec.method,
|
|
118
|
+
url=spec.endpoint,
|
|
119
|
+
params=spec.params,
|
|
120
|
+
headers=spec.headers,
|
|
121
|
+
)
|
|
122
|
+
).json()
|
|
123
|
+
|
|
124
|
+
items = response.get("value", [])
|
|
125
|
+
if not items:
|
|
126
|
+
break
|
|
127
|
+
|
|
128
|
+
for item in items:
|
|
129
|
+
bucket = Bucket.model_validate(item)
|
|
130
|
+
yield bucket
|
|
131
|
+
|
|
132
|
+
if len(items) < top:
|
|
133
|
+
break
|
|
134
|
+
|
|
135
|
+
skip += top
|
|
136
|
+
|
|
137
|
+
@traced(name="buckets_exists", run_type="uipath")
|
|
138
|
+
def exists(
|
|
139
|
+
self,
|
|
140
|
+
name: str,
|
|
141
|
+
*,
|
|
142
|
+
folder_key: Optional[str] = None,
|
|
143
|
+
folder_path: Optional[str] = None,
|
|
144
|
+
) -> bool:
|
|
145
|
+
"""Check if bucket exists.
|
|
146
|
+
|
|
147
|
+
Args:
|
|
148
|
+
name: Bucket name
|
|
149
|
+
folder_key: Folder key
|
|
150
|
+
folder_path: Folder path
|
|
151
|
+
|
|
152
|
+
Returns:
|
|
153
|
+
bool: True if bucket exists
|
|
154
|
+
|
|
155
|
+
Examples:
|
|
156
|
+
>>> if sdk.buckets.exists("my-storage"):
|
|
157
|
+
... print("Bucket found")
|
|
158
|
+
"""
|
|
159
|
+
try:
|
|
160
|
+
self.retrieve(name=name, folder_key=folder_key, folder_path=folder_path)
|
|
161
|
+
return True
|
|
162
|
+
except LookupError:
|
|
163
|
+
return False
|
|
164
|
+
|
|
165
|
+
@traced(name="buckets_exists", run_type="uipath")
|
|
166
|
+
async def exists_async(
|
|
167
|
+
self,
|
|
168
|
+
name: str,
|
|
169
|
+
*,
|
|
170
|
+
folder_key: Optional[str] = None,
|
|
171
|
+
folder_path: Optional[str] = None,
|
|
172
|
+
) -> bool:
|
|
173
|
+
"""Async version of exists()."""
|
|
174
|
+
try:
|
|
175
|
+
await self.retrieve_async(
|
|
176
|
+
name=name, folder_key=folder_key, folder_path=folder_path
|
|
177
|
+
)
|
|
178
|
+
return True
|
|
179
|
+
except LookupError:
|
|
180
|
+
return False
|
|
181
|
+
|
|
182
|
+
@traced(name="buckets_create", run_type="uipath")
|
|
183
|
+
def create(
|
|
184
|
+
self,
|
|
185
|
+
name: str,
|
|
186
|
+
*,
|
|
187
|
+
description: Optional[str] = None,
|
|
188
|
+
identifier: Optional[str] = None,
|
|
189
|
+
folder_path: Optional[str] = None,
|
|
190
|
+
folder_key: Optional[str] = None,
|
|
191
|
+
) -> Bucket:
|
|
192
|
+
"""Create a new bucket.
|
|
193
|
+
|
|
194
|
+
Args:
|
|
195
|
+
name: Bucket name (must be unique within folder)
|
|
196
|
+
description: Optional description
|
|
197
|
+
identifier: UUID identifier (auto-generated if not provided)
|
|
198
|
+
folder_path: Folder to create bucket in
|
|
199
|
+
folder_key: Folder key
|
|
200
|
+
|
|
201
|
+
Returns:
|
|
202
|
+
Bucket: Newly created bucket resource
|
|
203
|
+
|
|
204
|
+
Raises:
|
|
205
|
+
Exception: If bucket creation fails
|
|
206
|
+
|
|
207
|
+
Examples:
|
|
208
|
+
>>> bucket = sdk.buckets.create("my-storage")
|
|
209
|
+
>>> bucket = sdk.buckets.create(
|
|
210
|
+
... "data-storage",
|
|
211
|
+
... description="Production data"
|
|
212
|
+
... )
|
|
213
|
+
"""
|
|
214
|
+
spec = self._create_spec(
|
|
215
|
+
name=name,
|
|
216
|
+
description=description,
|
|
217
|
+
identifier=identifier or str(uuid.uuid4()),
|
|
218
|
+
folder_path=folder_path,
|
|
219
|
+
folder_key=folder_key,
|
|
220
|
+
)
|
|
221
|
+
response = self.request(
|
|
222
|
+
spec.method,
|
|
223
|
+
url=spec.endpoint,
|
|
224
|
+
json=spec.json,
|
|
225
|
+
headers=spec.headers,
|
|
226
|
+
).json()
|
|
227
|
+
|
|
228
|
+
bucket = Bucket.model_validate(response)
|
|
229
|
+
return bucket
|
|
230
|
+
|
|
231
|
+
@traced(name="buckets_create", run_type="uipath")
|
|
232
|
+
async def create_async(
|
|
233
|
+
self,
|
|
234
|
+
name: str,
|
|
235
|
+
*,
|
|
236
|
+
description: Optional[str] = None,
|
|
237
|
+
identifier: Optional[str] = None,
|
|
238
|
+
folder_path: Optional[str] = None,
|
|
239
|
+
folder_key: Optional[str] = None,
|
|
240
|
+
) -> Bucket:
|
|
241
|
+
"""Async version of create()."""
|
|
242
|
+
spec = self._create_spec(
|
|
243
|
+
name=name,
|
|
244
|
+
description=description,
|
|
245
|
+
identifier=identifier or str(uuid.uuid4()),
|
|
246
|
+
folder_path=folder_path,
|
|
247
|
+
folder_key=folder_key,
|
|
248
|
+
)
|
|
249
|
+
response = (
|
|
250
|
+
await self.request_async(
|
|
251
|
+
spec.method,
|
|
252
|
+
url=spec.endpoint,
|
|
253
|
+
json=spec.json,
|
|
254
|
+
headers=spec.headers,
|
|
255
|
+
)
|
|
256
|
+
).json()
|
|
257
|
+
|
|
258
|
+
bucket = Bucket.model_validate(response)
|
|
259
|
+
return bucket
|
|
260
|
+
|
|
28
261
|
@traced(name="buckets_download", run_type="uipath")
|
|
29
262
|
@infer_bindings(resource_type="bucket")
|
|
30
263
|
def download(
|
|
@@ -74,7 +307,6 @@ class BucketsService(FolderContext, BaseService):
|
|
|
74
307
|
}
|
|
75
308
|
|
|
76
309
|
with open(destination_path, "wb") as file:
|
|
77
|
-
# the self.request adds auth bearer token
|
|
78
310
|
if result["RequiresAuth"]:
|
|
79
311
|
file_content = self.request("GET", read_uri, headers=headers).content
|
|
80
312
|
else:
|
|
@@ -131,17 +363,16 @@ class BucketsService(FolderContext, BaseService):
|
|
|
131
363
|
)
|
|
132
364
|
}
|
|
133
365
|
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
file.write(file_content)
|
|
366
|
+
if result["RequiresAuth"]:
|
|
367
|
+
file_content = (
|
|
368
|
+
await self.request_async("GET", read_uri, headers=headers)
|
|
369
|
+
).content
|
|
370
|
+
else:
|
|
371
|
+
file_content = (
|
|
372
|
+
await self.custom_client_async.get(read_uri, headers=headers)
|
|
373
|
+
).content
|
|
374
|
+
|
|
375
|
+
await asyncio.to_thread(Path(destination_path).write_bytes, file_content)
|
|
145
376
|
|
|
146
377
|
@traced(name="buckets_upload", run_type="uipath")
|
|
147
378
|
@infer_bindings(resource_type="bucket")
|
|
@@ -182,8 +413,6 @@ class BucketsService(FolderContext, BaseService):
|
|
|
182
413
|
name=name, key=key, folder_key=folder_key, folder_path=folder_path
|
|
183
414
|
)
|
|
184
415
|
|
|
185
|
-
# if source_path, dynamically detect the mime type
|
|
186
|
-
# default to application/octet-stream
|
|
187
416
|
if source_path:
|
|
188
417
|
_content_type, _ = mimetypes.guess_type(source_path)
|
|
189
418
|
else:
|
|
@@ -258,7 +487,7 @@ class BucketsService(FolderContext, BaseService):
|
|
|
258
487
|
name (Optional[str]): The name of the bucket.
|
|
259
488
|
blob_file_path (str): The path where the file will be stored in the bucket.
|
|
260
489
|
content_type (Optional[str]): The MIME type of the file. For file inputs this is computed dynamically. Default is "application/octet-stream".
|
|
261
|
-
source_path (str): The local path of the file to upload.
|
|
490
|
+
source_path (Optional[str]): The local path of the file to upload.
|
|
262
491
|
content (Optional[Union[str, bytes]]): The content to upload (string or bytes).
|
|
263
492
|
folder_key (Optional[str]): The key of the folder where the bucket resides.
|
|
264
493
|
folder_path (Optional[str]): The path of the folder where the bucket resides.
|
|
@@ -276,8 +505,6 @@ class BucketsService(FolderContext, BaseService):
|
|
|
276
505
|
name=name, key=key, folder_key=folder_key, folder_path=folder_path
|
|
277
506
|
)
|
|
278
507
|
|
|
279
|
-
# if source_path, dynamically detect the mime type
|
|
280
|
-
# default to application/octet-stream
|
|
281
508
|
if source_path:
|
|
282
509
|
_content_type, _ = mimetypes.guess_type(source_path)
|
|
283
510
|
else:
|
|
@@ -326,16 +553,15 @@ class BucketsService(FolderContext, BaseService):
|
|
|
326
553
|
)
|
|
327
554
|
|
|
328
555
|
if source_path is not None:
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
)
|
|
556
|
+
file_content = await asyncio.to_thread(Path(source_path).read_bytes)
|
|
557
|
+
if result["RequiresAuth"]:
|
|
558
|
+
await self.request_async(
|
|
559
|
+
"PUT", write_uri, headers=headers, content=file_content
|
|
560
|
+
)
|
|
561
|
+
else:
|
|
562
|
+
await self.custom_client_async.put(
|
|
563
|
+
write_uri, headers=headers, content=file_content
|
|
564
|
+
)
|
|
339
565
|
|
|
340
566
|
@traced(name="buckets_retrieve", run_type="uipath")
|
|
341
567
|
@infer_bindings(resource_type="bucket")
|
|
@@ -356,34 +582,61 @@ class BucketsService(FolderContext, BaseService):
|
|
|
356
582
|
folder_path (Optional[str]): The path of the folder where the bucket resides.
|
|
357
583
|
|
|
358
584
|
Returns:
|
|
359
|
-
|
|
585
|
+
Bucket: The bucket resource instance.
|
|
360
586
|
|
|
361
587
|
Raises:
|
|
362
588
|
ValueError: If neither bucket key nor bucket name is provided.
|
|
363
589
|
Exception: If the bucket with the specified name is not found.
|
|
590
|
+
|
|
591
|
+
Examples:
|
|
592
|
+
>>> bucket = sdk.buckets.retrieve(name="my-storage")
|
|
593
|
+
>>> print(bucket.name, bucket.identifier)
|
|
364
594
|
"""
|
|
365
595
|
if not (key or name):
|
|
366
596
|
raise ValueError("Must specify a bucket name or bucket key")
|
|
597
|
+
|
|
367
598
|
if key:
|
|
368
599
|
spec = self._retrieve_by_key_spec(
|
|
369
600
|
key, folder_key=folder_key, folder_path=folder_path
|
|
370
601
|
)
|
|
602
|
+
try:
|
|
603
|
+
response = self.request(
|
|
604
|
+
spec.method,
|
|
605
|
+
url=spec.endpoint,
|
|
606
|
+
params=spec.params,
|
|
607
|
+
headers=spec.headers,
|
|
608
|
+
).json()
|
|
609
|
+
if "value" in response:
|
|
610
|
+
items = response.get("value", [])
|
|
611
|
+
if not items:
|
|
612
|
+
raise LookupError(f"Bucket with key '{key}' not found")
|
|
613
|
+
bucket_data = items[0]
|
|
614
|
+
else:
|
|
615
|
+
bucket_data = response
|
|
616
|
+
except (KeyError, IndexError) as e:
|
|
617
|
+
raise LookupError(f"Bucket with key '{key}' not found") from e
|
|
371
618
|
else:
|
|
372
619
|
spec = self._retrieve_spec(
|
|
373
620
|
name, # type: ignore
|
|
374
621
|
folder_key=folder_key,
|
|
375
622
|
folder_path=folder_path,
|
|
376
623
|
)
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
624
|
+
try:
|
|
625
|
+
response = self.request(
|
|
626
|
+
spec.method,
|
|
627
|
+
url=spec.endpoint,
|
|
628
|
+
params=spec.params,
|
|
629
|
+
headers=spec.headers,
|
|
630
|
+
).json()
|
|
631
|
+
items = response.get("value", [])
|
|
632
|
+
if not items:
|
|
633
|
+
raise LookupError(f"Bucket with name '{name}' not found")
|
|
634
|
+
bucket_data = items[0]
|
|
635
|
+
except (KeyError, IndexError) as e:
|
|
636
|
+
raise LookupError(f"Bucket with name '{name}' not found") from e
|
|
637
|
+
|
|
638
|
+
bucket = Bucket.model_validate(bucket_data)
|
|
639
|
+
return bucket
|
|
387
640
|
|
|
388
641
|
@traced(name="buckets_retrieve", run_type="uipath")
|
|
389
642
|
@infer_bindings(resource_type="bucket")
|
|
@@ -404,26 +657,177 @@ class BucketsService(FolderContext, BaseService):
|
|
|
404
657
|
folder_path (Optional[str]): The path of the folder where the bucket resides.
|
|
405
658
|
|
|
406
659
|
Returns:
|
|
407
|
-
|
|
660
|
+
Bucket: The bucket resource instance.
|
|
408
661
|
|
|
409
662
|
Raises:
|
|
410
663
|
ValueError: If neither bucket key nor bucket name is provided.
|
|
411
664
|
Exception: If the bucket with the specified name is not found.
|
|
665
|
+
|
|
666
|
+
Examples:
|
|
667
|
+
>>> bucket = await sdk.buckets.retrieve_async(name="my-storage")
|
|
668
|
+
>>> print(bucket.name, bucket.identifier)
|
|
412
669
|
"""
|
|
413
670
|
if not (key or name):
|
|
414
671
|
raise ValueError("Must specify a bucket name or bucket key")
|
|
672
|
+
|
|
415
673
|
if key:
|
|
416
674
|
spec = self._retrieve_by_key_spec(
|
|
417
675
|
key, folder_key=folder_key, folder_path=folder_path
|
|
418
676
|
)
|
|
677
|
+
try:
|
|
678
|
+
response = (
|
|
679
|
+
await self.request_async(
|
|
680
|
+
spec.method,
|
|
681
|
+
url=spec.endpoint,
|
|
682
|
+
params=spec.params,
|
|
683
|
+
headers=spec.headers,
|
|
684
|
+
)
|
|
685
|
+
).json()
|
|
686
|
+
if "value" in response:
|
|
687
|
+
items = response.get("value", [])
|
|
688
|
+
if not items:
|
|
689
|
+
raise LookupError(f"Bucket with key '{key}' not found")
|
|
690
|
+
bucket_data = items[0]
|
|
691
|
+
else:
|
|
692
|
+
bucket_data = response
|
|
693
|
+
except (KeyError, IndexError) as e:
|
|
694
|
+
raise LookupError(f"Bucket with key '{key}' not found") from e
|
|
419
695
|
else:
|
|
420
696
|
spec = self._retrieve_spec(
|
|
421
697
|
name, # type: ignore
|
|
422
698
|
folder_key=folder_key,
|
|
423
699
|
folder_path=folder_path,
|
|
424
700
|
)
|
|
701
|
+
try:
|
|
702
|
+
response = (
|
|
703
|
+
await self.request_async(
|
|
704
|
+
spec.method,
|
|
705
|
+
url=spec.endpoint,
|
|
706
|
+
params=spec.params,
|
|
707
|
+
headers=spec.headers,
|
|
708
|
+
)
|
|
709
|
+
).json()
|
|
710
|
+
items = response.get("value", [])
|
|
711
|
+
if not items:
|
|
712
|
+
raise LookupError(f"Bucket with name '{name}' not found")
|
|
713
|
+
bucket_data = items[0]
|
|
714
|
+
except (KeyError, IndexError) as e:
|
|
715
|
+
raise LookupError(f"Bucket with name '{name}' not found") from e
|
|
425
716
|
|
|
426
|
-
|
|
717
|
+
bucket = Bucket.model_validate(bucket_data)
|
|
718
|
+
return bucket
|
|
719
|
+
|
|
720
|
+
@traced(name="buckets_list_files", run_type="uipath")
|
|
721
|
+
@infer_bindings(resource_type="bucket")
|
|
722
|
+
def list_files(
|
|
723
|
+
self,
|
|
724
|
+
*,
|
|
725
|
+
name: Optional[str] = None,
|
|
726
|
+
key: Optional[str] = None,
|
|
727
|
+
prefix: str = "",
|
|
728
|
+
folder_key: Optional[str] = None,
|
|
729
|
+
folder_path: Optional[str] = None,
|
|
730
|
+
) -> Iterator[BucketFile]:
|
|
731
|
+
"""List files in a bucket.
|
|
732
|
+
|
|
733
|
+
Args:
|
|
734
|
+
name: Bucket name
|
|
735
|
+
key: Bucket identifier
|
|
736
|
+
prefix: Filter files by prefix
|
|
737
|
+
folder_key: Folder key
|
|
738
|
+
folder_path: Folder path
|
|
739
|
+
|
|
740
|
+
Returns:
|
|
741
|
+
Iterator[BucketFile]: Iterator of files in the bucket
|
|
742
|
+
|
|
743
|
+
Note:
|
|
744
|
+
Returns an iterator for memory efficiency. Use list() to materialize all results:
|
|
745
|
+
files = list(sdk.buckets.list_files(name="my-storage"))
|
|
746
|
+
|
|
747
|
+
This method automatically handles pagination, fetching up to 500 files per request.
|
|
748
|
+
|
|
749
|
+
Examples:
|
|
750
|
+
>>> for file in sdk.buckets.list_files(name="my-storage"):
|
|
751
|
+
... print(file.path)
|
|
752
|
+
>>> files = list(sdk.buckets.list_files(name="my-storage", prefix="data/"))
|
|
753
|
+
"""
|
|
754
|
+
bucket = self.retrieve(
|
|
755
|
+
name=name, key=key, folder_key=folder_key, folder_path=folder_path
|
|
756
|
+
)
|
|
757
|
+
|
|
758
|
+
continuation_token: Optional[str] = None
|
|
759
|
+
|
|
760
|
+
while True:
|
|
761
|
+
spec = self._list_files_spec(
|
|
762
|
+
bucket.id,
|
|
763
|
+
prefix,
|
|
764
|
+
continuation_token=continuation_token,
|
|
765
|
+
folder_key=folder_key,
|
|
766
|
+
folder_path=folder_path,
|
|
767
|
+
)
|
|
768
|
+
response = self.request(
|
|
769
|
+
spec.method,
|
|
770
|
+
url=spec.endpoint,
|
|
771
|
+
params=spec.params,
|
|
772
|
+
headers=spec.headers,
|
|
773
|
+
).json()
|
|
774
|
+
|
|
775
|
+
items = response.get("items", [])
|
|
776
|
+
for item in items:
|
|
777
|
+
yield BucketFile.model_validate(item)
|
|
778
|
+
|
|
779
|
+
continuation_token = response.get("continuationToken")
|
|
780
|
+
if not continuation_token:
|
|
781
|
+
break
|
|
782
|
+
|
|
783
|
+
@traced(name="buckets_list_files", run_type="uipath")
|
|
784
|
+
@infer_bindings(resource_type="bucket")
|
|
785
|
+
async def list_files_async(
|
|
786
|
+
self,
|
|
787
|
+
*,
|
|
788
|
+
name: Optional[str] = None,
|
|
789
|
+
key: Optional[str] = None,
|
|
790
|
+
prefix: str = "",
|
|
791
|
+
folder_key: Optional[str] = None,
|
|
792
|
+
folder_path: Optional[str] = None,
|
|
793
|
+
) -> AsyncIterator[BucketFile]:
|
|
794
|
+
"""List files in a bucket asynchronously.
|
|
795
|
+
|
|
796
|
+
Args:
|
|
797
|
+
name: Bucket name
|
|
798
|
+
key: Bucket identifier
|
|
799
|
+
prefix: Filter files by prefix
|
|
800
|
+
folder_key: Folder key
|
|
801
|
+
folder_path: Folder path
|
|
802
|
+
|
|
803
|
+
Returns:
|
|
804
|
+
AsyncIterator[BucketFile]: Async iterator of files in the bucket
|
|
805
|
+
|
|
806
|
+
Note:
|
|
807
|
+
Returns an async iterator for memory efficiency. Use list comprehension to materialize:
|
|
808
|
+
files = [f async for f in sdk.buckets.list_files_async(name="my-storage")]
|
|
809
|
+
|
|
810
|
+
This method automatically handles pagination, fetching up to 500 files per request.
|
|
811
|
+
|
|
812
|
+
Examples:
|
|
813
|
+
>>> async for file in sdk.buckets.list_files_async(name="my-storage"):
|
|
814
|
+
... print(file.path)
|
|
815
|
+
>>> files = [f async for f in sdk.buckets.list_files_async(name="my-storage", prefix="data/")]
|
|
816
|
+
"""
|
|
817
|
+
bucket = await self.retrieve_async(
|
|
818
|
+
name=name, key=key, folder_key=folder_key, folder_path=folder_path
|
|
819
|
+
)
|
|
820
|
+
|
|
821
|
+
continuation_token: Optional[str] = None
|
|
822
|
+
|
|
823
|
+
while True:
|
|
824
|
+
spec = self._list_files_spec(
|
|
825
|
+
bucket.id,
|
|
826
|
+
prefix,
|
|
827
|
+
continuation_token=continuation_token,
|
|
828
|
+
folder_key=folder_key,
|
|
829
|
+
folder_path=folder_path,
|
|
830
|
+
)
|
|
427
831
|
response = (
|
|
428
832
|
await self.request_async(
|
|
429
833
|
spec.method,
|
|
@@ -431,25 +835,343 @@ class BucketsService(FolderContext, BaseService):
|
|
|
431
835
|
params=spec.params,
|
|
432
836
|
headers=spec.headers,
|
|
433
837
|
)
|
|
434
|
-
).json()
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
838
|
+
).json()
|
|
839
|
+
|
|
840
|
+
items = response.get("items", [])
|
|
841
|
+
for item in items:
|
|
842
|
+
yield BucketFile.model_validate(item)
|
|
843
|
+
|
|
844
|
+
continuation_token = response.get("continuationToken")
|
|
845
|
+
if not continuation_token:
|
|
846
|
+
break
|
|
847
|
+
|
|
848
|
+
@traced(name="buckets_delete", run_type="uipath")
|
|
849
|
+
@infer_bindings(resource_type="bucket")
|
|
850
|
+
def delete(
|
|
851
|
+
self,
|
|
852
|
+
*,
|
|
853
|
+
name: Optional[str] = None,
|
|
854
|
+
key: Optional[str] = None,
|
|
855
|
+
blob_file_path: str,
|
|
856
|
+
folder_key: Optional[str] = None,
|
|
857
|
+
folder_path: Optional[str] = None,
|
|
858
|
+
) -> None:
|
|
859
|
+
"""Delete a file from a bucket.
|
|
860
|
+
|
|
861
|
+
Args:
|
|
862
|
+
name: Bucket name
|
|
863
|
+
key: Bucket identifier
|
|
864
|
+
blob_file_path: Path to the file in the bucket
|
|
865
|
+
folder_key: Folder key
|
|
866
|
+
folder_path: Folder path
|
|
867
|
+
|
|
868
|
+
Examples:
|
|
869
|
+
>>> sdk.buckets.delete(name="my-storage", blob_file_path="data/file.txt")
|
|
870
|
+
"""
|
|
871
|
+
bucket = self.retrieve(
|
|
872
|
+
name=name, key=key, folder_key=folder_key, folder_path=folder_path
|
|
873
|
+
)
|
|
874
|
+
spec = self._delete_file_spec(
|
|
875
|
+
bucket.id, blob_file_path, folder_key=folder_key, folder_path=folder_path
|
|
876
|
+
)
|
|
877
|
+
self.request(
|
|
878
|
+
spec.method,
|
|
879
|
+
url=spec.endpoint,
|
|
880
|
+
params=spec.params,
|
|
881
|
+
headers=spec.headers,
|
|
882
|
+
)
|
|
883
|
+
|
|
884
|
+
@traced(name="buckets_delete", run_type="uipath")
|
|
885
|
+
@infer_bindings(resource_type="bucket")
|
|
886
|
+
async def delete_async(
|
|
887
|
+
self,
|
|
888
|
+
*,
|
|
889
|
+
name: Optional[str] = None,
|
|
890
|
+
key: Optional[str] = None,
|
|
891
|
+
blob_file_path: str,
|
|
892
|
+
folder_key: Optional[str] = None,
|
|
893
|
+
folder_path: Optional[str] = None,
|
|
894
|
+
) -> None:
|
|
895
|
+
"""Delete a file from a bucket asynchronously.
|
|
896
|
+
|
|
897
|
+
Args:
|
|
898
|
+
name: Bucket name
|
|
899
|
+
key: Bucket identifier
|
|
900
|
+
blob_file_path: Path to the file in the bucket
|
|
901
|
+
folder_key: Folder key
|
|
902
|
+
folder_path: Folder path
|
|
903
|
+
|
|
904
|
+
Examples:
|
|
905
|
+
>>> await sdk.buckets.delete_async(name="my-storage", blob_file_path="data/file.txt")
|
|
906
|
+
"""
|
|
907
|
+
bucket = await self.retrieve_async(
|
|
908
|
+
name=name, key=key, folder_key=folder_key, folder_path=folder_path
|
|
909
|
+
)
|
|
910
|
+
spec = self._delete_file_spec(
|
|
911
|
+
bucket.id, blob_file_path, folder_key=folder_key, folder_path=folder_path
|
|
912
|
+
)
|
|
913
|
+
await self.request_async(
|
|
914
|
+
spec.method,
|
|
915
|
+
url=spec.endpoint,
|
|
916
|
+
params=spec.params,
|
|
917
|
+
headers=spec.headers,
|
|
918
|
+
)
|
|
919
|
+
|
|
920
|
+
@traced(name="buckets_get_files", run_type="uipath")
|
|
921
|
+
@infer_bindings(resource_type="bucket")
|
|
922
|
+
def get_files(
|
|
923
|
+
self,
|
|
924
|
+
*,
|
|
925
|
+
name: Optional[str] = None,
|
|
926
|
+
key: Optional[str] = None,
|
|
927
|
+
prefix: str = "",
|
|
928
|
+
recursive: bool = False,
|
|
929
|
+
file_name_glob: Optional[str] = None,
|
|
930
|
+
folder_key: Optional[str] = None,
|
|
931
|
+
folder_path: Optional[str] = None,
|
|
932
|
+
) -> Iterator[BucketFile]:
|
|
933
|
+
"""Get files using OData GetFiles API (Studio-compatible).
|
|
934
|
+
|
|
935
|
+
This method uses the GetFiles API which is used by UiPath Studio activities.
|
|
936
|
+
Use this when you need:
|
|
937
|
+
- Recursive directory traversal
|
|
938
|
+
- Glob pattern filtering (e.g., "*.pdf")
|
|
939
|
+
- Compatibility with Studio activity behavior
|
|
940
|
+
|
|
941
|
+
Args:
|
|
942
|
+
name: Bucket name
|
|
943
|
+
key: Bucket identifier
|
|
944
|
+
prefix: Directory path to filter files (default: root)
|
|
945
|
+
recursive: Recurse subdirectories for flat view (default: False)
|
|
946
|
+
file_name_glob: File filter pattern (e.g., "*.pdf", "data_*.csv")
|
|
947
|
+
folder_key: Folder key
|
|
948
|
+
folder_path: Folder path
|
|
949
|
+
|
|
950
|
+
Returns:
|
|
951
|
+
Iterator[BucketFile]: Iterator of files matching criteria
|
|
952
|
+
|
|
953
|
+
Raises:
|
|
954
|
+
ValueError: If neither name nor key is provided
|
|
955
|
+
LookupError: If bucket not found
|
|
956
|
+
Exception: For API errors or invalid responses
|
|
957
|
+
|
|
958
|
+
Note:
|
|
959
|
+
For large buckets with 10,000+ files, consider using list_files()
|
|
960
|
+
which uses more efficient cursor-based pagination.
|
|
961
|
+
|
|
962
|
+
Examples:
|
|
963
|
+
>>> # Get all PDF files recursively
|
|
964
|
+
>>> for file in sdk.buckets.get_files(
|
|
965
|
+
... name="my-storage",
|
|
966
|
+
... recursive=True,
|
|
967
|
+
... file_name_glob="*.pdf"
|
|
968
|
+
... ):
|
|
969
|
+
... print(f"{file.path} - {file.size} bytes")
|
|
970
|
+
>>>
|
|
971
|
+
>>> # Get files in specific directory
|
|
972
|
+
>>> files = list(sdk.buckets.get_files(
|
|
973
|
+
... name="my-storage",
|
|
974
|
+
... prefix="reports/"
|
|
975
|
+
... ))
|
|
976
|
+
"""
|
|
977
|
+
if not (name or key):
|
|
978
|
+
raise ValueError("Must specify either bucket name or key")
|
|
979
|
+
|
|
980
|
+
if file_name_glob is not None and not file_name_glob.strip():
|
|
981
|
+
raise ValueError("file_name_glob cannot be empty")
|
|
982
|
+
|
|
983
|
+
bucket = self.retrieve(
|
|
984
|
+
name=name, key=key, folder_key=folder_key, folder_path=folder_path
|
|
985
|
+
)
|
|
986
|
+
|
|
987
|
+
skip = 0
|
|
988
|
+
top = self._GET_FILES_PAGE_SIZE
|
|
989
|
+
|
|
990
|
+
while True:
|
|
991
|
+
spec = self._get_files_spec(
|
|
992
|
+
bucket.id,
|
|
993
|
+
prefix=prefix,
|
|
994
|
+
recursive=recursive,
|
|
995
|
+
file_name_glob=file_name_glob,
|
|
996
|
+
skip=skip,
|
|
997
|
+
top=top,
|
|
998
|
+
folder_key=folder_key,
|
|
999
|
+
folder_path=folder_path,
|
|
1000
|
+
)
|
|
1001
|
+
|
|
1002
|
+
response = self.request(
|
|
1003
|
+
spec.method,
|
|
1004
|
+
url=spec.endpoint,
|
|
1005
|
+
params=spec.params,
|
|
1006
|
+
headers=spec.headers,
|
|
1007
|
+
).json()
|
|
1008
|
+
|
|
1009
|
+
items = response.get("value", [])
|
|
1010
|
+
|
|
1011
|
+
if not items:
|
|
1012
|
+
break
|
|
1013
|
+
|
|
1014
|
+
for item in items:
|
|
1015
|
+
if not item.get("IsDirectory", False):
|
|
1016
|
+
try:
|
|
1017
|
+
yield BucketFile.model_validate(item)
|
|
1018
|
+
except Exception as e:
|
|
1019
|
+
raise ValueError(
|
|
1020
|
+
f"Failed to parse file entry: {e}. Item: {item}"
|
|
1021
|
+
) from e
|
|
1022
|
+
|
|
1023
|
+
if len(items) < top:
|
|
1024
|
+
break
|
|
1025
|
+
|
|
1026
|
+
skip += top
|
|
1027
|
+
|
|
1028
|
+
@traced(name="buckets_get_files", run_type="uipath")
|
|
1029
|
+
@infer_bindings(resource_type="bucket")
|
|
1030
|
+
async def get_files_async(
|
|
1031
|
+
self,
|
|
1032
|
+
*,
|
|
1033
|
+
name: Optional[str] = None,
|
|
1034
|
+
key: Optional[str] = None,
|
|
1035
|
+
prefix: str = "",
|
|
1036
|
+
recursive: bool = False,
|
|
1037
|
+
file_name_glob: Optional[str] = None,
|
|
1038
|
+
folder_key: Optional[str] = None,
|
|
1039
|
+
folder_path: Optional[str] = None,
|
|
1040
|
+
) -> AsyncIterator[BucketFile]:
|
|
1041
|
+
"""Async version of get_files().
|
|
1042
|
+
|
|
1043
|
+
See get_files() for detailed documentation.
|
|
1044
|
+
|
|
1045
|
+
Examples:
|
|
1046
|
+
>>> async for file in sdk.buckets.get_files_async(
|
|
1047
|
+
... name="my-storage",
|
|
1048
|
+
... recursive=True,
|
|
1049
|
+
... file_name_glob="*.pdf"
|
|
1050
|
+
... ):
|
|
1051
|
+
... print(file.path)
|
|
1052
|
+
"""
|
|
1053
|
+
if not (name or key):
|
|
1054
|
+
raise ValueError("Must specify either bucket name or key")
|
|
1055
|
+
|
|
1056
|
+
if file_name_glob is not None and not file_name_glob.strip():
|
|
1057
|
+
raise ValueError("file_name_glob cannot be empty")
|
|
1058
|
+
|
|
1059
|
+
bucket = await self.retrieve_async(
|
|
1060
|
+
name=name, key=key, folder_key=folder_key, folder_path=folder_path
|
|
1061
|
+
)
|
|
1062
|
+
|
|
1063
|
+
skip = 0
|
|
1064
|
+
top = self._GET_FILES_PAGE_SIZE
|
|
1065
|
+
|
|
1066
|
+
while True:
|
|
1067
|
+
spec = self._get_files_spec(
|
|
1068
|
+
bucket.id,
|
|
1069
|
+
prefix=prefix,
|
|
1070
|
+
recursive=recursive,
|
|
1071
|
+
file_name_glob=file_name_glob,
|
|
1072
|
+
skip=skip,
|
|
1073
|
+
top=top,
|
|
1074
|
+
folder_key=folder_key,
|
|
1075
|
+
folder_path=folder_path,
|
|
1076
|
+
)
|
|
1077
|
+
|
|
1078
|
+
response = (
|
|
1079
|
+
await self.request_async(
|
|
1080
|
+
spec.method,
|
|
1081
|
+
url=spec.endpoint,
|
|
1082
|
+
params=spec.params,
|
|
1083
|
+
headers=spec.headers,
|
|
1084
|
+
)
|
|
1085
|
+
).json()
|
|
1086
|
+
|
|
1087
|
+
items = response.get("value", [])
|
|
1088
|
+
|
|
1089
|
+
if not items:
|
|
1090
|
+
break
|
|
1091
|
+
|
|
1092
|
+
for item in items:
|
|
1093
|
+
if not item.get("IsDirectory", False):
|
|
1094
|
+
try:
|
|
1095
|
+
yield BucketFile.model_validate(item)
|
|
1096
|
+
except Exception as e:
|
|
1097
|
+
raise ValueError(
|
|
1098
|
+
f"Failed to parse file entry: {e}. Item: {item}"
|
|
1099
|
+
) from e
|
|
1100
|
+
|
|
1101
|
+
if len(items) < top:
|
|
1102
|
+
break
|
|
1103
|
+
|
|
1104
|
+
skip += top
|
|
438
1105
|
|
|
439
1106
|
@property
def custom_headers(self) -> Dict[str, str]:
    """Headers attached to every request issued by this service.

    Returns the folder-scoped headers (``folder_headers``) so bucket
    operations are routed to the correct Orchestrator folder.
    """
    return self.folder_headers
|
|
442
1109
|
|
|
1110
|
+
def _list_spec(
    self,
    folder_path: Optional[str],
    folder_key: Optional[str],
    name: Optional[str],
    skip: int,
    top: int,
) -> RequestSpec:
    """Build the OData request used to list buckets.

    Args:
        folder_path: Folder path for folder-scoped routing.
        folder_key: Folder key for folder-scoped routing.
        name: Optional case-insensitive substring filter on the bucket Name.
        skip: Number of records to skip (pagination).
        top: Maximum number of records to return (pagination).

    Returns:
        RequestSpec: Specification of the GET ``/odata/Buckets`` request.
    """
    query: Dict[str, Any] = {"$skip": skip, "$top": top}
    if name:
        # Single quotes are doubled per OData string-literal escaping rules.
        safe_name = name.replace("'", "''")
        query["$filter"] = f"contains(tolower(Name), tolower('{safe_name}'))"
    return RequestSpec(
        method="GET",
        endpoint=Endpoint("/orchestrator_/odata/Buckets"),
        params=query,
        headers={
            **header_folder(folder_key, folder_path),
        },
    )
|
|
1138
|
+
|
|
1139
|
+
def _create_spec(
    self,
    name: str,
    description: Optional[str],
    identifier: str,
    folder_path: Optional[str],
    folder_key: Optional[str],
) -> RequestSpec:
    """Build the request that creates a new storage bucket.

    Args:
        name: Display name of the bucket.
        description: Optional description; omitted from the payload when falsy.
        identifier: Unique identifier (GUID) assigned to the bucket.
        folder_path: Folder path for folder-scoped routing.
        folder_key: Folder key for folder-scoped routing.

    Returns:
        RequestSpec: Specification of the POST ``/odata/Buckets`` request.
    """
    payload: Dict[str, Any] = {"Name": name, "Identifier": identifier}
    if description:
        payload["Description"] = description
    return RequestSpec(
        method="POST",
        endpoint=Endpoint("/orchestrator_/odata/Buckets"),
        json=payload,
        headers={
            **header_folder(folder_key, folder_path),
        },
    )
|
|
1163
|
+
|
|
443
1164
|
def _retrieve_spec(
    self,
    name: str,
    folder_key: Optional[str] = None,
    folder_path: Optional[str] = None,
) -> RequestSpec:
    """Build the request that looks up a bucket by its exact name.

    Args:
        name: Exact bucket name to match (``Name eq`` filter).
        folder_key: Folder key for folder-scoped routing.
        folder_path: Folder path for folder-scoped routing.

    Returns:
        RequestSpec: Specification of a GET ``/odata/Buckets`` request
        limited to a single matching record.
    """
    # Double single quotes per OData string-literal escaping rules.
    safe_name = name.replace("'", "''")
    return RequestSpec(
        method="GET",
        endpoint=Endpoint("/orchestrator_/odata/Buckets"),
        params={"$filter": f"Name eq '{safe_name}'", "$top": 1},
        headers={
            **header_folder(folder_key, folder_path),
        },
    )
|
|
@@ -498,11 +1220,120 @@ class BucketsService(FolderContext, BaseService):
|
|
|
498
1220
|
folder_key: Optional[str] = None,
|
|
499
1221
|
folder_path: Optional[str] = None,
|
|
500
1222
|
) -> RequestSpec:
|
|
1223
|
+
escaped_key = key.replace("'", "''")
|
|
1224
|
+
return RequestSpec(
|
|
1225
|
+
method="GET",
|
|
1226
|
+
endpoint=Endpoint(
|
|
1227
|
+
f"/orchestrator_/odata/Buckets/UiPath.Server.Configuration.OData.GetByKey(identifier='{escaped_key}')"
|
|
1228
|
+
),
|
|
1229
|
+
headers={
|
|
1230
|
+
**header_folder(folder_key, folder_path),
|
|
1231
|
+
},
|
|
1232
|
+
)
|
|
1233
|
+
|
|
1234
|
+
def _list_files_spec(
    self,
    bucket_id: int,
    prefix: str,
    continuation_token: Optional[str] = None,
    take_hint: int = 500,
    folder_key: Optional[str] = None,
    folder_path: Optional[str] = None,
) -> RequestSpec:
    """Build the REST request for listing files in a bucket.

    Targets the ``/api/Buckets/{id}/ListFiles`` endpoint, which paginates
    via a continuation token.

    Args:
        bucket_id: The bucket ID.
        prefix: Path prefix for filtering; omitted from the query when empty.
        continuation_token: Token for pagination; omitted when not set.
        take_hint: Minimum number of files to return (default 500, max 1000).
        folder_key: Folder key for folder-scoped routing.
        folder_path: Folder path for folder-scoped routing.
    """
    # Keep only the truthy query parameters, in this fixed order.
    query: Dict[str, Any] = {
        param: value
        for param, value in (
            ("prefix", prefix),
            ("continuationToken", continuation_token),
            ("takeHint", take_hint),
        )
        if value
    }
    return RequestSpec(
        method="GET",
        endpoint=Endpoint(f"/api/Buckets/{bucket_id}/ListFiles"),
        params=query,
        headers={
            **header_folder(folder_key, folder_path),
        },
    )
|
|
1271
|
+
|
|
1272
|
+
def _delete_file_spec(
    self,
    bucket_id: int,
    blob_file_path: str,
    folder_key: Optional[str] = None,
    folder_path: Optional[str] = None,
) -> RequestSpec:
    """Build the request that deletes a single file from a bucket.

    Args:
        bucket_id: The bucket ID.
        blob_file_path: Path of the blob to delete, passed as the ``path``
            query parameter.
        folder_key: Folder key for folder-scoped routing.
        folder_path: Folder path for folder-scoped routing.
    """
    delete_endpoint = Endpoint(
        f"/orchestrator_/odata/Buckets({bucket_id})/UiPath.Server.Configuration.OData.DeleteFile"
    )
    return RequestSpec(
        method="DELETE",
        endpoint=delete_endpoint,
        params={"path": blob_file_path},
        headers={
            **header_folder(folder_key, folder_path),
        },
    )
|
|
1290
|
+
|
|
1291
|
+
def _get_files_spec(
    self,
    bucket_id: int,
    prefix: str = "",
    recursive: bool = False,
    file_name_glob: Optional[str] = None,
    skip: int = 0,
    top: int = 500,
    folder_key: Optional[str] = None,
    folder_path: Optional[str] = None,
) -> RequestSpec:
    """Build the OData request for the GetFiles endpoint.

    Args:
        bucket_id: Bucket ID.
        prefix: Directory path prefix; an empty prefix means the root ("/").
        recursive: Recurse into subdirectories.
        file_name_glob: File name filter pattern; omitted when not set.
        skip: Number of items to skip (pagination); omitted when 0.
        top: Number of items to return (pagination); always sent.
        folder_key: Folder key for folder-scoped routing.
        folder_path: Folder path for folder-scoped routing.

    Returns:
        RequestSpec: OData request specification.
    """
    # Empty prefix maps to the bucket root.
    query: Dict[str, Any] = {"directory": prefix or "/"}
    if recursive:
        query["recursive"] = "true"
    if file_name_glob:
        query["fileNameGlob"] = file_name_glob
    if skip > 0:
        query["$skip"] = skip
    query["$top"] = top

    return RequestSpec(
        method="GET",
        endpoint=Endpoint(
            f"/orchestrator_/odata/Buckets({bucket_id})/UiPath.Server.Configuration.OData.GetFiles"
        ),
        params=query,
        headers={
            **header_folder(folder_key, folder_path),
        },
    )
|
uipath/models/__init__.py
CHANGED
|
@@ -2,7 +2,7 @@ from .action_schema import ActionSchema
|
|
|
2
2
|
from .actions import Action
|
|
3
3
|
from .assets import Asset, UserAsset
|
|
4
4
|
from .attachment import Attachment
|
|
5
|
-
from .buckets import Bucket
|
|
5
|
+
from .buckets import Bucket, BucketFile
|
|
6
6
|
from .connections import Connection, ConnectionMetadata, ConnectionToken, EventArguments
|
|
7
7
|
from .context_grounding import ContextGroundingQueryResponse
|
|
8
8
|
from .context_grounding_index import ContextGroundingIndex
|
|
@@ -51,4 +51,5 @@ __all__ = [
|
|
|
51
51
|
"BaseUrlMissingError",
|
|
52
52
|
"SecretMissingError",
|
|
53
53
|
"Bucket",
|
|
54
|
+
"BucketFile",
|
|
54
55
|
]
|
uipath/models/buckets.py
CHANGED
|
@@ -1,6 +1,55 @@
|
|
|
1
1
|
from typing import List, Optional
|
|
2
2
|
|
|
3
|
-
from pydantic import BaseModel, ConfigDict, Field
|
|
3
|
+
from pydantic import AliasChoices, BaseModel, ConfigDict, Field
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class BucketFile(BaseModel):
    """A single file (or directory) entry inside a storage bucket.

    Field aliases accept both the camelCase names returned by the ListFiles
    REST API and the PascalCase names returned by the OData GetFiles API.
    """

    # Accept values by field name or by either alias, and keep any extra
    # payload fields the server may include.
    model_config = ConfigDict(
        populate_by_name=True,
        validate_by_alias=True,
        extra="allow",
    )

    # Full path of the entry within the bucket.
    full_path: str = Field(
        validation_alias=AliasChoices("fullPath", "FullPath"),
        description="Full path within bucket",
    )
    # MIME type, when the server reports one.
    content_type: Optional[str] = Field(
        default=None,
        validation_alias=AliasChoices("contentType", "ContentType"),
        description="MIME type",
    )
    # Size of the entry in bytes.
    size: int = Field(
        validation_alias=AliasChoices("size", "Size"),
        description="File size in bytes",
    )
    # Last modification timestamp as an ISO-format string, if provided.
    last_modified: Optional[str] = Field(
        default=None,
        validation_alias=AliasChoices("lastModified", "LastModified"),
        description="Last modification timestamp (ISO format)",
    )
    # True when the entry represents a directory rather than a file.
    is_directory: bool = Field(
        default=False,
        validation_alias=AliasChoices("IsDirectory", "isDirectory"),
        description="Whether this entry is a directory",
    )

    @property
    def path(self) -> str:
        """Alias for full_path for consistency."""
        return self.full_path

    @property
    def name(self) -> str:
        """The final path segment, i.e. the bare file name."""
        # rsplit returns the whole string when no "/" is present.
        return self.full_path.rsplit("/", 1)[-1]
|
|
4
53
|
|
|
5
54
|
|
|
6
55
|
class Bucket(BaseModel):
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: uipath
|
|
3
|
-
Version: 2.1.
|
|
3
|
+
Version: 2.1.123
|
|
4
4
|
Summary: Python SDK and CLI for UiPath Platform, enabling programmatic interaction with automation services, process management, and deployment tools.
|
|
5
5
|
Project-URL: Homepage, https://uipath.com
|
|
6
6
|
Project-URL: Repository, https://github.com/UiPath/uipath-python
|
|
@@ -73,7 +73,7 @@ uipath/_cli/_evals/mocks/mockito_mocker.py,sha256=opwfELnvuh3krnPAg0MupkHTEmhCpQ
|
|
|
73
73
|
uipath/_cli/_evals/mocks/mocks.py,sha256=IrvhtTtIuU5geopvCRglNhxKoOcChnnjQZMoYygx0PU,2225
|
|
74
74
|
uipath/_cli/_push/models.py,sha256=K3k6QUMkiNIb3M4U0EgDlKz1UELxeMXLNVAj3qyhZ4U,470
|
|
75
75
|
uipath/_cli/_push/sw_file_handler.py,sha256=ivHj0qzCvEP45M3x-Oi2edO5I-uhyHzgnidDF3JRoTk,36192
|
|
76
|
-
uipath/_cli/_runtime/_contracts.py,sha256=
|
|
76
|
+
uipath/_cli/_runtime/_contracts.py,sha256=sVHUww13iFsB7tDkbmD4ki41xd6nbAoJkoc9BLiXEh8,36352
|
|
77
77
|
uipath/_cli/_runtime/_escalation.py,sha256=x3vI98qsfRA-fL_tNkRVTFXioM5Gv2w0GFcXJJ5eQtg,7981
|
|
78
78
|
uipath/_cli/_runtime/_hitl.py,sha256=JAwTUKvxO4HpnZMwE4E0AegAPw_uYOwgt0OYcu6EvTg,11474
|
|
79
79
|
uipath/_cli/_runtime/_logging.py,sha256=srjAi3Cy6g7b8WNHiYNjaZT4t40F3XRqquuoGd2kh4Y,14019
|
|
@@ -116,7 +116,7 @@ uipath/_services/actions_service.py,sha256=2RPMR-hFMsOlqEyjIf3aF7-lrf57jdrSD0pBj
|
|
|
116
116
|
uipath/_services/api_client.py,sha256=kGm04ijk9AOEQd2BMxvQg-2QoB8dmyoDwFFDPyutAGw,1966
|
|
117
117
|
uipath/_services/assets_service.py,sha256=pG0Io--SeiRRQmfUWPQPl1vq3csZlQgx30LBNKRmmF8,12145
|
|
118
118
|
uipath/_services/attachments_service.py,sha256=NPQYK7CGjfBaNT_1S5vEAfODmOChTbQZforllFM2ofU,26678
|
|
119
|
-
uipath/_services/buckets_service.py,sha256=
|
|
119
|
+
uipath/_services/buckets_service.py,sha256=FGWhJ3ewMEAahcSPY60wtFB0_qwAfaQAaAjqrC52VDk,44603
|
|
120
120
|
uipath/_services/connections_service.py,sha256=tKJHHOKQYKR6LkgB-V_2d0vFpLEdFeMzwj_xmBVHUDw,18416
|
|
121
121
|
uipath/_services/context_grounding_service.py,sha256=Pjx-QQQEiSKD-hY6ityj3QUSALN3fIcKLLHr_NZ0d_g,37117
|
|
122
122
|
uipath/_services/documents_service.py,sha256=2mPZzmOl2r5i8RYvdeRSJtEFWSSsiXqIauTgNTW75s4,45341
|
|
@@ -195,13 +195,13 @@ uipath/eval/mocks/mockable.py,sha256=FJEE4iz6nchowGhoGR3FgF9VvymHnWJkUyakKOK4fIg
|
|
|
195
195
|
uipath/eval/models/__init__.py,sha256=-V610Bw4daQQ2CwNUGwsEW5n56b_G2mMZY4vaChV2r4,716
|
|
196
196
|
uipath/eval/models/llm_judge_types.py,sha256=_kPnyAoWyV_Idx-lAgAbHGq4akQRh-eDC2PCYG6T0Zc,9620
|
|
197
197
|
uipath/eval/models/models.py,sha256=q-g-UDovrkNtA9jQxvAWvQRXG511ZQEVOm9mjTNBeVk,9746
|
|
198
|
-
uipath/models/__init__.py,sha256=
|
|
198
|
+
uipath/models/__init__.py,sha256=8ZIx9XTjDoPYrnuxxgEfFMaldfcoHfMAt7YZ_LP4_d4,1365
|
|
199
199
|
uipath/models/action_schema.py,sha256=tBn1qQ3NQLU5nwWlBIzIKIx3XK5pO_D1S51IjFlZ1FA,610
|
|
200
200
|
uipath/models/actions.py,sha256=1vRsJ3JSmMdPkbiYAiHzY8K44vmW3VlMsmQUBAkSgrQ,3141
|
|
201
201
|
uipath/models/assets.py,sha256=7x3swJRnG_a4VgjdXKKwraJLT5TF0u4wHsl6coOjX0g,2762
|
|
202
202
|
uipath/models/attachment.py,sha256=lI6BxBY6DY5U6qZbxhkNu-usseA1zovYSTRtLq50ubI,1029
|
|
203
203
|
uipath/models/auth.py,sha256=-CEo5KZVtZZgbAMatN6B1vBmGp8lTTumR8sMthRmL8I,345
|
|
204
|
-
uipath/models/buckets.py,sha256=
|
|
204
|
+
uipath/models/buckets.py,sha256=7uDonM5ddfhunP6Vn24kEa-iW_ZluJU4SaWEqB2dWu8,2754
|
|
205
205
|
uipath/models/connections.py,sha256=jmzlfnddqlxjmiVhqsETRV6TQPH3fFqJGsygG0gUf7g,2745
|
|
206
206
|
uipath/models/context_grounding.py,sha256=3MaF2Fv2QYle8UUWvKGkCN5XGpx2T4a34fdbBqJ2fCs,1137
|
|
207
207
|
uipath/models/context_grounding_index.py,sha256=OhRyxZDHDSrEmBFK0-JLqMMMT64jir4XkHtQ54IKtc0,2683
|
|
@@ -225,8 +225,8 @@ uipath/tracing/_utils.py,sha256=emsQRgYu-P1gj1q7XUPJD94mOa12JvhheRkuZJpLd9Y,1505
|
|
|
225
225
|
uipath/utils/__init__.py,sha256=VD-KXFpF_oWexFg6zyiWMkxl2HM4hYJMIUDZ1UEtGx0,105
|
|
226
226
|
uipath/utils/_endpoints_manager.py,sha256=tnF_FiCx8qI2XaJDQgYkMN_gl9V0VqNR1uX7iawuLp8,8230
|
|
227
227
|
uipath/utils/dynamic_schema.py,sha256=w0u_54MoeIAB-mf3GmwX1A_X8_HDrRy6p998PvX9evY,3839
|
|
228
|
-
uipath-2.1.
|
|
229
|
-
uipath-2.1.
|
|
230
|
-
uipath-2.1.
|
|
231
|
-
uipath-2.1.
|
|
232
|
-
uipath-2.1.
|
|
228
|
+
uipath-2.1.123.dist-info/METADATA,sha256=jnVXMo218T9D5w0meGX_NYFtoA7R64pfTpIg6UKypHo,6626
|
|
229
|
+
uipath-2.1.123.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
230
|
+
uipath-2.1.123.dist-info/entry_points.txt,sha256=9C2_29U6Oq1ExFu7usihR-dnfIVNSKc-0EFbh0rskB4,43
|
|
231
|
+
uipath-2.1.123.dist-info/licenses/LICENSE,sha256=-KBavWXepyDjimmzH5fVAsi-6jNVpIKFc2kZs0Ri4ng,1058
|
|
232
|
+
uipath-2.1.123.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|