uipath 2.1.69__py3-none-any.whl → 2.1.71__py3-none-any.whl
This diff compares publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- uipath/_cli/_dev/_terminal/__init__.py +15 -1
- uipath/_cli/_dev/_terminal/_utils/_logger.py +67 -1
- uipath/_services/context_grounding_service.py +304 -22
- uipath/_utils/constants.py +12 -0
- {uipath-2.1.69.dist-info → uipath-2.1.71.dist-info}/METADATA +1 -1
- {uipath-2.1.69.dist-info → uipath-2.1.71.dist-info}/RECORD +9 -9
- {uipath-2.1.69.dist-info → uipath-2.1.71.dist-info}/WHEEL +0 -0
- {uipath-2.1.69.dist-info → uipath-2.1.71.dist-info}/entry_points.txt +0 -0
- {uipath-2.1.69.dist-info → uipath-2.1.71.dist-info}/licenses/LICENSE +0 -0
uipath/_cli/_dev/_terminal/__init__.py
CHANGED
@@ -37,7 +37,7 @@ from ._models._execution import ExecutionRun
 from ._models._messages import LogMessage, TraceMessage
 from ._utils._chat import RunContextChatHandler, build_user_message_event
 from ._utils._exporter import RunContextExporter
-from ._utils._logger import RunContextLogHandler
+from ._utils._logger import RunContextLogHandler, patch_textual_stderr


 class UiPathDevTerminal(App[Any]):
@@ -61,6 +61,8 @@ class UiPathDevTerminal(App[Any]):
         runtime_factory: UiPathRuntimeFactory[Any, Any],
         **kwargs,
     ):
+        self._stderr_write_fd: int = patch_textual_stderr(self._add_subprocess_log)
+
         super().__init__(**kwargs)

         self.initial_entrypoint: str = "main.py"
@@ -357,3 +359,15 @@ class UiPathDevTerminal(App[Any]):
             )
             log_msg = LogMessage(run.id, "ERROR", tb, timestamp)
             self._handle_log_message(log_msg)
+
+    def _add_subprocess_log(self, level: str, message: str) -> None:
+        """Handle a stderr line coming from subprocesses."""
+
+        def add_log() -> None:
+            details_panel = self.query_one("#details-panel", RunDetailsPanel)
+            run = getattr(details_panel, "current_run", None)
+            if run:
+                log_msg = LogMessage(run.id, level, message, datetime.now())
+                self._handle_log_message(log_msg)
+
+        self.call_from_thread(add_log)
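The dispatcher handed to patch_textual_stderr is invoked on a background reader thread, which is why _add_subprocess_log hops back onto the UI thread with call_from_thread. A minimal sketch of that contract, assuming only Textual's public App API (the ExampleApp class and its log line are illustrative, not part of the package):

# Sketch only: the thread-hop pattern UiPathDevTerminal uses above.
from textual.app import App

from uipath._cli._dev._terminal._utils._logger import patch_textual_stderr


class ExampleApp(App):
    def on_mount(self) -> None:
        # patch_textual_stderr returns the pipe's write end; hand it to
        # subprocesses as their stderr (see _logger.py below).
        self.stderr_fd = patch_textual_stderr(self._on_stderr_line)

    def _on_stderr_line(self, level: str, message: str) -> None:
        # Invoked from the "stderr-reader" thread, so marshalling back to
        # the event loop with call_from_thread is required here.
        self.call_from_thread(self.log, f"{level}: {message}")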
uipath/_cli/_dev/_terminal/_utils/_logger.py
CHANGED
@@ -1,6 +1,11 @@
+from __future__ import annotations
+
 import logging
+import os
+import re
+import threading
 from datetime import datetime
-from typing import Callable
+from typing import Callable, Pattern

 from .._models._messages import LogMessage

@@ -30,3 +35,64 @@ class RunContextLogHandler(logging.Handler):
         except Exception:
             # Don't let logging errors crash the app
             pass
+
+
+# A dispatcher is a callable that accepts (level, message) pairs
+DispatchLog = Callable[[str, str], None]
+
+LEVEL_PATTERNS: list[tuple[str, Pattern[str]]] = [
+    ("DEBUG", re.compile(r"^(DEBUG)[:\s-]+", re.I)),
+    ("INFO", re.compile(r"^(INFO)[:\s-]+", re.I)),
+    ("WARN", re.compile(r"^(WARNING|WARN)[:\s-]+", re.I)),
+    ("ERROR", re.compile(r"^(ERROR|ERRO)[:\s-]+", re.I)),
+]
+
+
+def patch_textual_stderr(dispatch_log: DispatchLog) -> int:
+    """Redirect subprocess stderr into a provided dispatcher.
+
+    Args:
+        dispatch_log: Callable invoked with (level, message) for each stderr line.
+            This will be called from a background thread, so the caller
+            should use `App.call_from_thread` or equivalent.
+
+    Returns:
+        int: The write file descriptor for stderr (pass to subprocesses).
+    """
+    from textual.app import _PrintCapture
+
+    read_fd, write_fd = os.pipe()
+
+    # Patch fileno() so subprocesses can write to our pipe
+    _PrintCapture.fileno = lambda self: write_fd  # type: ignore[method-assign]
+
+    def read_stderr_pipe() -> None:
+        with os.fdopen(read_fd, "r", buffering=1) as pipe_reader:
+            try:
+                for raw in pipe_reader:
+                    text = raw.rstrip()
+                    level: str = "ERROR"
+                    message: str = text
+
+                    # Try to parse a known level prefix
+                    for lvl, pattern in LEVEL_PATTERNS:
+                        m = pattern.match(text)
+                        if m:
+                            level = lvl
+                            message = text[m.end() :]
+                            break
+
+                    dispatch_log(level, message)
+
+            except Exception:
+                # Never raise from thread
+                pass
+
+    thread = threading.Thread(
+        target=read_stderr_pipe,
+        daemon=True,
+        name="stderr-reader",
+    )
+    thread.start()
+
+    return write_fd
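Because patch_textual_stderr returns a raw pipe write descriptor, any subprocess call can adopt it as stderr; the reader thread then splits a recognized prefix such as "WARNING:" off each line via LEVEL_PATTERNS. A hedged sketch of that wiring (the helper and command are illustrative, not part of the package):

# Sketch only: forward a child process's stderr through the patched pipe.
import subprocess


def run_with_forwarded_stderr(cmd: list[str], write_fd: int) -> int:
    # Each stderr line, e.g. "WARNING: low disk", reaches the dispatcher
    # as ("WARN", "low disk"); lines without a level prefix default to "ERROR".
    proc = subprocess.Popen(cmd, stderr=write_fd)
    return proc.wait()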
uipath/_services/context_grounding_service.py
CHANGED
@@ -1,4 +1,5 @@
-
+import json
+from typing import Any, Dict, List, Optional, Tuple, Union

 import httpx
 from pydantic import TypeAdapter
@@ -9,6 +10,11 @@ from .._execution_context import ExecutionContext
 from .._folder_context import FolderContext
 from .._utils import Endpoint, RequestSpec, header_folder, infer_bindings
 from .._utils.constants import (
+    CONFLUENCE_DATA_SOURCE,
+    DROPBOX_DATA_SOURCE,
+    GOOGLE_DRIVE_DATA_SOURCE,
+    LLMV4,
+    ONEDRIVE_DATA_SOURCE,
     ORCHESTRATOR_STORAGE_BUCKET_DATA_SOURCE,
 )
 from ..models import IngestionInProgressException
@@ -312,6 +318,122 @@ class ContextGroundingService(FolderContext, BaseService):

         return response.json()

+    @traced(name="contextgrounding_create_index", run_type="uipath")
+    @infer_bindings(resource_type="index")
+    def create_index(
+        self,
+        name: str,
+        source: Dict[str, Any],
+        description: Optional[str] = None,
+        cron_expression: Optional[str] = None,
+        time_zone_id: Optional[str] = None,
+        advanced_ingestion: Optional[bool] = True,
+        preprocessing_request: Optional[str] = LLMV4,
+        folder_key: Optional[str] = None,
+        folder_path: Optional[str] = None,
+    ) -> ContextGroundingIndex:
+        """Create a new context grounding index.
+
+        Args:
+            name (str): The name of the index to create.
+            source (dict): Source configuration dictionary:
+                - For buckets: type="bucket", bucket_name, folder_path, directory_path="/" (optional), file_type (optional)
+                - For Google Drive: type="google_drive", connection_name, connection_id, leaf_folder_id, directory_path, folder_path, file_type (optional)
+                - For Dropbox: type="dropbox", connection_name, connection_id, directory_path, folder_path, file_type (optional)
+                - For OneDrive: type="onedrive", connection_name, connection_id, leaf_folder_id, directory_path, folder_path, file_type (optional)
+                - For Confluence: type="confluence", connection_name, connection_id, space_id, directory_path, folder_path, file_type (optional)
+            description (Optional[str]): Description of the index.
+            cron_expression (Optional[str]): Cron expression for scheduled indexing (e.g., "0 0 18 ? * 2" for Tuesdays at 6 PM).
+            time_zone_id (Optional[str]): Valid Windows Timezone ID for the cron expression (e.g., "UTC", "Pacific Standard Time", "GTB Standard Time").
+            advanced_ingestion (Optional[bool]): Enable advanced ingestion with preprocessing. Defaults to True.
+            preprocessing_request (Optional[str]): The OData type for the preprocessing request. Defaults to LLMV4.
+            folder_key (Optional[str]): The key of the folder where the index will be created.
+            folder_path (Optional[str]): The path of the folder where the index will be created.
+
+        Returns:
+            ContextGroundingIndex: The created index information.
+        """
+        spec = self._create_spec(
+            name=name,
+            description=description,
+            source=source,
+            cron_expression=cron_expression,
+            time_zone_id=time_zone_id,
+            advanced_ingestion=advanced_ingestion
+            if advanced_ingestion is not None
+            else True,
+            preprocessing_request=preprocessing_request or LLMV4,
+            folder_path=folder_path,
+            folder_key=folder_key,
+        )
+
+        response = self.request(
+            spec.method,
+            spec.endpoint,
+            content=spec.content,
+            headers=spec.headers,
+        )
+
+        return ContextGroundingIndex.model_validate(response.json())
+
+    @traced(name="contextgrounding_create_index", run_type="uipath")
+    @infer_bindings(resource_type="index")
+    async def create_index_async(
+        self,
+        name: str,
+        source: Dict[str, Any],
+        description: Optional[str] = None,
+        cron_expression: Optional[str] = None,
+        time_zone_id: Optional[str] = None,
+        advanced_ingestion: Optional[bool] = True,
+        preprocessing_request: Optional[str] = LLMV4,
+        folder_key: Optional[str] = None,
+        folder_path: Optional[str] = None,
+    ) -> ContextGroundingIndex:
+        """Create a new context grounding index.
+
+        Args:
+            name (str): The name of the index to create.
+            source (dict): Source configuration dictionary:
+                - For buckets: type="bucket", bucket_name, folder_path, directory_path="/" (optional), file_type (optional)
+                - For Google Drive: type="google_drive", connection_name, connection_id, leaf_folder_id, directory_path, folder_path, file_type (optional)
+                - For Dropbox: type="dropbox", connection_name, connection_id, directory_path, folder_path, file_type (optional)
+                - For OneDrive: type="onedrive", connection_name, connection_id, leaf_folder_id, directory_path, folder_path, file_type (optional)
+                - For Confluence: type="confluence", connection_name, connection_id, space_id, directory_path, folder_path, file_type (optional)
+            description (Optional[str]): Description of the index.
+            cron_expression (Optional[str]): Cron expression for scheduled indexing (e.g., "0 0 18 ? * 2" for Tuesdays at 6 PM).
+            time_zone_id (Optional[str]): Valid Windows Timezone ID for the cron expression (e.g., "UTC", "Pacific Standard Time", "GTB Standard Time").
+            advanced_ingestion (Optional[bool]): Enable advanced ingestion with preprocessing. Defaults to True.
+            preprocessing_request (Optional[str]): The OData type for the preprocessing request. Defaults to LLMV4.
+            folder_key (Optional[str]): The key of the folder where the index will be created.
+            folder_path (Optional[str]): The path of the folder where the index will be created.
+
+        Returns:
+            ContextGroundingIndex: The created index information.
+        """
+        spec = self._create_spec(
+            name=name,
+            description=description,
+            source=source,
+            cron_expression=cron_expression,
+            time_zone_id=time_zone_id,
+            advanced_ingestion=advanced_ingestion
+            if advanced_ingestion is not None
+            else True,
+            preprocessing_request=preprocessing_request or LLMV4,
+            folder_path=folder_path,
+            folder_key=folder_key,
+        )
+
+        response = await self.request_async(
+            spec.method,
+            spec.endpoint,
+            content=spec.content,
+            headers=spec.headers,
+        )
+
+        return ContextGroundingIndex.model_validate(response.json())
+
     @traced(name="contextgrounding_search", run_type="uipath")
     def search(
         self,
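A hypothetical end-to-end call of the new API (client construction and the context_grounding attribute follow the SDK's documented usage pattern; all names and values below are made up):

# Sketch only: creating an index over a storage bucket with create_index.
from uipath import UiPath

sdk = UiPath()  # assumes URL and credentials are configured in the environment

index = sdk.context_grounding.create_index(
    name="support-docs",
    source={
        "type": "bucket",
        "bucket_name": "knowledge-base",  # required for bucket sources
        "folder_path": "Shared",          # required for bucket sources
        "file_type": "pdf",               # optional; becomes fileNameGlob "**/*.pdf"
    },
    description="Indexed support knowledge base",
    cron_expression="0 0 18 ? * 2",       # optional scheduled re-ingestion (see docstring)
    time_zone_id="UTC",
)
print(index)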
@@ -575,40 +697,200 @@
         self,
         name: str,
         description: Optional[str],
-
-
-
+        source: Dict[str, Any],
+        advanced_ingestion: bool,
+        preprocessing_request: str,
+        cron_expression: Optional[str] = None,
+        time_zone_id: Optional[str] = None,
         folder_key: Optional[str] = None,
         folder_path: Optional[str] = None,
     ) -> RequestSpec:
+        """Create request spec for index creation.
+
+        Args:
+            name: Index name
+            description: Index description
+            source: Source configuration dictionary
+            cron_expression: Optional cron expression for scheduled indexing
+            time_zone_id: Optional timezone for the cron expression
+            advanced_ingestion: Whether to enable advanced ingestion with preprocessing
+            preprocessing_request: OData type for the preprocessing request
+            folder_key: Optional folder key
+            folder_path: Optional folder path
+
+        Returns:
+            RequestSpec for the create index request
+        """
+        source_type = source.get("type", "").lower()
+
         folder_key = self._resolve_folder_key(folder_key, folder_path)
+        file_type = source.get("file_type")
+        file_name_glob = f"**/*.{file_type}" if file_type else "**/*"
+
+        data_source = self._build_data_source(source_type, source, file_name_glob)
+
+        if cron_expression:
+            data_source["indexer"] = {
+                "cronExpression": cron_expression,
+                "timeZoneId": time_zone_id or "UTC",
+            }
+
+        payload = {
+            "name": name,
+            "description": description or "",
+            "dataSource": data_source,
+        }
+
+        if advanced_ingestion and preprocessing_request:
+            payload["preProcessing"] = {
+                "@odata.type": preprocessing_request,
+            }

-        storage_bucket_folder_path = (
-            storage_bucket_folder_path
-            if storage_bucket_folder_path
-            else self._folder_path
-        )
         return RequestSpec(
             method="POST",
             endpoint=Endpoint("/ecs_/v2/indexes/create"),
-            json={
-                "name": name,
-                "description": description,
-                "dataSource": {
-                    "@odata.type": ORCHESTRATOR_STORAGE_BUCKET_DATA_SOURCE,
-                    "folder": storage_bucket_folder_path,
-                    "bucketName": storage_bucket_name,
-                    "fileNameGlob": file_name_glob
-                    if file_name_glob is not None
-                    else "*",
-                    "directoryPath": "/",
-                },
-            },
+            content=json.dumps(payload),
             headers={
                 **header_folder(folder_key, None),
+                "Content-Type": "application/json",
             },
         )

+    def _build_data_source(
+        self, source_type: str, source: Dict[str, Any], file_name_glob: str
+    ) -> Dict[str, Any]:
+        """Build data source configuration based on type."""
+        if source_type == "bucket":
+            return self._build_bucket_data_source(source, file_name_glob)
+        elif source_type in ["google_drive"]:
+            return self._build_google_drive_data_source(source, file_name_glob)
+        elif source_type == "dropbox":
+            return self._build_dropbox_data_source(source, file_name_glob)
+        elif source_type == "onedrive":
+            return self._build_onedrive_data_source(source, file_name_glob)
+        elif source_type == "confluence":
+            return self._build_confluence_data_source(source, file_name_glob)
+        else:
+            raise ValueError(
+                f"Unsupported data source type: {source_type}. "
+                f"Supported types: bucket, google_drive, dropbox, onedrive, confluence"
+            )
+
+    def _build_bucket_data_source(
+        self, source: Dict[str, Any], file_name_glob: str
+    ) -> Dict[str, Any]:
+        """Build data source configuration for storage bucket."""
+        required_fields = ["bucket_name", "folder_path"]
+        for field in required_fields:
+            if not source.get(field):
+                raise ValueError(f"{field} is required for bucket data source")
+
+        return {
+            "@odata.type": ORCHESTRATOR_STORAGE_BUCKET_DATA_SOURCE,
+            "folder": source["folder_path"],
+            "bucketName": source["bucket_name"],
+            "fileNameGlob": file_name_glob,
+            "directoryPath": source.get("directory_path", "/"),
+        }
+
+    def _build_google_drive_data_source(
+        self, source: Dict[str, Any], file_name_glob: str
+    ) -> Dict[str, Any]:
+        """Build data source configuration for Google Drive."""
+        required_fields = [
+            "connection_id",
+            "connection_name",
+            "leaf_folder_id",
+            "directory_path",
+            "folder_path",
+        ]
+        for field in required_fields:
+            if not source.get(field):
+                raise ValueError(f"{field} is required for Google Drive data source")
+
+        return {
+            "@odata.type": GOOGLE_DRIVE_DATA_SOURCE,
+            "folder": source["folder_path"],
+            "connectionId": source["connection_id"],
+            "connectionName": source["connection_name"],
+            "leafFolderId": source["leaf_folder_id"],
+            "directoryPath": source["directory_path"],
+            "fileNameGlob": file_name_glob,
+        }
+
+    def _build_dropbox_data_source(
+        self, source: Dict[str, Any], file_name_glob: str
+    ) -> Dict[str, Any]:
+        """Build data source configuration for Dropbox."""
+        required_fields = [
+            "connection_id",
+            "connection_name",
+            "directory_path",
+            "folder_path",
+        ]
+        for field in required_fields:
+            if not source.get(field):
+                raise ValueError(f"{field} is required for Dropbox data source")
+
+        return {
+            "@odata.type": DROPBOX_DATA_SOURCE,
+            "folder": source["folder_path"],
+            "connectionId": source["connection_id"],
+            "connectionName": source["connection_name"],
+            "directoryPath": source["directory_path"],
+            "fileNameGlob": file_name_glob,
+        }
+
+    def _build_onedrive_data_source(
+        self, source: Dict[str, Any], file_name_glob: str
+    ) -> Dict[str, Any]:
+        """Build data source configuration for OneDrive."""
+        required_fields = [
+            "connection_id",
+            "connection_name",
+            "leaf_folder_id",
+            "directory_path",
+            "folder_path",
+        ]
+        for field in required_fields:
+            if not source.get(field):
+                raise ValueError(f"{field} is required for OneDrive data source")
+
+        return {
+            "@odata.type": ONEDRIVE_DATA_SOURCE,
+            "folder": source["folder_path"],
+            "connectionId": source["connection_id"],
+            "connectionName": source["connection_name"],
+            "leafFolderId": source["leaf_folder_id"],
+            "directoryPath": source["directory_path"],
+            "fileNameGlob": file_name_glob,
+        }
+
+    def _build_confluence_data_source(
+        self, source: Dict[str, Any], file_name_glob: str
+    ) -> Dict[str, Any]:
+        """Build data source configuration for Confluence."""
+        required_fields = [
+            "connection_id",
+            "connection_name",
+            "directory_path",
+            "folder_path",
+            "space_id",
+        ]
+        for field in required_fields:
+            if not source.get(field):
+                raise ValueError(f"{field} is required for Confluence data source")
+
+        return {
+            "@odata.type": CONFLUENCE_DATA_SOURCE,
+            "folder": source["folder_path"],
+            "connectionId": source["connection_id"],
+            "connectionName": source["connection_name"],
+            "directoryPath": source["directory_path"],
+            "fileNameGlob": file_name_glob,
+            "spaceId": source["space_id"],
+        }
+
     def _retrieve_by_id_spec(
         self,
         id: str,
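For orientation, the body _create_spec serializes for a Confluence source comes out roughly as below (illustrative values; keys follow the builders above):

# Sketch only: the JSON body POSTed to /ecs_/v2/indexes/create.
import json

payload = {
    "name": "wiki-index",
    "description": "",
    "dataSource": {
        "@odata.type": "#UiPath.Vdbs.Domain.Api.V20Models.ConfluenceDataSourceRequest",
        "folder": "Shared",                 # from source["folder_path"]
        "connectionId": "<connection-id>",  # from source["connection_id"]
        "connectionName": "My Confluence",
        "directoryPath": "/",
        "fileNameGlob": "**/*",             # "**/*.pdf" when file_type="pdf"
        "spaceId": "<space-id>",
        # present only when cron_expression is supplied:
        "indexer": {"cronExpression": "0 0 18 ? * 2", "timeZoneId": "UTC"},
    },
    # present only when advanced_ingestion is enabled:
    "preProcessing": {
        "@odata.type": "#UiPath.Vdbs.Domain.Api.V20Models.LLMV4PreProcessingRequest"
    },
}
print(json.dumps(payload, indent=2))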
uipath/_utils/constants.py
CHANGED
@@ -25,6 +25,18 @@ HEADER_SW_LOCK_KEY = "x-uipath-sw-lockkey"
 ORCHESTRATOR_STORAGE_BUCKET_DATA_SOURCE = (
     "#UiPath.Vdbs.Domain.Api.V20Models.StorageBucketDataSourceRequest"
 )
+CONFLUENCE_DATA_SOURCE = "#UiPath.Vdbs.Domain.Api.V20Models.ConfluenceDataSourceRequest"
+DROPBOX_DATA_SOURCE = "#UiPath.Vdbs.Domain.Api.V20Models.DropboxDataSourceRequest"
+GOOGLE_DRIVE_DATA_SOURCE = (
+    "#UiPath.Vdbs.Domain.Api.V20Models.GoogleDriveDataSourceRequest"
+)
+ONEDRIVE_DATA_SOURCE = "#UiPath.Vdbs.Domain.Api.V20Models.OneDriveDataSourceRequest"
+
+# Preprocessing request types
+LLMV3Mini = "#UiPath.Vdbs.Domain.Api.V20Models.LLMV3MiniPreProcessingRequest"
+LLMV4 = "#UiPath.Vdbs.Domain.Api.V20Models.LLMV4PreProcessingRequest"
+NativeV1 = "#UiPath.Vdbs.Domain.Api.V20Models.NativeV1PreProcessingRequest"
+

 # Local storage
 TEMP_ATTACHMENTS_FOLDER = "uipath_attachments"
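These preprocessing constants are the OData type strings create_index accepts via preprocessing_request; a sketch of selecting the lighter preprocessor (import path per this diff, client setup assumed as in the earlier example):

# Sketch only: override the default LLMV4 preprocessor.
from uipath import UiPath
from uipath._utils.constants import LLMV3Mini

sdk = UiPath()
sdk.context_grounding.create_index(
    name="fast-index",
    source={"type": "bucket", "bucket_name": "kb", "folder_path": "Shared"},
    preprocessing_request=LLMV3Mini,
)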
{uipath-2.1.69.dist-info → uipath-2.1.71.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: uipath
-Version: 2.1.69
+Version: 2.1.71
 Summary: Python SDK and CLI for UiPath Platform, enabling programmatic interaction with automation services, process management, and deployment tools.
 Project-URL: Homepage, https://uipath.com
 Project-URL: Repository, https://github.com/UiPath/uipath-python
{uipath-2.1.69.dist-info → uipath-2.1.71.dist-info}/RECORD
CHANGED
@@ -32,7 +32,7 @@ uipath/_cli/_auth/auth_config.json,sha256=o8J5BBFwiEtjZLHpJ_64lvnTeYeRIHaJ-Bhg0Q
 uipath/_cli/_auth/index.html,sha256=uGK0CDTP8Rys_p4O_Pbd2x4tz0frKNVcumjrXnal5Nc,22814
 uipath/_cli/_auth/localhost.crt,sha256=oGl9oLLOiouHubAt39B4zEfylFvKEtbtr_43SIliXJc,1226
 uipath/_cli/_auth/localhost.key,sha256=X31VYXD8scZtmGA837dGX5l6G-LXHLo5ItWJhZXaz3c,1679
-uipath/_cli/_dev/_terminal/__init__.py,sha256=
+uipath/_cli/_dev/_terminal/__init__.py,sha256=di_RiN9Mcp9wqyKRRqXag28vbSw8_78mCnQZNn9H-Ss,14027
 uipath/_cli/_dev/_terminal/_components/_chat.py,sha256=NLRoy49QScHiI-q0FGykkaU8ajv1d23fx7issSALcFA,4119
 uipath/_cli/_dev/_terminal/_components/_details.py,sha256=FbLYtJ56gqHV6CIrpzO_n9Sk_YNg4nzRKTSsbj-DBPQ,17257
 uipath/_cli/_dev/_terminal/_components/_history.py,sha256=dcT9tohEwpUaLGi7VWu5d-mDIF45UxFzN2Yvdf5N-eM,2691
@@ -43,7 +43,7 @@ uipath/_cli/_dev/_terminal/_models/_messages.py,sha256=p66MHUi_SS30CQWXtiwydybMK
 uipath/_cli/_dev/_terminal/_styles/terminal.tcss,sha256=ktVpKwXIXw2VZp8KIZD6fO9i9NTGvts_icCTxMdzEiY,3240
 uipath/_cli/_dev/_terminal/_utils/_chat.py,sha256=YUZxYVdmEManwHDuZsczJT1dWIYE1dVBgABlurwMFcE,8493
 uipath/_cli/_dev/_terminal/_utils/_exporter.py,sha256=oI6D_eMwrh_2aqDYUh4GrJg8VLGrLYhDahR-_o0uJns,4144
-uipath/_cli/_dev/_terminal/_utils/_logger.py,sha256=
+uipath/_cli/_dev/_terminal/_utils/_logger.py,sha256=_ipTl_oAiMF9I7keGt2AAFAMz40DNLVMVkoiq-07UAU,2943
 uipath/_cli/_evals/_evaluator_factory.py,sha256=OWfLxPOEcDn4qv5m3n7LBfIBKcdTPml2ZCLcsqSymlU,5329
 uipath/_cli/_evals/_progress_reporter.py,sha256=hpSt0CXpIoFJGsbqZkqmwyGO_TBNesbWKlvDJUEDxd8,16455
 uipath/_cli/_evals/_runtime.py,sha256=WKcBT6DGzNRjgEOpmH0b7RoEbEsHMyAbcAMs8b_CAI0,11418
@@ -89,7 +89,7 @@ uipath/_services/assets_service.py,sha256=pG0Io--SeiRRQmfUWPQPl1vq3csZlQgx30LBNK
 uipath/_services/attachments_service.py,sha256=NPQYK7CGjfBaNT_1S5vEAfODmOChTbQZforllFM2ofU,26678
 uipath/_services/buckets_service.py,sha256=5s8tuivd7GUZYj774DDUYTa0axxlUuesc4EBY1V5sdk,18496
 uipath/_services/connections_service.py,sha256=Gt8zPY4oA7cMYAU2LI3lBieoBpV81BOGelnzDWJl_V4,7931
-uipath/_services/context_grounding_service.py,sha256=
+uipath/_services/context_grounding_service.py,sha256=Pjx-QQQEiSKD-hY6ityj3QUSALN3fIcKLLHr_NZ0d_g,37117
 uipath/_services/documents_service.py,sha256=UnFS8EpOZ_Ng2TZk3OiJJ3iNANvFs7QxuoG_v-lQj6c,24815
 uipath/_services/entities_service.py,sha256=QKCLE6wRgq3HZraF-M2mljy-8il4vsNHrQhUgkewVVk,14028
 uipath/_services/folder_service.py,sha256=9JqgjKhWD-G_KUnfUTP2BADxL6OK9QNZsBsWZHAULdE,2749
@@ -107,7 +107,7 @@ uipath/_utils/_request_spec.py,sha256=iCtBLqtbWUpFG5g1wtIZBzSupKsfaRLiQFoFc_4B70
 uipath/_utils/_ssl_context.py,sha256=xSYitos0eJc9cPHzNtHISX9PBvL6D2vas5G_GiBdLp8,1783
 uipath/_utils/_url.py,sha256=-4eluSrIZCUlnQ3qU17WPJkgaC2KwF9W5NeqGnTNGGo,2512
 uipath/_utils/_user_agent.py,sha256=pVJkFYacGwaQBomfwWVAvBQgdBUo62e4n3-fLIajWUU,563
-uipath/_utils/constants.py,sha256=
+uipath/_utils/constants.py,sha256=2xLT-1aW0aJS2USeZbK-7zRgyyi1bgV60L0rtQOUqOM,1721
 uipath/agent/_utils.py,sha256=Z5x7TprkEudrd_aC9YI5ObyT8O05_WkmBJTpqkQKOGQ,1641
 uipath/agent/conversation/__init__.py,sha256=5hK-Iz131mnd9m6ANnpZZffxXZLVFDQ9GTg5z9ik1oQ,5265
 uipath/agent/conversation/async_stream.py,sha256=BA_8uU1DgE3VpU2KkJj0rkI3bAHLk_ZJKsajR0ipMpo,2055
@@ -158,8 +158,8 @@ uipath/tracing/_traced.py,sha256=yBIY05PCCrYyx50EIHZnwJaKNdHPNx-YTR1sHQl0a98,199
 uipath/tracing/_utils.py,sha256=qd7N56tg6VXQ9pREh61esBgUWLNA0ssKsE0QlwrRWFM,11974
 uipath/utils/__init__.py,sha256=VD-KXFpF_oWexFg6zyiWMkxl2HM4hYJMIUDZ1UEtGx0,105
 uipath/utils/_endpoints_manager.py,sha256=iRTl5Q0XAm_YgcnMcJOXtj-8052sr6jpWuPNz6CgT0Q,8408
-uipath-2.1.
-uipath-2.1.
-uipath-2.1.
-uipath-2.1.
-uipath-2.1.
+uipath-2.1.71.dist-info/METADATA,sha256=45oUACkDT5dlFX6IuVnvbE-5WRCYwQMRfRsdYauUjFU,6482
+uipath-2.1.71.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+uipath-2.1.71.dist-info/entry_points.txt,sha256=9C2_29U6Oq1ExFu7usihR-dnfIVNSKc-0EFbh0rskB4,43
+uipath-2.1.71.dist-info/licenses/LICENSE,sha256=-KBavWXepyDjimmzH5fVAsi-6jNVpIKFc2kZs0Ri4ng,1058
+uipath-2.1.71.dist-info/RECORD,,
{uipath-2.1.69.dist-info → uipath-2.1.71.dist-info}/WHEEL
File without changes

{uipath-2.1.69.dist-info → uipath-2.1.71.dist-info}/entry_points.txt
File without changes

{uipath-2.1.69.dist-info → uipath-2.1.71.dist-info}/licenses/LICENSE
File without changes