uipath 2.1.90__py3-none-any.whl → 2.1.92__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -326,7 +326,8 @@ class UiPathEvalRuntime(UiPathBaseRuntime, Generic[T, C]):
         if eval_item.input_mocking_strategy:
             eval_item = await self._generate_input_for_eval(eval_item)
 
-        set_execution_context(eval_item, self.span_collector)
+        execution_id = str(uuid.uuid4())
+        set_execution_context(eval_item, self.span_collector, execution_id)
 
         await event_bus.publish(
             EvaluationEvents.CREATE_EVAL_RUN,
@@ -341,7 +342,7 @@ class UiPathEvalRuntime(UiPathBaseRuntime, Generic[T, C]):
         )
 
         try:
-            agent_execution_output = await self.execute_runtime(eval_item)
+            agent_execution_output = await self.execute_runtime(eval_item, execution_id)
             evaluation_item_results: list[EvalItemResult] = []
 
             for evaluator in evaluators:
@@ -448,9 +449,8 @@ class UiPathEvalRuntime(UiPathBaseRuntime, Generic[T, C]):
         return spans, logs
 
     async def execute_runtime(
-        self, eval_item: EvaluationItem
+        self, eval_item: EvaluationItem, execution_id: str
     ) -> UiPathEvalRunExecutionOutput:
-        execution_id = str(uuid.uuid4())
         runtime_context: C = self.factory.new_context(
             execution_id=execution_id,
             input_json=eval_item.inputs,
@@ -96,7 +96,11 @@ class LLMMocker(Mocker):
        from uipath import UiPath
        from uipath._services.llm_gateway_service import _cleanup_schema

-        from .mocks import evaluation_context, span_collector_context
+        from .mocks import (
+            evaluation_context,
+            execution_id_context,
+            span_collector_context,
+        )

        llm = UiPath().llm
        return_type: Any = func.__annotations__.get("return", None)
@@ -126,8 +130,9 @@ class LLMMocker(Mocker):
        test_run_history = "(empty)"
        eval_item = evaluation_context.get()
        span_collector = span_collector_context.get()
-        if eval_item and span_collector:
-            spans = span_collector.get_spans(eval_item.id)
+        execution_id = execution_id_context.get()
+        if eval_item and span_collector and execution_id:
+            spans = span_collector.get_spans(execution_id)
            test_run_history = _SpanUtils.spans_to_llm_context(spans)

        prompt_input: dict[str, Any] = {
@@ -21,11 +21,16 @@ span_collector_context: ContextVar[Optional[ExecutionSpanCollector]] = ContextVa
    "span_collector", default=None
)

+# Execution ID for the current evaluation item
+execution_id_context: ContextVar[Optional[str]] = ContextVar(
+    "execution_id", default=None
+)
+
logger = logging.getLogger(__name__)


def set_execution_context(
-    eval_item: EvaluationItem, span_collector: ExecutionSpanCollector
+    eval_item: EvaluationItem, span_collector: ExecutionSpanCollector, execution_id: str
) -> None:
    """Set the execution context for an evaluation run for mocking and trace access."""
    evaluation_context.set(eval_item)
@@ -40,6 +45,7 @@ def set_execution_context(
        mocker_context.set(None)

    span_collector_context.set(span_collector)
+    execution_id_context.set(execution_id)


def clear_execution_context() -> None:
@@ -47,6 +53,7 @@ def clear_execution_context() -> None:
    evaluation_context.set(None)
    mocker_context.set(None)
    span_collector_context.set(None)
+    execution_id_context.set(None)


async def get_mocked_response(
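
The changes above thread a fresh execution id through contextvars so the LLM mocker can fetch spans for the run that is actually executing, rather than keying them by eval item id. The following is a minimal, self-contained sketch of that pattern; the names here are illustrative and are not the uipath internals:

import uuid
from contextvars import ContextVar
from typing import Optional

# Context holding the id of the run currently executing (illustrative).
execution_id_context: ContextVar[Optional[str]] = ContextVar("execution_id", default=None)

# Illustrative span store keyed by execution id, mirroring how the span
# collector is queried with get_spans(execution_id) in the diff above.
spans_by_execution: dict[str, list[str]] = {}


def record_span(name: str) -> None:
    """Attach a span to whichever execution is active in this context."""
    execution_id = execution_id_context.get()
    if execution_id is not None:
        spans_by_execution.setdefault(execution_id, []).append(name)


def run_eval_item(item_name: str) -> list[str]:
    """Generate a fresh execution id, set the context, run, then clear it."""
    execution_id = str(uuid.uuid4())
    token = execution_id_context.set(execution_id)
    try:
        record_span(f"{item_name}:step-1")
        record_span(f"{item_name}:step-2")
        return spans_by_execution.get(execution_id, [])
    finally:
        execution_id_context.reset(token)  # same effect as clearing the context


if __name__ == "__main__":
    print(run_eval_item("eval-A"))  # spans keyed by eval-A's execution id
    print(run_eval_item("eval-B"))  # a separate execution id, separate spans

Because each run gets its own id before set_execution_context is called, re-running the same eval item twice no longer mixes the two runs' traces.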
@@ -319,7 +319,7 @@ class SwFileHandler:
         if existing:
             try:
                 entry_points_json = (
-                    await self._studio_client.download_file_async(existing.id)
+                    await self._studio_client.download_project_file_async(existing)
                 ).json()
                 entry_points_json["entryPoints"] = uipath_config["entryPoints"]
 
@@ -417,7 +417,7 @@ class SwFileHandler:
         if existing:
             try:
                 existing_agent_json = (
-                    await self._studio_client.download_file_async(existing.id)
+                    await self._studio_client.download_project_file_async(existing)
                 ).json()
                 version_parts = existing_agent_json["metadata"]["codeVersion"].split(
                     "."
@@ -453,7 +453,7 @@ class SwFileHandler:
         )
         logger.info("Uploading 'agent.json'")
 
-    async def upload_source_files(self, config_data: dict[str, Any]) -> None:
+    async def upload_source_files(self, settings: Optional[dict[str, Any]]) -> None:
        """Main method to upload source files to the UiPath project.

        - Gets project structure
@@ -462,7 +462,7 @@ class SwFileHandler:
        - Deletes removed files

        Args:
-            config_data: Project configuration data
+            settings: File handling settings

        Returns:
            Dict[str, ProjectFileExtended]: Root level files for agent.json handling
@@ -493,7 +493,7 @@ class SwFileHandler:
 
        # Get files to upload and process them
        files = files_to_include(
-            config_data,
+            settings,
            self.directory,
            self.include_uv_lock,
            directories_to_ignore=["evals"],
@@ -8,7 +8,19 @@ import traceback
 from abc import ABC, abstractmethod
 from enum import Enum
 from functools import cached_property
-from typing import Any, Callable, Dict, Generic, List, Optional, Type, TypeVar, Union
+from typing import (
+    Any,
+    AsyncGenerator,
+    Callable,
+    Dict,
+    Generic,
+    List,
+    Literal,
+    Optional,
+    Type,
+    TypeVar,
+    Union,
+)
 from uuid import uuid4
 
 from opentelemetry import context as context_api
@@ -23,6 +35,7 @@ from opentelemetry.sdk.trace.export import (
 from opentelemetry.trace import Tracer
 from pydantic import BaseModel, Field
 
+from uipath._events._events import BaseEvent
 from uipath.agent.conversation import UiPathConversationEvent, UiPathConversationMessage
 from uipath.tracing import TracingManager
 
@@ -143,6 +156,19 @@ class UiPathRuntimeResult(BaseModel):
         return result
 
 
+class UiPathRuntimeBreakpointResult(UiPathRuntimeResult):
+    """Result for execution suspended at a breakpoint."""
+
+    # Force status to always be SUSPENDED
+    status: UiPathRuntimeStatus = Field(
+        default=UiPathRuntimeStatus.SUSPENDED, frozen=True
+    )
+    breakpoint_node: str  # Which node the breakpoint is at
+    breakpoint_type: Literal["before", "after"]  # Before or after the node
+    current_state: dict[str, Any] | Any  # Current workflow state at breakpoint
+    next_nodes: List[str]  # Which node(s) will execute next
+
+
 class UiPathConversationHandler(ABC):
     """Base delegate for handling UiPath conversation events."""
 
@@ -548,6 +574,47 @@ class UiPathBaseRuntime(ABC):
         """
         pass
 
+    async def stream(
+        self,
+    ) -> AsyncGenerator[Union[BaseEvent, UiPathRuntimeResult], None]:
+        """Stream execution events in real-time.
+
+        This is an optional method that runtimes can implement to support streaming.
+        If not implemented, only the execute() method will be available.
+
+        Yields framework-agnostic BaseEvent instances during execution,
+        with the final event being UiPathRuntimeResult.
+
+        Yields:
+            BaseEvent subclasses: Framework-agnostic events (MessageCreatedEvent,
+                AgentStateUpdatedEvent, etc.)
+            Final yield: UiPathRuntimeResult
+
+        Raises:
+            NotImplementedError: If the runtime doesn't support streaming
+            RuntimeError: If execution fails
+
+        Example:
+            async for event in runtime.stream():
+                if isinstance(event, UiPathRuntimeResult):
+                    # Last event - execution complete
+                    print(f"Status: {event.status}")
+                    break
+                elif isinstance(event, MessageCreatedEvent):
+                    # Handle message event
+                    print(f"Message: {event.payload}")
+                elif isinstance(event, AgentStateUpdatedEvent):
+                    # Handle state update
+                    print(f"State updated by: {event.node_name}")
+        """
+        raise NotImplementedError(
+            f"{self.__class__.__name__} does not implement streaming. "
+            "Use execute() instead."
+        )
+        # This yield is unreachable but makes this a proper generator function
+        # Without it, the function wouldn't match the AsyncGenerator return type
+        yield
+
     @abstractmethod
     async def validate(self):
         """Validate runtime inputs."""
@@ -13,6 +13,7 @@ from ..._services import (
     ProcessesService,
 )
 from ..._utils import get_inferred_bindings_names
+from ..models.runtime_schema import BindingResource, BindingResourceValue, Bindings
 from ._constants import BINDINGS_VERSION
 
 
@@ -428,7 +429,7 @@ def parse_sdk_usage(
     return results
 
 
-def convert_to_bindings_format(sdk_usage_data):
+def convert_to_bindings_format(sdk_usage_data) -> Bindings:
    """Convert the output of parse_sdk_usage to a structure similar to bindings_v2.json.

    Args:
@@ -437,7 +438,7 @@
    Returns:
        Dictionary in bindings_v2.json format
    """
-    bindings = {"version": "2.0", "resources": []}
+    bindings = Bindings(version="2.0", resources=[])

    for resource_type, components in sdk_usage_data.items():
        for component in components:
@@ -448,24 +449,24 @@
                )
                is_connection_id_expression = connection_id.startswith("EXPR$")
                connection_id = connection_id.replace("EXPR$", "")
-                resource_entry = {
-                    "resource": "connection",
-                    "key": connection_id,
-                    "value": {
-                        "ConnectionId": {
-                            "defaultValue": connection_id,
-                            "isExpression": is_connection_id_expression,
-                            "displayName": "Connection",
-                        }
+                resource_entry = BindingResource(
+                    resource="connection",
+                    key=connection_id,
+                    value={
+                        "ConnectionId": BindingResourceValue(
+                            default_value=connection_id,
+                            is_expression=is_connection_id_expression,
+                            display_name="Connection",
+                        ),
                    },
-                    "metadata": {
+                    metadata={
                        "BindingsVersion": BINDINGS_VERSION,
                        "Connector": connector_name,
                        "UseConnectionService": "True",
                    },
-                }
+                )
 
-                bindings["resources"].append(resource_entry)
+                bindings.resources.append(resource_entry)
                continue

            resource_name = component.get("name", "")
@@ -481,36 +482,61 @@
            key = name
            if folder_path:
                key = f"{folder_path}.{name}"
-            resource_entry = {
-                "resource": service_name_resource_mapping[resource_type],
-                "key": key,
-                "value": {
-                    "name": {
-                        "defaultValue": name,
-                        "isExpression": is_expression,
-                        "displayName": "Name",
-                    }
+            resource_entry = BindingResource(
+                resource=service_name_resource_mapping[resource_type],
+                key=key,
+                value={
+                    "name": BindingResourceValue(
+                        default_value=name,
+                        is_expression=is_expression,
+                        display_name="Name",
+                    ),
                },
-                "metadata": {
+                metadata={
                    "ActivityName": method_name,
                    "BindingsVersion": BINDINGS_VERSION,
                    "DisplayLabel": "FullName",
                },
-            }
+            )

            if folder_path:
-                resource_entry["value"]["folderPath"] = {
-                    "defaultValue": folder_path,
-                    "isExpression": is_folder_path_expression,
-                    "displayName": "Folder Path",
-                }
+                resource_entry.value["folderPath"] = BindingResourceValue(
+                    default_value=folder_path,
+                    is_expression=is_folder_path_expression,
+                    display_name="Folder Path",
+                )

-            bindings["resources"].append(resource_entry)
+            bindings.resources.append(resource_entry)

    return bindings


-def generate_bindings_json(file_path: str) -> str:
+def generate_bindings_json(file_path: str) -> dict[str, Any]:
+    """Generate bindings JSON from a Python file.
+
+    Args:
+        file_path: Path to the Python file to analyze
+
+    Returns:
+        JSON string representation of the bindings
+    """
+    try:
+        with open(file_path, "r") as f:
+            source_code = f.read()
+
+        # Get the base directory for resolving imports
+        base_dir = os.path.dirname(os.path.abspath(file_path))
+
+        sdk_usage = parse_sdk_usage(source_code, base_dir)
+        bindings = convert_to_bindings_format(sdk_usage)
+
+        return bindings.model_dump(by_alias=True)
+
+    except Exception as e:
+        raise Exception(f"Error generating bindings JSON: {e}") from e
+
+
+def generate_bindings(file_path: str) -> Bindings:
    """Generate bindings JSON from a Python file.

    Args:
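
convert_to_bindings_format now builds typed Bindings/BindingResource/BindingResourceValue objects and relies on model_dump(by_alias=True) to reproduce the old bindings_v2.json keys (defaultValue, isExpression, displayName). The runtime_schema models themselves are not shown in this diff; the following is a hypothetical Pydantic sketch consistent with how they are used above:

from typing import Any, Dict, List, Optional

from pydantic import BaseModel, Field

# Hypothetical shapes inferred from how the diff constructs and serializes them;
# the real uipath/_cli/models/runtime_schema.py may differ.


class BindingResourceValue(BaseModel):
    default_value: Any = Field(alias="defaultValue")
    is_expression: bool = Field(default=False, alias="isExpression")
    display_name: Optional[str] = Field(default=None, alias="displayName")

    model_config = {"populate_by_name": True}


class BindingResource(BaseModel):
    resource: str
    key: str
    value: Dict[str, BindingResourceValue] = Field(default_factory=dict)
    metadata: Dict[str, str] = Field(default_factory=dict)


class Bindings(BaseModel):
    version: str = "2.0"
    resources: List[BindingResource] = Field(default_factory=list)


entry = BindingResource(
    resource="connection",
    key="my-connection",  # placeholder connection key
    value={
        "ConnectionId": BindingResourceValue(
            default_value="my-connection",
            is_expression=False,
            display_name="Connection",
        )
    },
    metadata={"BindingsVersion": "2.2", "Connector": "example-connector"},
)
bindings = Bindings(version="2.0", resources=[entry])

# by_alias=True restores the camelCase keys the old dict literals used.
print(bindings.model_dump(by_alias=True))

Under these assumptions the typed path and the old dict-building path produce the same JSON, which is what lets pack and publish keep writing an unchanged bindings_v2.json.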
@@ -7,9 +7,10 @@ import re
 from pathlib import Path
 from typing import Any, Dict, Optional, Protocol, Tuple
 
-from pydantic import BaseModel
+from pydantic import BaseModel, TypeAdapter
 
 from .._utils._console import ConsoleLogger
+from ..models.runtime_schema import RuntimeSchema
 from ._constants import is_binary_file
 from ._studio_project import (
     ProjectFile,
@@ -93,21 +94,19 @@ def get_project_config(directory: str) -> dict[str, str]:
        console.error("pyproject.toml not found.")

    with open(config_path, "r") as config_file:
-        config_data = json.load(config_file)
-
-    validate_config_structure(config_data)
+        config_data = TypeAdapter(RuntimeSchema).validate_python(json.load(config_file))

    toml_data = read_toml_project(toml_path)

    return {
        "project_name": toml_data["name"],
        "description": toml_data["description"],
-        "entryPoints": config_data["entryPoints"],
+        "entryPoints": [ep.model_dump(by_alias=True) for ep in config_data.entrypoints],
        "version": toml_data["version"],
        "authors": toml_data["authors"],
        "dependencies": toml_data.get("dependencies", {}),
        "requires-python": toml_data.get("requires-python", None),
-        "settings": config_data.get("settings", {}),
+        "settings": config_data.settings or {},
    }


@@ -347,7 +346,7 @@ def read_toml_project(file_path: str) -> dict:
 
 
 def files_to_include(
-    config_data: Optional[dict[Any, Any]],
+    settings: Optional[dict[str, Any]],
     directory: str,
     include_uv_lock: bool = True,
     directories_to_ignore: list[str] | None = None,
@@ -358,7 +357,7 @@
    and explicit inclusion rules. Skips virtual environments and hidden directories.

    Args:
-        config_data: Configuration containing file inclusion rules
+        settings: File handling settings
        directory: Root directory to search for files
        include_uv_lock: Whether to include uv.lock file
        directories_to_ignore: List of directories to ignore
@@ -374,8 +373,7 @@
        directories_to_ignore = []
    if include_uv_lock:
        files_included += ["uv.lock"]
-    if "settings" in config_data:
-        settings = config_data["settings"]
+    if settings is not None:
        if "fileExtensionsIncluded" in settings:
            file_extensions_included.extend(settings["fileExtensionsIncluded"])
        if "filesIncluded" in settings:
@@ -559,7 +557,7 @@ async def download_folder_files(
        local_path = base_path / file_path
        local_path.parent.mkdir(parents=True, exist_ok=True)

-        response = await studio_client.download_file_async(remote_file.id)
+        response = await studio_client.download_project_file_async(remote_file)
        remote_content = response.read().decode("utf-8")
        remote_hash = compute_normalized_hash(remote_content)

@@ -8,6 +8,7 @@ from pydantic import BaseModel, ConfigDict, Field, field_validator
 
 from uipath._utils.constants import HEADER_SW_LOCK_KEY
 from uipath.models.exceptions import EnrichedException
+from uipath.tracing import traced
 
 
 class ProjectFile(BaseModel):
@@ -231,6 +232,7 @@ class StudioSolutionsClient:
         self.uipath: UiPath = UiPath()
         self._solutions_base_url: str = f"/studio_/backend/api/Solution/{solution_id}"
 
+    @traced(name="create_project", run_type="uipath")
     async def create_project_async(
         self,
         project_name: str,
@@ -275,6 +277,7 @@ class StudioClient:
             f"/studio_/backend/api/Project/{project_id}/Lock"
         )
 
+    @traced(name="get_project_structure", run_type="uipath")
     async def get_project_structure_async(self) -> ProjectStructure:
        """Retrieve the project's file structure from UiPath Cloud.

@@ -293,6 +296,7 @@
 
         return ProjectStructure.model_validate(response.json())
 
+    @traced(name="create_folder", run_type="uipath")
     @with_lock_retry
     async def create_folder_async(
         self,
@@ -322,6 +326,7 @@
         )
         return response.json()
 
+    @traced(name="download_file", run_type="uipath")
     async def download_file_async(self, file_id: str) -> Any:
         response = await self.uipath.api_client.request_async(
             "GET",
@@ -330,6 +335,16 @@
         )
         return response
 
+    @traced(name="download_file", run_type="uipath")
+    async def download_project_file_async(self, file: ProjectFile) -> Any:
+        response = await self.uipath.api_client.request_async(
+            "GET",
+            url=f"{self.file_operations_base_url}/File/{file.id}",
+            scoped="org",
+        )
+        return response
+
+    @traced(name="upload_file", run_type="uipath")
     @with_lock_retry
     async def upload_file_async(
         self,
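
Each StudioClient file operation now carries a @traced(name=..., run_type="uipath") decorator so it shows up as a named span in the trace. A minimal sketch following the same usage pattern shown in the diff; the decorated function below is an example of mine, not part of the SDK, and it assumes the uipath package is installed:

import asyncio

from uipath.tracing import traced


@traced(name="download_file", run_type="uipath")
async def download_example_file(file_id: str) -> bytes:
    # Stand-in for an API call; the decorator records the call as a span.
    await asyncio.sleep(0)
    return f"contents of {file_id}".encode()


async def main() -> None:
    print(await download_example_file("123"))


asyncio.run(main())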
@@ -370,6 +385,7 @@ class StudioClient:
         # response contains only the uploaded file identifier
         return response.json(), action
 
+    @traced(name="delete_file", run_type="uipath")
     @with_lock_retry
     async def delete_item_async(
         self,
@@ -430,6 +446,7 @@ class StudioClient:
 
         return content_bytes, resolved_name
 
+    @traced(name="synchronize_files", run_type="uipath")
     @with_lock_retry
     async def perform_structural_migration_async(
         self,
uipath/_cli/cli_init.py CHANGED
@@ -15,8 +15,9 @@ from ..telemetry import track
 from ..telemetry._constants import _PROJECT_KEY, _TELEMETRY_CONFIG_FILE
 from ._utils._console import ConsoleLogger
 from ._utils._input_args import generate_args
-from ._utils._parse_ast import generate_bindings_json
+from ._utils._parse_ast import generate_bindings
 from .middlewares import Middlewares
+from .models.runtime_schema import Bindings, Entrypoint, RuntimeSchema
 
 console = ConsoleLogger()
 logger = logging.getLogger(__name__)
@@ -152,13 +153,17 @@ def get_user_script(directory: str, entrypoint: Optional[str] = None) -> Optiona
    return None


-def write_config_file(config_data: Dict[str, Any]) -> None:
+def write_config_file(config_data: Dict[str, Any] | RuntimeSchema) -> None:
    existing_settings = get_existing_settings(CONFIG_PATH)
    if existing_settings is not None:
-        config_data["settings"] = existing_settings
+        config_data.settings = existing_settings

    with open(CONFIG_PATH, "w") as config_file:
-        json.dump(config_data, config_file, indent=4)
+        if isinstance(config_data, RuntimeSchema):
+            json_object = config_data.model_dump(by_alias=True, exclude_unset=True)
+        else:
+            json_object = config_data
+        json.dump(json_object, config_file, indent=4)

    return CONFIG_PATH

@@ -208,30 +213,29 @@ def init(entrypoint: str, infer_bindings: bool) -> None:
    args = generate_args(script_path)

    relative_path = Path(script_path).relative_to(current_directory).as_posix()
-
-    config_data = {
-        "entryPoints": [
-            {
-                "filePath": relative_path,
-                "uniqueId": str(uuid.uuid4()),
-                # "type": "process", OR BE doesn't offer json schema support for type: Process
-                "type": "agent",
-                "input": args["input"],
-                "output": args["output"],
-            }
-        ]
-    }
-
-    # Generate bindings JSON based on the script path
-    try:
-        if infer_bindings:
-            bindings_data = generate_bindings_json(script_path)
-        else:
-            bindings_data = {}
-        # Add bindings to the config data
-        config_data["bindings"] = bindings_data
-    except Exception as e:
-        console.warning(f"Warning: Could not generate bindings: {str(e)}")
+    bindings = None
+    if infer_bindings:
+        try:
+            bindings = generate_bindings(script_path)
+        except Exception as e:
+            console.warning(f"Warning: Could not generate bindings: {str(e)}")
+    if bindings is None:
+        bindings = Bindings(
+            version="2.0",
+            resources=[],
+        )
+    config_data = RuntimeSchema(
+        entrypoints=[
+            Entrypoint(
+                file_path=relative_path,
+                unique_id=str(uuid.uuid4()),
+                type="agent",
+                input=args["input"],
+                output=args["output"],
+            )
+        ],
+        bindings=bindings,
+    )

    config_path = write_config_file(config_data)
    console.success(f"Created '{config_path}' file.")
uipath/_cli/cli_pack.py CHANGED
@@ -6,8 +6,10 @@ import zipfile
 from string import Template
 
 import click
+from pydantic import TypeAdapter
 
 from uipath._cli._utils._constants import UIPATH_PROJECT_ID
+from uipath._cli.models.runtime_schema import Bindings, RuntimeSchema
 
 from ..telemetry import track
 from ..telemetry._constants import _PROJECT_KEY, _TELEMETRY_CONFIG_FILE
@@ -101,10 +103,11 @@ def generate_entrypoints_file(entryPoints):
    return entrypoint_json_data


-def generate_bindings_content():
-    bindings_content = {"version": "2.0", "resources": []}
-
-    return bindings_content
+def generate_bindings_content() -> Bindings:
+    return Bindings(
+        version="2.0",
+        resources=[],
+    )


def generate_content_types_content():
@@ -214,11 +217,7 @@ def pack_fn(
        console.error("uipath.json not found, please run `uipath init`.")

    with open(config_path, "r") as f:
-        config_data = json.load(f)
-        if "bindings" in config_data:
-            bindings_content = config_data["bindings"]
-        else:
-            bindings_content = generate_bindings_content()
+        config_data = TypeAdapter(RuntimeSchema).validate_python(json.load(f))

    content_types_content = generate_content_types_content()
    [psmdcp_file_name, psmdcp_content] = generate_psmdcp_content(
@@ -249,11 +248,14 @@
    )
    z.writestr("content/operate.json", json.dumps(operate_file, indent=4))
    z.writestr("content/entry-points.json", json.dumps(entrypoints_file, indent=4))
-    z.writestr("content/bindings_v2.json", json.dumps(bindings_content, indent=4))
+    z.writestr(
+        "content/bindings_v2.json",
+        json.dumps(config_data.bindings.model_dump(by_alias=True), indent=4),
+    )
    z.writestr(f"{projectName}.nuspec", nuspec_content)
    z.writestr("_rels/.rels", rels_content)

-    files = files_to_include(config_data, directory, include_uv_lock)
+    files = files_to_include(config_data.settings, directory, include_uv_lock)

    for file in files:
        if file.is_binary:
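
pack_fn (like get_project_config earlier in this diff) now loads uipath.json through TypeAdapter(RuntimeSchema).validate_python, so a malformed config fails with a ValidationError before anything is packaged instead of surfacing later as a KeyError. A small stand-alone illustration of that pattern; the Config model here is illustrative, not the real RuntimeSchema:

import json

from pydantic import BaseModel, TypeAdapter, ValidationError


class Config(BaseModel):  # illustrative stand-in for RuntimeSchema
    entryPoints: list[dict] = []
    settings: dict | None = None


raw = json.loads('{"entryPoints": [{"filePath": "main.py"}]}')
config = TypeAdapter(Config).validate_python(raw)
print(config.settings or {})  # missing sections fall back cleanly

try:
    TypeAdapter(Config).validate_python({"entryPoints": "not-a-list"})
except ValidationError as exc:
    print(f"invalid uipath.json: {exc.error_count()} error(s)")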