uipath 2.1.13__py3-none-any.whl → 2.1.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,464 @@
+ """Studio Web File Handler for managing file operations in UiPath projects."""
+
+ import json
+ import os
+ from datetime import datetime, timezone
+ from typing import Any, Dict, Optional, Set
+
+ import click
+
+ from .._utils._console import ConsoleLogger
+ from .._utils._constants import (
+     AGENT_INITIAL_CODE_VERSION,
+     AGENT_STORAGE_VERSION,
+     AGENT_TARGET_RUNTIME,
+     AGENT_VERSION,
+ )
+ from .._utils._project_files import (  # type: ignore
+     FileInfo,
+     files_to_include,
+     read_toml_project,
+ )
+ from .._utils._studio_project import (
+     AddedResource,
+     ModifiedResource,
+     ProjectFile,
+     ProjectFolder,
+     ProjectStructure,
+     StructuralMigration,
+     StudioClient,
+ )
+
+
+ class SwFileHandler:
+     """Handler for Studio Web file operations.
+
+     This class encapsulates all file operations for UiPath Studio Web projects,
+     including uploading, updating, deleting, and managing project structure.
+
+     Attributes:
+         directory: Local project directory
+         include_uv_lock: Whether to include uv.lock file
+         console: Console logger instance
+     """
+
+     def __init__(
+         self,
+         project_id: str,
+         directory: str,
+         include_uv_lock: bool = True,
+     ) -> None:
+         """Initialize the SwFileHandler.
+
+         Args:
+             project_id: The ID of the UiPath project
+             directory: Local project directory
+             include_uv_lock: Whether to include uv.lock file
+         """
+         self.directory = directory
+         self.include_uv_lock = include_uv_lock
+         self.console = ConsoleLogger()
+         self._studio_client = StudioClient(project_id)
+         self._project_structure: Optional[ProjectStructure] = None
+
+     def _get_folder_by_name(
+         self, structure: ProjectStructure, folder_name: str
+     ) -> Optional[ProjectFolder]:
+         """Get a folder from the project structure by name.
+
+         Args:
+             structure: The project structure to search
+             folder_name: Name of the folder to find
+
+         Returns:
+             Optional[ProjectFolder]: The found folder or None
+         """
+         for folder in structure.folders:
+             if folder.name == folder_name:
+                 return folder
+         return None
+
+     def collect_all_files(
+         self,
+         folder: ProjectFolder,
+         files_dict: Dict[str, ProjectFile],
+         current_path: str = "",
+     ) -> None:
+         """Recursively collect all files from a folder with computed paths.
+
+         Args:
+             folder: The folder to traverse
+             files_dict: Dictionary to store files (keyed by computed relative path)
+             current_path: The current path prefix for files in this folder
+         """
+         # Add files from the current folder
+         for file in folder.files:
+             file_path = f"{current_path}/{file.name}" if current_path else file.name
+             files_dict[file_path] = file
+
+         # Recursively process subfolders
+         for subfolder in folder.folders:
+             subfolder_path = (
+                 f"{current_path}/{subfolder.name}" if current_path else subfolder.name
+             )
+             self.collect_all_files(subfolder, files_dict, subfolder_path)
+
+     def _get_remote_files(
+         self,
+         structure: ProjectStructure,
+         source_code_folder: Optional[ProjectFolder] = None,
+     ) -> tuple[Dict[str, ProjectFile], Dict[str, ProjectFile]]:
+         """Get all files from the project structure, keyed by file path.
+
+         Args:
+             structure: The project structure
+             source_code_folder: Optional source_code folder to collect files from
+
+         Returns:
+             Tuple of (root_files, source_code_files) dictionaries with file paths as keys
+         """
+         root_files: Dict[str, ProjectFile] = {}
+         source_code_files: Dict[str, ProjectFile] = {}
+
+         # Add files from the root level
+         for file in structure.files:
+             root_files[file.name] = file
+
+         # Add files from the source_code folder if it exists
+         if source_code_folder:
+             self.collect_all_files(source_code_folder, source_code_files)
+
+         return root_files, source_code_files
+
+     async def _process_file_uploads(
+         self,
+         local_files: list[FileInfo],
+         source_code_files: Dict[str, ProjectFile],
+     ) -> None:
+         """Process all file uploads to the source_code folder.
+
+         Args:
+             local_files: List of files to upload
+             source_code_files: Dictionary of existing remote files
+
+         Raises:
+             Exception: If any file upload fails
+         """
+         structural_migration = StructuralMigration(
+             deleted_resources=[], added_resources=[], modified_resources=[]
+         )
+         processed_source_files: Set[str] = set()
+
+         for local_file in local_files:
+             if not os.path.exists(local_file.file_path):
+                 self.console.warning(
+                     f"File not found: {click.style(local_file.file_path, fg='cyan')}"
+                 )
+                 continue
+
+             # Skip agent.json as it's handled separately
+             if local_file.file_name == "agent.json":
+                 continue
+
+             remote_file = source_code_files.get(
+                 local_file.relative_path.replace("\\", "/"), None
+             )
+             if remote_file:
+                 processed_source_files.add(remote_file.id)
+                 structural_migration.modified_resources.append(
+                     ModifiedResource(
+                         id=remote_file.id, content_file_path=local_file.file_path
+                     )
+                 )
+                 self.console.info(
+                     f"Updating {click.style(local_file.file_name, fg='yellow')}"
+                 )
+             else:
+                 parent_path = os.path.dirname(local_file.relative_path)
+                 structural_migration.added_resources.append(
+                     AddedResource(
+                         content_file_path=local_file.file_path,
+                         parent_path=f"source_code/{parent_path}"
+                         if parent_path != ""
+                         else "source_code",
+                     )
+                 )
+                 self.console.info(
+                     f"Uploading {click.style(local_file.file_name, fg='cyan')}"
+                 )
+
+         # Identify files deleted locally and queue them for removal
+         structural_migration.deleted_resources.extend(
+             self._collect_deleted_files(source_code_files, processed_source_files)
+         )
+         await self._studio_client.perform_structural_migration_async(
+             structural_migration
+         )
+
+         # Clean up empty folders after migration
+         await self._cleanup_empty_folders()
+
+     def _collect_deleted_files(
+         self,
+         source_code_files: Dict[str, ProjectFile],
+         processed_source_file_paths: Set[str],
+     ) -> set[str]:
+         """Collect the IDs of remote files that no longer exist locally.
+
+         Args:
+             source_code_files: Dictionary of existing remote files
+             processed_source_file_paths: Set of remote file IDs that were processed
+
+         Returns:
+             set[str]: IDs of the remote files that should be deleted
+         """
+         if not source_code_files:
+             return set()
+
+         deleted_files: Set[str] = set()
+         for _, remote_file in source_code_files.items():
+             if remote_file.id not in processed_source_file_paths:
+                 deleted_files.add(remote_file.id)
+                 self.console.info(
+                     f"Deleting {click.style(remote_file.name, fg='bright_red')}"
+                 )
+
+         return deleted_files
+
+     async def _cleanup_empty_folders(self) -> None:
+         """Clean up empty folders in the source_code directory after structural migration.
+
+         This method:
+         1. Gets the current project structure
+         2. Recursively checks for empty folders within source_code
+         3. Deletes any empty folders found
+         """
+         try:
+             structure = await self._studio_client.get_project_structure_async()
+             source_code_folder = self._get_folder_by_name(structure, "source_code")
+
+             if not source_code_folder:
+                 return
+
+             # Collect all empty folders (bottom-up to avoid parent-child deletion conflicts)
+             empty_folder_ids = self._collect_empty_folders(source_code_folder)
+
+             for folder_info in empty_folder_ids:
+                 try:
+                     await self._studio_client.delete_item_async(folder_info["id"])
+                     self.console.info(
+                         f"Deleted empty folder {click.style(folder_info['name'], fg='bright_red')}"
+                     )
+                 except Exception as e:
+                     self.console.warning(
+                         f"Failed to delete empty folder {folder_info['name']}: {str(e)}"
+                     )
+
+         except Exception as e:
+             self.console.warning(f"Failed to cleanup empty folders: {str(e)}")
+
+     def _collect_empty_folders(self, folder: ProjectFolder) -> list[dict[str, str]]:
+         """Recursively collect IDs and names of empty folders.
+
+         Args:
+             folder: The folder to check for empty subfolders
+
+         Returns:
+             List of dictionaries containing folder ID and name for empty folders
+         """
+         empty_folders: list[dict[str, str]] = []
+
+         # Process subfolders first
+         for subfolder in folder.folders:
+             empty_subfolders = self._collect_empty_folders(subfolder)
+             empty_folders.extend(empty_subfolders)
+
+             # Check if this subfolder is empty after processing its children
+             if self._is_folder_empty(subfolder):
+                 empty_folders.append({"id": subfolder.id, "name": subfolder.name})
+
+         return empty_folders
+
+     def _is_folder_empty(self, folder: ProjectFolder) -> bool:
+         """Check if a folder is empty (no files and no non-empty subfolders).
+
+         Args:
+             folder: The folder to check
+
+         Returns:
+             True if the folder is empty, False otherwise
+         """
+         if folder.files:
+             return False
+
+         if not folder.folders:
+             return True
+
+         # If folder has subfolders, check if all subfolders are empty
+         for subfolder in folder.folders:
+             if not self._is_folder_empty(subfolder):
+                 return False
+
+         return True
+
+     async def _update_agent_json(
+         self,
+         agent_json_file: Optional[ProjectFile] = None,
+     ) -> None:
+         """Update the agent.json file with metadata from uipath.json.
+
+         This function:
+         1. Downloads the existing agent.json if it exists
+         2. Updates metadata based on uipath.json content
+         3. Increments the code version
+         4. Updates the author from the JWT or pyproject.toml
+         5. Uploads the updated agent.json
+
+         Args:
+             agent_json_file: Optional existing agent.json file
+
+         Raises:
+             httpx.HTTPError: If API requests fail
+             FileNotFoundError: If required files are missing
+             json.JSONDecodeError: If JSON parsing fails
+         """
+
+         def get_author_from_token_or_toml() -> str:
+             """Extract preferred_username from the JWT token or fall back to the pyproject.toml author.
+
+             Returns:
+                 str: Author name from the JWT preferred_username or the pyproject.toml authors field
+             """
+             import jwt
+
+             # Try to get the author from the JWT token first
+             token = os.getenv("UIPATH_ACCESS_TOKEN")
+             if token:
+                 try:
+                     decoded_token = jwt.decode(
+                         token, options={"verify_signature": False}
+                     )
+                     preferred_username = decoded_token.get("preferred_username")
+                     if preferred_username:
+                         return preferred_username
+                 except Exception:
+                     # If JWT decoding fails, fall back to pyproject.toml
+                     pass
+
+             toml_data = read_toml_project(os.path.join(directory, "pyproject.toml"))
+
+             return toml_data.get("authors", "").strip()
+
+         # Read uipath.json
+         directory = os.getcwd()
+         with open(os.path.join(directory, "uipath.json"), "r") as f:
+             uipath_config = json.load(f)
+
+         try:
+             entrypoints = [
+                 {"input": entry_point["input"], "output": entry_point["output"]}
+                 for entry_point in uipath_config["entryPoints"]
+             ]
+         except (FileNotFoundError, KeyError) as e:
+             self.console.error(
+                 f"Unable to extract entrypoints from the configuration file. Please run 'uipath init': {str(e)}",
+             )
+
+         author = get_author_from_token_or_toml()
+
+         # Initialize the agent.json structure
+         agent_json = {
+             "version": AGENT_VERSION,
+             "metadata": {
+                 "storageVersion": AGENT_STORAGE_VERSION,
+                 "targetRuntime": AGENT_TARGET_RUNTIME,
+                 "isConversational": False,
+                 "codeVersion": AGENT_INITIAL_CODE_VERSION,
+                 "author": author,
+                 "pushDate": datetime.now(timezone.utc).isoformat(),
+             },
+             "entryPoints": entrypoints,
+             "bindings": uipath_config.get(
+                 "bindings", {"version": "2.0", "resources": []}
+             ),
+         }
+
+         if agent_json_file:
+             # Download the existing agent.json
+             existing_agent_json = (
+                 await self._studio_client.download_file_async(agent_json_file.id)
+             ).json()
+
+             try:
+                 # Get the current version and increment the patch version
+                 version_parts = existing_agent_json["metadata"]["codeVersion"].split(
+                     "."
+                 )
+                 if len(version_parts) >= 3:
+                     version_parts[-1] = str(int(version_parts[-1]) + 1)
+                     agent_json["metadata"]["codeVersion"] = ".".join(version_parts)
+                 else:
+                     # If the version format is invalid, start from the initial version + 1
+                     agent_json["metadata"]["codeVersion"] = (
+                         AGENT_INITIAL_CODE_VERSION[:-1] + "1"
+                     )
+             except (json.JSONDecodeError, KeyError, ValueError):
+                 self.console.warning(
+                     "Could not parse existing agent.json, using default version"
+                 )
+         file, action = await self._studio_client.upload_file_async(
+             file_content=json.dumps(agent_json),
+             file_name="agent.json",
+             remote_file=agent_json_file,
+         )
+         self.console.success(f"{action} {click.style('agent.json', fg='cyan')}")
+
+     async def upload_source_files(self, config_data: dict[str, Any]) -> None:
+         """Main method to upload source files to the UiPath project.
+
+         - Gets the project structure
+         - Creates the source_code folder if needed
+         - Uploads/updates files
+         - Deletes removed files
+
+         Args:
+             config_data: Project configuration data
+
+         Raises:
+             Exception: If any step in the process fails
+         """
+         structure = await self._studio_client.get_project_structure_async()
+         source_code_folder = self._get_folder_by_name(structure, "source_code")
+         root_files, source_code_files = self._get_remote_files(
+             structure, source_code_folder
+         )
+
+         # Create the source_code folder if it doesn't exist
+         if not source_code_folder:
+             await self._studio_client.create_folder_async("source_code")
+
+             self.console.success(
+                 f"Created {click.style('source_code', fg='cyan')} folder"
+             )
+             source_code_files = {}
+
+         # Get the files to upload and process them
+         files = files_to_include(
+             config_data,
+             self.directory,
+             self.include_uv_lock,
+             directories_to_ignore=["evals"],
+         )
+         await self._process_file_uploads(files, source_code_files)
+
+         await self._update_agent_json(
+             root_files.get("agent.json", None),
+         )
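
For orientation, here is a minimal sketch of how this handler might be driven from a publish flow. The module path, the project ID, and the asyncio wiring are illustrative assumptions and are not part of this diff; only the SwFileHandler constructor and upload_source_files come from the code above. Note that upload_source_files also refreshes agent.json and bumps the patch component of codeVersion (for example, an existing 1.0.3 becomes 1.0.4).

    import asyncio
    import json

    # Assumed import path, for illustration only; the diff does not show the module location.
    from uipath._cli._utils._sw_file_handler import SwFileHandler

    async def publish_sources() -> None:
        # Placeholder values; the real CLI resolves these from the environment and project config.
        with open("uipath.json") as f:
            config_data = json.load(f)

        handler = SwFileHandler(
            project_id="<project-id>",
            directory=".",
            include_uv_lock=True,
        )
        # Uploads new files, updates changed ones, prunes deleted ones, then updates agent.json.
        await handler.upload_source_files(config_data)

    asyncio.run(publish_sources())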
@@ -86,3 +86,19 @@ def get_org_scoped_url(base_url: str) -> str:
      org_name, *_ = parsed.path.strip("/").split("/")
      org_scoped_url = f"{parsed.scheme}://{parsed.netloc}/{org_name}"
      return org_scoped_url
+
+
+ def clean_directory(directory: str) -> None:
+     """Clean up Python files in the specified directory.
+
+     Args:
+         directory (str): Path to the directory to clean.
+
+     This function removes all Python files (*.py) from the top level of the
+     specified directory. It is used to prepare a directory for a quickstart
+     agent/coded MCP server.
+     """
+     for file_name in os.listdir(directory):
+         file_path = os.path.join(directory, file_name)
+
+         if os.path.isfile(file_path) and file_name.endswith(".py"):
+             os.remove(file_path)
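
To make the behavior concrete, a small usage sketch (the temporary directory and file names below are illustrative): clean_directory removes only top-level .py files and does not recurse into subdirectories.

    import os
    import tempfile

    with tempfile.TemporaryDirectory() as tmp:
        for name in ("main.py", "helper.py", "notes.md"):
            open(os.path.join(tmp, name), "w").close()
        os.mkdir(os.path.join(tmp, "pkg"))  # subdirectories are left untouched

        clean_directory(tmp)                # removes main.py and helper.py only
        print(sorted(os.listdir(tmp)))      # ['notes.md', 'pkg']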
@@ -45,6 +45,8 @@ PYTHON_BINARY_EXTENSIONS = {".pickle", ".pkl"}

  SPECIAL_EXTENSIONS = {""}  # Extensionless binary files

+ UIPATH_PROJECT_ID = "UIPATH_PROJECT_ID"
+
  # Pre-compute the union for optimal performance
  BINARY_EXTENSIONS = (
      IMAGE_EXTENSIONS
@@ -24,6 +24,7 @@ class FileInfo(BaseModel):
          is_binary: Whether the file should be treated as binary
      """

+     file_name: str
      file_path: str
      relative_path: str
      is_binary: bool
@@ -299,7 +300,10 @@ def read_toml_project(file_path: str) -> dict:


  def files_to_include(
-     config_data: Optional[dict[Any, Any]], directory: str
+     config_data: Optional[dict[Any, Any]],
+     directory: str,
+     include_uv_lock: bool = True,
+     directories_to_ignore: list[str] | None = None,
  ) -> list[FileInfo]:
      """Get list of files to include in the project based on configuration.

@@ -307,14 +311,18 @@ def files_to_include(
      and explicit inclusion rules. Skips virtual environments and hidden directories.

      Args:
-         settings_section: Configuration section containing file inclusion rules
+         config_data: Configuration containing file inclusion rules
          directory: Root directory to search for files
+         include_uv_lock: Whether to include uv.lock file
+         directories_to_ignore: List of directories to ignore

      Returns:
          list[FileInfo]: List of file information objects for included files
      """
      file_extensions_included = [".py", ".mermaid", ".json", ".yaml", ".yml", ".md"]
      files_included = ["pyproject.toml"]
+     if include_uv_lock:
+         files_included += ["uv.lock"]
      if "settings" in config_data:
          settings = config_data["settings"]
          if "fileExtensionsIncluded" in settings:
@@ -344,15 +352,19 @@ def files_to_include(
          dirs[:] = [
              d
              for d in dirs
-             if not d.startswith(".") and not is_venv_dir(os.path.join(root, d))
+             if not d.startswith(".")
+             and not is_venv_dir(os.path.join(root, d))
+             and (directories_to_ignore is None or d not in directories_to_ignore)
          ]
          for file in files:
              file_extension = os.path.splitext(file)[1].lower()
              if file_extension in file_extensions_included or file in files_included:
                  file_path = os.path.join(root, file)
+                 file_name = os.path.basename(file_path)
                  rel_path = os.path.relpath(file_path, directory)
                  extra_files.append(
                      FileInfo(
+                         file_name=file_name,
                          file_path=file_path,
                          relative_path=rel_path,
                          is_binary=is_binary_file(file_extension),
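
Taken together, these changes let callers skip uv.lock and ignore whole directories when collecting files, and every FileInfo now carries its file_name. A sketch of a call that exercises the new parameters (the config_data content and paths are placeholders):

    # Minimal configuration; in practice config_data comes from uipath.json.
    config_data = {"settings": {}}

    files = files_to_include(
        config_data,
        ".",
        include_uv_lock=False,            # leave uv.lock out of the collected set
        directories_to_ignore=["evals"],  # mirrors how SwFileHandler calls it
    )
    for info in files:
        print(info.file_name, info.relative_path, info.is_binary)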