uipath 2.1.99__py3-none-any.whl → 2.1.101__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of uipath might be problematic.

uipath/_cli/__init__.py CHANGED
@@ -3,7 +3,6 @@ import sys
 
 import click
 
-from .._utils._logs import setup_logging
 from ._utils._common import add_cwd_to_path, load_environment_variables
 from .cli_auth import auth as auth
 from .cli_debug import debug as debug  # type: ignore
@@ -48,7 +47,6 @@ def _get_safe_version() -> str:
 def cli(lv: bool, v: bool) -> None:
     load_environment_variables()
     add_cwd_to_path()
-    setup_logging()
     if lv:
         try:
             version = importlib.metadata.version("uipath-langchain")
@@ -4,7 +4,7 @@ import json
 import logging
 import os
 from datetime import datetime, timezone
-from typing import Any, Dict, Optional, Set
+from typing import Any, AsyncIterator, Dict, Optional, Set
 
 from ...models.exceptions import EnrichedException
 from .._utils._constants import (
@@ -15,6 +15,7 @@ from .._utils._constants import (
 )
 from .._utils._project_files import (  # type: ignore
     FileInfo,
+    FileOperationUpdate,
     files_to_include,
     read_toml_project,
 )
@@ -40,7 +41,6 @@ class SwFileHandler:
     Attributes:
         directory: Local project directory
         include_uv_lock: Whether to include uv.lock file
-        console: Console logger instance
     """
 
     def __init__(
@@ -67,6 +67,7 @@ class SwFileHandler:
         """Get a folder from the project structure by name.
 
         Args:
+            structure: The project structure
            folder_name: Name of the folder to find
 
         Returns:
@@ -134,16 +135,23 @@ class SwFileHandler:
         local_files: list[FileInfo],
         source_code_files: Dict[str, ProjectFile],
         root_files: Dict[str, ProjectFile],
-    ) -> None:
+    ) -> list[FileOperationUpdate]:
         """Process all file uploads to the source_code folder.
 
+        This method:
+        1. Compares local files with remote files
+        2. Builds a structural migration with added/modified/deleted resources
+        3. Prepares agent.json and entry-points.json
+        4. Performs the structural migration
+        5. Cleans up empty folders
+
         Args:
             local_files: List of files to upload
             source_code_files: Dictionary of existing remote files
             root_files: Dictionary of existing root-level files
 
         Returns:
-            Set of processed file names
+            List of FileOperationUpdate objects describing all file operations
 
         Raises:
             Exception: If any file upload fails
@@ -152,7 +160,9 @@ class SwFileHandler:
             deleted_resources=[], added_resources=[], modified_resources=[]
         )
         processed_source_files: Set[str] = set()
+        updates: list[FileOperationUpdate] = []
 
+        # Process each local file and build structural migration
         for local_file in local_files:
             if not os.path.exists(local_file.file_path):
                 logger.info(f"File not found: '{local_file.file_path}'")
@@ -166,14 +176,22 @@ class SwFileHandler:
                 local_file.relative_path.replace("\\", "/"), None
             )
             if remote_file:
+                # File exists remotely - mark for update
                 processed_source_files.add(remote_file.id)
                 structural_migration.modified_resources.append(
                     ModifiedResource(
                         id=remote_file.id, content_file_path=local_file.file_path
                     )
                 )
-                logger.info(f"Updating '{local_file.file_name}'")
+                updates.append(
+                    FileOperationUpdate(
+                        file_path=local_file.file_name,
+                        status="updating",
+                        message=f"Updating '{local_file.file_name}'",
+                    )
+                )
             else:
+                # File doesn't exist remotely - mark for upload
                 parent_path = os.path.dirname(local_file.relative_path)
                 structural_migration.added_resources.append(
                     AddedResource(
@@ -183,24 +201,53 @@ class SwFileHandler:
                         else "source_code",
                     )
                 )
-                logger.info(f"Uploading '{local_file.relative_path}'")
+                updates.append(
+                    FileOperationUpdate(
+                        file_path=local_file.relative_path,
+                        status="uploading",
+                        message=f"Uploading '{local_file.relative_path}'",
+                    )
+                )
 
-        # identify and add deleted files
-        structural_migration.deleted_resources.extend(
-            self._collect_deleted_files(source_code_files, processed_source_files)
+        # Identify and add deleted files (files that exist remotely but not locally)
+        deleted_files = self._collect_deleted_files(
+            source_code_files, processed_source_files
         )
+        structural_migration.deleted_resources.extend(deleted_files)
 
+        # Add delete updates
+        for file_id in deleted_files:
+            file_name = next(
+                (name for name, f in source_code_files.items() if f.id == file_id),
+                file_id,
+            )
+            updates.append(
+                FileOperationUpdate(
+                    file_path=file_name,
+                    status="deleting",
+                    message=f"Deleting '{file_name}'",
+                )
+            )
+
+        # Load uipath.json configuration
         with open(os.path.join(self.directory, "uipath.json"), "r") as f:
             uipath_config = json.load(f)
 
-        await self._prepare_agent_json_migration(
+        # Prepare agent.json migration (may download existing file to increment version)
+        agent_update = await self._prepare_agent_json_migration(
            structural_migration, root_files, uipath_config
         )
+        if agent_update:
+            updates.append(agent_update)
 
-        await self._prepare_entrypoints_json_migration(
+        # Prepare entry-points.json migration (may download existing file to merge)
+        entry_points_update = await self._prepare_entrypoints_json_migration(
            structural_migration, root_files, uipath_config
         )
+        if entry_points_update:
+            updates.append(entry_points_update)
 
+        # Perform the structural migration (uploads/updates/deletes all files)
         await self._studio_client.perform_structural_migration_async(
             structural_migration
         )
@@ -208,78 +255,67 @@ class SwFileHandler:
         # Clean up empty folders after migration
         await self._cleanup_empty_folders()
 
+        return updates
+
     def _collect_deleted_files(
         self,
         source_code_files: Dict[str, ProjectFile],
-        processed_source_file_paths: Set[str],
+        processed_source_file_ids: Set[str],
     ) -> set[str]:
-        """Delete remote files that no longer exist locally.
+        """Identify remote files that no longer exist locally.
 
         Args:
             source_code_files: Dictionary of existing remote files
-            processed_source_file_paths: Set of files that were processed
+            processed_source_file_ids: Set of file IDs that were processed (exist locally)
 
-        Raises:
-            Exception: If any file deletion fails
+        Returns:
+            Set of file IDs to delete
         """
-        if not source_code_files:
-            return set()
+        deleted_file_ids: Set[str] = set()
 
-        deleted_files: Set[str] = set()
         for _, remote_file in source_code_files.items():
-            if remote_file.id not in processed_source_file_paths:
-                deleted_files.add(remote_file.id)
-                logger.info(f"Deleting '{remote_file.name}'")
+            if remote_file.id not in processed_source_file_ids:
+                deleted_file_ids.add(remote_file.id)
 
-        return deleted_files
+        return deleted_file_ids
 
     async def _cleanup_empty_folders(self) -> None:
-        """Clean up empty folders in the source_code directory after structural migration.
+        """Delete empty folders from the project structure.
 
         This method:
         1. Gets the current project structure
-        2. Recursively checks for empty folders within source_code
-        3. Deletes any empty folders found
+        2. Recursively finds all empty folders
+        3. Deletes each empty folder
         """
-        try:
-            structure = await self._studio_client.get_project_structure_async()
-            source_code_folder = self._get_folder_by_name(structure, "source_code")
-
-            if not source_code_folder:
-                return
+        structure = await self._studio_client.get_project_structure_async()
+        source_code_folder = self._get_folder_by_name(structure, "source_code")
 
-            # Collect all empty folders (bottom-up to avoid parent-child deletion conflicts)
-            empty_folder_ids = self._collect_empty_folders(source_code_folder)
+        if not source_code_folder:
+            return
 
-            for folder_info in empty_folder_ids:
-                try:
-                    await self._studio_client.delete_item_async(folder_info["id"])
-                    logger.info(f"Deleted empty folder '{folder_info['name']}'")
-                except Exception as e:
-                    logger.warning(
-                        f"Failed to delete empty folder '{folder_info['name']}': {str(e)}"
-                    )
+        empty_folders = self._find_empty_folders(source_code_folder)
 
-        except Exception as e:
-            logger.warning(f"Failed to cleanup empty folders: {str(e)}")
+        if empty_folders:
+            for folder in empty_folders:
+                await self._studio_client.delete_item_async(folder["id"])
+                logger.info(f"Deleted empty folder: '{folder['name']}'")
 
-    def _collect_empty_folders(self, folder: ProjectFolder) -> list[dict[str, str]]:
-        """Recursively collect IDs and names of empty folders.
+    def _find_empty_folders(self, folder: ProjectFolder) -> list[dict[str, str]]:
+        """Recursively find all empty folders.
 
         Args:
-            folder: The folder to check for empty subfolders
+            folder: The folder to check
 
         Returns:
-            List of dictionaries containing folder ID and name for empty folders
+            List of empty folder info dictionaries with 'id' and 'name' keys
         """
         empty_folders: list[dict[str, str]] = []
 
-        # Process subfolders first
         for subfolder in folder.folders:
-            empty_subfolders = self._collect_empty_folders(subfolder)
-            empty_folders.extend(empty_subfolders)
+            # Recursively check subfolders first
+            empty_folders.extend(self._find_empty_folders(subfolder))
 
-            # Check if the current folder is empty after processing its children
+            # Check if current subfolder is empty after processing its children
            if self._is_folder_empty(subfolder):
                if subfolder.id is not None:
                    empty_folders.append({"id": subfolder.id, "name": subfolder.name})
@@ -313,29 +349,55 @@ class SwFileHandler:
         structural_migration: StructuralMigration,
         root_files: Dict[str, ProjectFile],
         uipath_config: Dict[str, Any],
-    ) -> None:
-        """Prepare entry-points.json to be included in the same structural migration."""
+    ) -> Optional[FileOperationUpdate]:
+        """Prepare entry-points.json to be included in the same structural migration.
+
+        This method:
+        1. Downloads existing entry-points.json if it exists
+        2. Merges entryPoints from uipath.json config
+        3. Adds to structural migration as modified or added resource
+
+        Args:
+            structural_migration: The structural migration to add resources to
+            root_files: Dictionary of root-level files
+            uipath_config: Configuration from uipath.json
+
+        Returns:
+            FileOperationUpdate describing the operation, or None if error occurred
+        """
         existing = root_files.get("entry-points.json")
+
         if existing:
+            # Entry-points.json exists - download and merge
             try:
                 entry_points_json = (
                     await self._studio_client.download_project_file_async(existing)
                 ).json()
                 entry_points_json["entryPoints"] = uipath_config["entryPoints"]
-
             except Exception:
                 logger.info(
                     "Could not parse existing 'entry-points.json' file, using default version"
                 )
+                # If parsing fails, create default structure
+                entry_points_json = {
+                    "$schema": "https://cloud.uipath.com/draft/2024-12/entry-point",
+                    "$id": "entry-points.json",
+                    "entryPoints": uipath_config["entryPoints"],
+                }
+
             structural_migration.modified_resources.append(
                 ModifiedResource(
                     id=existing.id,
                     content_string=json.dumps(entry_points_json),
                 )
             )
-            logger.info("Updating 'entry-points.json'")
-
+            return FileOperationUpdate(
+                file_path="entry-points.json",
+                status="updating",
+                message="Updating 'entry-points.json'",
+            )
         else:
+            # Entry-points.json doesn't exist - create new one
            logger.info(
                "'entry-points.json' file does not exist in Studio Web project, initializing using default version"
            )
@@ -350,17 +412,37 @@ class SwFileHandler:
                     content_string=json.dumps(entry_points_json),
                 )
             )
-            logger.info("Uploading 'entry-points.json'")
+            return FileOperationUpdate(
+                file_path="entry-points.json",
+                status="uploading",
+                message="Uploading 'entry-points.json'",
+            )
 
     async def _prepare_agent_json_migration(
         self,
         structural_migration: StructuralMigration,
         root_files: Dict[str, ProjectFile],
         uipath_config: Dict[str, Any],
-    ) -> None:
-        """Prepare agent.json to be included in the same structural migration."""
+    ) -> Optional[FileOperationUpdate]:
+        """Prepare agent.json to be included in the same structural migration.
+
+        This method:
+        1. Extracts author from JWT token or pyproject.toml
+        2. Downloads existing agent.json if it exists to increment code version
+        3. Builds complete agent.json structure
+        4. Adds to structural migration as modified or added resource
+
+        Args:
+            structural_migration: The structural migration to add resources to
+            root_files: Dictionary of root-level files
+            uipath_config: Configuration from uipath.json
+
+        Returns:
+            FileOperationUpdate describing the operation, or None if error occurred
+        """
 
         def get_author_from_token_or_toml() -> str:
+            """Get author from JWT token or fall back to pyproject.toml."""
            import jwt
 
            token = os.getenv("UIPATH_ACCESS_TOKEN")
@@ -381,6 +463,7 @@ class SwFileHandler:
             )
             return toml_data.get("authors", "").strip()
 
+        # Extract input and output schemas from entrypoints
         try:
             input_schema = uipath_config["entryPoints"][0]["input"]
             output_schema = uipath_config["entryPoints"][0]["output"]
@@ -388,11 +471,11 @@ class SwFileHandler:
             logger.error(
                 f"Unable to extract entrypoints from configuration file. Please run 'uipath init' : {str(e)}",
             )
-            return
+            return None
 
         author = get_author_from_token_or_toml()
 
-        # Initialize agent.json structure
+        # Initialize agent.json structure with metadata
         agent_json = {
             "version": AGENT_VERSION,
             "metadata": {
@@ -415,6 +498,7 @@ class SwFileHandler:
 
         existing = root_files.get("agent.json")
         if existing:
+            # Agent.json exists - download and increment version
             try:
                 existing_agent_json = (
                     await self._studio_client.download_project_file_async(existing)
@@ -423,9 +507,11 @@ class SwFileHandler:
                     "."
                 )
                 if len(version_parts) >= 3:
+                    # Increment patch version (0.1.0 -> 0.1.1)
                     version_parts[-1] = str(int(version_parts[-1]) + 1)
                     agent_json["metadata"]["codeVersion"] = ".".join(version_parts)
                 else:
+                    # Invalid version format, use default with patch = 1
                     agent_json["metadata"]["codeVersion"] = (
                         AGENT_INITIAL_CODE_VERSION[:-1] + "1"
                     )
@@ -440,8 +526,13 @@ class SwFileHandler:
                     content_string=json.dumps(agent_json),
                 )
             )
-            logger.info("Updating 'agent.json'")
+            return FileOperationUpdate(
+                file_path="agent.json",
+                status="updating",
+                message="Updating 'agent.json'",
+            )
         else:
+            # Agent.json doesn't exist - create new one
            logger.info(
                "'agent.json' file does not exist in Studio Web project, initializing using default version"
            )
@@ -451,29 +542,40 @@ class SwFileHandler:
                     content_string=json.dumps(agent_json),
                 )
             )
-            logger.info("Uploading 'agent.json'")
+            return FileOperationUpdate(
+                file_path="agent.json",
+                status="uploading",
+                message="Uploading 'agent.json'",
+            )
 
-    async def upload_source_files(self, settings: Optional[dict[str, Any]]) -> None:
+    async def upload_source_files(
+        self, settings: Optional[dict[str, Any]]
+    ) -> AsyncIterator[FileOperationUpdate]:
         """Main method to upload source files to the UiPath project.
 
-        - Gets project structure
-        - Creates source_code folder if needed
-        - Uploads/updates files
-        - Deletes removed files
+        This method:
+        1. Gets project structure (or creates if doesn't exist)
+        2. Creates source_code folder if needed
+        3. Collects local files to upload
+        4. Processes file uploads (yields progress updates)
+        5. Performs structural migration
+        6. Cleans up empty folders
 
         Args:
-            settings: File handling settings
+            settings: File handling settings (includes/excludes)
 
-        Returns:
-            Dict[str, ProjectFileExtended]: Root level files for agent.json handling
+        Yields:
+            FileOperationUpdate: Progress updates for each file operation
 
         Raises:
            Exception: If any step in the process fails
        """
+        # Get or create project structure
        try:
            structure = await self._studio_client.get_project_structure_async()
        except EnrichedException as e:
            if e.status_code == 404:
+                # Project structure doesn't exist - create empty structure and lock
                structure = ProjectStructure(name="", files=[], folders=[])
                await self._studio_client._put_lock()
            else:
@@ -487,8 +589,11 @@ class SwFileHandler:
         # Create source_code folder if it doesn't exist
         if not source_code_folder:
             await self._studio_client.create_folder_async("source_code")
-
-            logger.info("Created 'source_code' folder.")
+            yield FileOperationUpdate(
+                file_path="source_code",
+                status="created_folder",
+                message="Created 'source_code' folder.",
+            )
             source_code_files = {}
 
         # Get files to upload and process them
@@ -498,4 +603,10 @@ class SwFileHandler:
             self.include_uv_lock,
             directories_to_ignore=["evals"],
         )
-        await self._process_file_uploads(files, source_code_files, root_files)
+
+        # Process all files and get updates (this includes HTTP calls for agent.json/entry-points.json)
+        updates = await self._process_file_uploads(files, source_code_files, root_files)
+
+        # Yield all updates
+        for update in updates:
+            yield update
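With this change `upload_source_files` no longer logs progress itself; it is an async generator that yields `FileOperationUpdate` objects, so the caller decides how to render them. A minimal consumption sketch (the `handler` wiring and plain `print` output are assumptions for illustration, not code from this release):

```python
import asyncio


async def push_with_progress(handler, settings=None):
    # handler is assumed to be an already-configured SwFileHandler instance.
    # Each yielded update carries file_path, status, and a human-readable message.
    async for update in handler.upload_source_files(settings):
        print(f"[{update.status}] {update.message}")


# asyncio.run(push_with_progress(handler))
```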
@@ -5,7 +5,7 @@ import logging
 import os
 import re
 from pathlib import Path
-from typing import Any, Dict, Optional, Protocol, Tuple
+from typing import Any, AsyncIterator, Dict, Literal, Optional, Protocol, Tuple
 
 from pydantic import BaseModel, TypeAdapter
 
@@ -26,6 +26,33 @@ except ImportError:
 logger = logging.getLogger(__name__)
 
 
+class FileOperationUpdate(BaseModel):
+    """Update about a file operation in progress."""
+
+    file_path: str
+    status: Literal[
+        "downloading",
+        "updated",
+        "skipped",
+        "up_to_date",
+        "downloaded",
+        "uploading",
+        "updating",
+        "deleting",
+        "created_folder",
+    ]
+    message: str
+
+
+class ProjectPullError(Exception):
+    """Exception raised when pulling a project fails."""
+
+    def __init__(self, project_id: str, message: str = "Failed to pull UiPath project"):
+        self.project_id = project_id
+        self.message = message
+        super().__init__(self.message)
+
+
 class FileConflictHandler(Protocol):
     """Protocol for handling file conflicts."""
 
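`FileOperationUpdate` is a plain pydantic model, so an out-of-range `status` is rejected at construction time by the `Literal` annotation. A small standalone illustration (not part of the diff itself):

```python
from pydantic import ValidationError

update = FileOperationUpdate(
    file_path="main.py",
    status="uploading",
    message="Uploading 'main.py'",
)
print(update.status)  # -> uploading

try:
    # "renaming" is not one of the allowed Literal values, so this raises.
    FileOperationUpdate(file_path="main.py", status="renaming", message="...")
except ValidationError as exc:
    print(f"rejected: {exc.error_count()} validation error(s)")
```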
@@ -514,8 +541,12 @@ async def pull_project(
     project_id: str,
     download_configuration: dict[str, Path],
     conflict_handler: Optional[FileConflictHandler] = None,
-):
-    """Pull project with configurable conflict handling."""
+) -> AsyncIterator[FileOperationUpdate]:
+    """Pull project with configurable conflict handling.
+
+    Yields:
+        FileOperationUpdate: Progress updates for each file operation
+    """
     if conflict_handler is None:
         conflict_handler = AlwaysOverwriteHandler()
 
@@ -526,14 +557,14 @@
         for source_key, destination in download_configuration.items():
             source_folder = get_folder_by_name(structure, source_key)
             if source_folder:
-                await download_folder_files(
+                async for update in download_folder_files(
                     studio_client, source_folder, destination, conflict_handler
-                )
+                ):
+                    yield update
             else:
                 logger.warning(f"No '{source_key}' folder found in remote project")
-    except Exception:
-        logger.exception("Failed to pull UiPath project")
-        raise
+    except Exception as e:
+        raise ProjectPullError(project_id) from e
 
 
 async def download_folder_files(
@@ -541,18 +572,27 @@
     folder: ProjectFolder,
     base_path: Path,
     conflict_handler: FileConflictHandler,
-) -> None:
-    """Download files from a folder recursively.
+) -> AsyncIterator[FileOperationUpdate]:
+    """Download files from a folder recursively, yielding progress updates.
 
     Args:
         studio_client: Studio client
         folder: The folder to download files from
         base_path: Base path for local file storage
         conflict_handler: Handler for file conflicts
+
+    Yields:
+        FileOperationUpdate: Progress updates for each file operation
     """
     files_dict: Dict[str, ProjectFile] = {}
     collect_files_from_folder(folder, "", files_dict)
 
+    yield FileOperationUpdate(
+        file_path=folder.name,
+        status="downloading",
+        message=f"Downloading '{folder.name}'...",
+    )
+
     for file_path, remote_file in files_dict.items():
         local_path = base_path / file_path
         local_path.parent.mkdir(parents=True, exist_ok=True)
@@ -572,12 +612,30 @@
                 ):
                     with open(local_path, "w", encoding="utf-8", newline="\n") as f:
                         f.write(remote_content)
-                    logger.info(f"Updated '{file_path}'")
+
+                    yield FileOperationUpdate(
+                        file_path=file_path,
+                        status="updated",
+                        message=f"Updated '{file_path}'",
+                    )
                 else:
-                    logger.info(f"Skipped '{file_path}'")
+                    yield FileOperationUpdate(
+                        file_path=file_path,
+                        status="skipped",
+                        message=f"Skipped '{file_path}'",
+                    )
             else:
-                logger.info(f"File '{file_path}' is up to date")
+                yield FileOperationUpdate(
+                    file_path=file_path,
+                    status="up_to_date",
+                    message=f"File '{file_path}' is up to date",
+                )
         else:
             with open(local_path, "w", encoding="utf-8", newline="\n") as f:
                 f.write(remote_content)
-            logger.info(f"Downloaded '{file_path}'")
+
+            yield FileOperationUpdate(
+                file_path=file_path,
+                status="downloaded",
+                message=f"Downloaded '{file_path}'",
+            )
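Because `pull_project` is now an async generator that wraps failures in `ProjectPullError`, callers iterate it for progress and catch the new exception type; note that, as with any generator, nothing runs (and no error surfaces) until iteration starts. A hedged sketch of a caller, assuming `pull_project` is invoked with only the parameters visible in this hunk:

```python
import asyncio
from pathlib import Path


async def pull_with_progress(project_id: str) -> None:
    # Map remote folder names to local directories; "source_code" mirrors
    # the folder name used by the push side in this release.
    download_configuration = {"source_code": Path.cwd()}
    try:
        async for update in pull_project(project_id, download_configuration):
            print(f"[{update.status}] {update.message}")
    except ProjectPullError as exc:
        print(f"{exc.message} (project {exc.project_id})")
        raise


# asyncio.run(pull_with_progress("<project-id>"))
```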
uipath/_cli/cli_init.py CHANGED
@@ -190,7 +190,10 @@ def init(entrypoint: str, infer_bindings: bool, no_agents_md_override: bool) ->
     result = Middlewares.next(
         "init",
         entrypoint,
-        options={"infer_bindings": infer_bindings},
+        options={
+            "infer_bindings": infer_bindings,
+            "no_agents_md_override": no_agents_md_override,
+        },
         write_config=write_config_file,
     )