pixi-ros 0.1.2__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pixi_ros/cli.py CHANGED
@@ -5,7 +5,7 @@ from typing import Annotated
5
5
  import typer
6
6
 
7
7
  from pixi_ros.init import init_workspace
8
- from pixi_ros.mappings import get_ros_distros
8
+ from pixi_ros.mappings import get_platforms, get_ros_distros
9
9
 
10
10
  app = typer.Typer(
11
11
  name="pixi-ros",
@@ -27,6 +27,15 @@ def init(
27
27
  help="ROS distribution (e.g., humble, iron, jazzy)",
28
28
  ),
29
29
  ] = None,
30
+ platforms: Annotated[
31
+ list[str] | None,
32
+ typer.Option(
33
+ "--platform",
34
+ "-p",
35
+ help="Target platforms (e.g., linux-64, osx-arm64, win-64)."
36
+ " Can be specified multiple times.",
37
+ ),
38
+ ] = None,
30
39
  ):
31
40
  """Initialize pixi.toml for a ROS workspace."""
32
41
  # If distro not provided, prompt user to select one
@@ -50,7 +59,7 @@ def init(
50
59
  distro = available_distros[selection_num - 1]
51
60
  else:
52
61
  typer.echo(
53
- f"Error: Invalid selection.Please choose 1-{dist_count}",
62
+ f"Error: Invalid selection. Please choose 1-{dist_count}",
54
63
  err=True,
55
64
  )
56
65
  raise typer.Exit(code=1)
@@ -65,7 +74,55 @@ def init(
65
74
  typer.echo(f"Available: {', '.join(available_distros)}", err=True)
66
75
  raise typer.Exit(code=1) from err
67
76
 
68
- init_workspace(distro)
77
+ # If platforms not provided, prompt user to select
78
+ if platforms is None or len(platforms) == 0:
79
+ available_platforms = get_platforms()
80
+ typer.echo("\nAvailable target platforms:")
81
+ for i, p in enumerate(available_platforms, 1):
82
+ typer.echo(f" {i}. {p}")
83
+
84
+ # Prompt for selection (can be comma-separated or space-separated)
85
+ selection = typer.prompt(
86
+ "\nSelect platforms (enter numbers or names, comma or space separated)",
87
+ type=str,
88
+ )
89
+
90
+ # Parse selection (can be numbers or names, comma or space separated)
91
+ platforms = []
92
+ # Split by comma or space
93
+ selections = selection.replace(",", " ").split()
94
+
95
+ for sel in selections:
96
+ sel = sel.strip()
97
+ if not sel:
98
+ continue
99
+
100
+ try:
101
+ # Try parsing as number
102
+ sel_num = int(sel)
103
+ if 1 <= sel_num <= len(available_platforms):
104
+ platforms.append(available_platforms[sel_num - 1])
105
+ else:
106
+ typer.echo(
107
+ f"Error: Invalid selection {sel_num}."
108
+ + f"Please choose 1-{len(available_platforms)}",
109
+ err=True,
110
+ )
111
+ raise typer.Exit(code=1)
112
+ except ValueError as err:
113
+ # User entered a name instead of number
114
+ if sel in available_platforms:
115
+ platforms.append(sel)
116
+ else:
117
+ typer.echo(f"Error: '{sel}' is not a valid platform", err=True)
118
+ typer.echo(f"Available: {', '.join(available_platforms)}", err=True)
119
+ raise typer.Exit(code=1) from err
120
+
121
+ if not platforms:
122
+ typer.echo("Error: No platforms selected", err=True)
123
+ raise typer.Exit(code=1)
124
+
125
+ init_workspace(distro, platforms=platforms)
69
126
 
70
127
 
71
128
  def main():
@@ -792,6 +792,8 @@ python3-importlib-resources:
792
792
  pixi: [importlib_resources]
793
793
  python3-jinja2:
794
794
  pixi: [jinja2]
795
+ python3-jsonschema:
796
+ pixi: [jsonschema]
795
797
  python3-kitchen:
796
798
  pixi: [kitchen]
797
799
  python3-lark-parser:
pixi_ros/init.py CHANGED
@@ -17,13 +17,18 @@ from pixi_ros.workspace import discover_packages, find_workspace_root
17
17
  console = Console()
18
18
 
19
19
 
20
- def init_workspace(distro: str, workspace_path: Path | None = None) -> bool:
20
+ def init_workspace(
21
+ distro: str,
22
+ workspace_path: Path | None = None,
23
+ platforms: list[str] | None = None,
24
+ ) -> bool:
21
25
  """
22
26
  Initialize or update pixi.toml for a ROS workspace.
23
27
 
24
28
  Args:
25
29
  distro: ROS distribution (e.g., "humble", "iron", "jazzy")
26
30
  workspace_path: Path to workspace root (defaults to current directory)
31
+ platforms: Target platforms (e.g., ["linux-64", "osx-arm64"])
27
32
 
28
33
  Returns:
29
34
  True if successful, False otherwise
@@ -87,9 +92,9 @@ def init_workspace(distro: str, workspace_path: Path | None = None) -> bool:
87
92
  _display_dependencies(packages, distro)
88
93
 
89
94
  # Update configuration
90
- _ensure_workspace_section(pixi_config, workspace_path)
95
+ _ensure_workspace_section(pixi_config, workspace_path, platforms)
91
96
  _ensure_channels(pixi_config, distro)
92
- _ensure_dependencies(pixi_config, packages, distro)
97
+ _ensure_dependencies(pixi_config, packages, distro, platforms)
93
98
  _ensure_tasks(pixi_config)
94
99
  _ensure_activation(pixi_config)
95
100
 
@@ -232,7 +237,9 @@ def _display_dependencies(packages, distro: str):
232
237
  console.print("")
233
238
 
234
239
 
235
- def _ensure_workspace_section(config: dict, workspace_path: Path):
240
+ def _ensure_workspace_section(
241
+ config: dict, workspace_path: Path, platforms: list[str] | None = None
242
+ ):
236
243
  """Ensure workspace section exists with basic config."""
237
244
  if "workspace" not in config:
238
245
  config["workspace"] = {}
@@ -247,11 +254,27 @@ def _ensure_workspace_section(config: dict, workspace_path: Path):
247
254
  if "channels" not in workspace:
248
255
  workspace["channels"] = []
249
256
 
250
- # Set platforms if not present
257
+ # Set or extend platforms
251
258
  if "platforms" not in workspace:
252
- # Only add the current platform by default
253
- current_platform = str(Platform.current())
254
- workspace["platforms"] = [current_platform]
259
+ if platforms:
260
+ # Platforms are already in pixi format (linux-64, osx-64, etc.)
261
+ workspace["platforms"] = platforms
262
+ else:
263
+ # Only add the current platform by default
264
+ current_platform = str(Platform.current())
265
+ workspace["platforms"] = [current_platform]
266
+ elif platforms:
267
+ # Extend existing platforms list with new ones (avoiding duplicates)
268
+ existing_platforms = workspace["platforms"]
269
+ if not isinstance(existing_platforms, list):
270
+ existing_platforms = [existing_platforms]
271
+
272
+ # Add new platforms that aren't already in the list
273
+ for platform in platforms:
274
+ if platform not in existing_platforms:
275
+ existing_platforms.append(platform)
276
+
277
+ workspace["platforms"] = existing_platforms
255
278
 
256
279
 
257
280
  def _ensure_channels(config: dict, distro: str):
@@ -326,8 +349,19 @@ def _check_package_availability(
326
349
  return availability
327
350
 
328
351
 
329
- def _ensure_dependencies(config: dict, packages, distro: str):
330
- """Ensure all ROS dependencies are present with comments showing source."""
352
+ def _ensure_dependencies(
353
+ config: dict, packages, distro: str, platforms: list[str] | None = None
354
+ ):
355
+ """
356
+ Ensure all ROS dependencies are present with comments showing source.
357
+
358
+ Generates platform-specific dependencies if multiple platforms are specified.
359
+ Common dependencies (available on all platforms) go in [dependencies],
360
+ platform-specific ones go in [target.{platform}.dependencies].
361
+ """
362
+ # Default to current platform if none specified
363
+ if not platforms:
364
+ platforms = [str(Platform.current())]
331
365
  # Track which packages depend on which conda packages
332
366
  # conda_dep -> set of package names
333
367
  dep_sources: dict[str, set[str]] = {}
@@ -343,48 +377,117 @@ def _ensure_dependencies(config: dict, packages, distro: str):
343
377
  if cmake_version:
344
378
  dep_versions["cmake"] = cmake_version
345
379
 
346
- # Collect dependencies from each package
347
- for pkg in packages:
348
- for ros_dep in pkg.get_all_dependencies():
349
- # Skip workspace packages (they're built locally)
380
+ # Collect version constraints from package.xml
381
+ for ros_dep, version_constraint in pkg.dependency_versions.items():
382
+ # Skip workspace packages
350
383
  if ros_dep in workspace_pkg_names:
351
384
  continue
352
385
 
353
- # Map to conda packages
386
+ # Map ROS package to conda packages
387
+ # Note: We use the first platform for mapping since version constraints
388
+ # should be the same across platforms for a given ROS package
354
389
  conda_packages = map_ros_to_conda(ros_dep, distro)
355
390
 
356
- # Skip if no conda packages were returned
357
- if not conda_packages:
358
- continue
359
-
391
+ # Apply version constraint to all mapped conda packages
360
392
  for conda_dep in conda_packages:
361
- if conda_dep:
362
- if conda_dep not in dep_sources:
363
- dep_sources[conda_dep] = set()
364
- dep_sources[conda_dep].add(pkg.name)
365
-
366
- # Expand GL requirements (REQUIRE_GL, REQUIRE_OPENGL) to platform-specific packages
367
- # This replaces placeholder strings with actual conda packages
368
- expanded_dep_sources: dict[str, set[str]] = {}
369
- all_conda_packages = list(dep_sources.keys())
370
- expanded_packages = expand_gl_requirements(all_conda_packages)
371
-
372
- # Rebuild dep_sources with expanded packages
373
- for expanded_pkg in expanded_packages:
374
- # For expanded packages, merge the sources from the placeholder packages
375
- sources = set()
376
- for original_pkg, pkg_sources in dep_sources.items():
377
- if original_pkg == expanded_pkg:
378
- # Direct match
379
- sources.update(pkg_sources)
380
- elif original_pkg in ("REQUIRE_GL", "REQUIRE_OPENGL"):
381
- # This was a placeholder, include its sources for all expanded packages
382
- sources.update(pkg_sources)
383
-
384
- if sources:
385
- expanded_dep_sources[expanded_pkg] = sources
386
-
387
- dep_sources = expanded_dep_sources
393
+ if conda_dep and not conda_dep.startswith("REQUIRE_"):
394
+ # If package already has a constraint, combine them
395
+ if conda_dep in dep_versions:
396
+ dep_versions[conda_dep] = (
397
+ f"{dep_versions[conda_dep]},{version_constraint}"
398
+ )
399
+ else:
400
+ dep_versions[conda_dep] = version_constraint
401
+
402
+ # Platforms come from CLI as pixi platform names (linux-64, osx-64, etc.)
403
+ # Map them to mapping platform names for querying the mapping files
404
+ pixi_to_mapping = {
405
+ "linux-64": "linux",
406
+ "osx-64": "osx",
407
+ "osx-arm64": "osx",
408
+ "win-64": "win64",
409
+ }
410
+
411
+ # Group pixi platforms by their mapping platform
412
+ # This way osx-64 and osx-arm64 share the same dependencies
413
+ platform_groups = {}
414
+ for platform in platforms:
415
+ mapping_platform = pixi_to_mapping.get(platform, "linux")
416
+ if mapping_platform not in platform_groups:
417
+ platform_groups[mapping_platform] = []
418
+ platform_groups[mapping_platform].append(platform)
419
+
420
+ # Collect dependencies per mapping platform (which groups similar pixi platforms)
421
+ # Structure: mapping_platform -> conda_dep -> set of ROS package names
422
+ platform_deps: dict[str, dict[str, set[str]]] = {
423
+ mapping_platform: {} for mapping_platform in platform_groups.keys()
424
+ }
425
+
426
+ # Collect dependencies from each package, mapped for each platform
427
+ for mapping_platform in platform_groups.keys():
428
+ for pkg in packages:
429
+ for ros_dep in pkg.get_all_dependencies():
430
+ # Skip workspace packages (they're built locally)
431
+ if ros_dep in workspace_pkg_names:
432
+ continue
433
+
434
+ # Map to conda packages for this mapping platform
435
+ conda_packages = map_ros_to_conda(
436
+ ros_dep, distro, platform_override=mapping_platform
437
+ )
438
+
439
+ # Skip if no conda packages were returned
440
+ if not conda_packages:
441
+ continue
442
+
443
+ for conda_dep in conda_packages:
444
+ if conda_dep:
445
+ if conda_dep not in platform_deps[mapping_platform]:
446
+ platform_deps[mapping_platform][conda_dep] = set()
447
+ platform_deps[mapping_platform][conda_dep].add(pkg.name)
448
+
449
+ # Expand GL requirements for this mapping platform
450
+ all_conda_packages = list(platform_deps[mapping_platform].keys())
451
+ expanded_packages = expand_gl_requirements(
452
+ all_conda_packages, platform_override=mapping_platform
453
+ )
454
+
455
+ # Rebuild platform deps with expanded packages
456
+ expanded_platform_deps: dict[str, set[str]] = {}
457
+ for expanded_pkg in expanded_packages:
458
+ sources = set()
459
+ for original_pkg, pkg_sources in platform_deps[mapping_platform].items():
460
+ if original_pkg == expanded_pkg:
461
+ sources.update(pkg_sources)
462
+ elif original_pkg in ("REQUIRE_GL", "REQUIRE_OPENGL"):
463
+ sources.update(pkg_sources)
464
+
465
+ if sources:
466
+ expanded_platform_deps[expanded_pkg] = sources
467
+
468
+ platform_deps[mapping_platform] = expanded_platform_deps
469
+
470
+ # Determine common dependencies (present in all mapping platforms)
471
+ mapping_platform_list = list(platform_groups.keys())
472
+ if len(mapping_platform_list) > 1:
473
+ all_deps = set(platform_deps[mapping_platform_list[0]].keys())
474
+ for mapping_platform in mapping_platform_list[1:]:
475
+ all_deps &= set(platform_deps[mapping_platform].keys())
476
+ common_deps = all_deps
477
+ else:
478
+ # Single mapping platform - all deps are "common"
479
+ common_deps = set(platform_deps[mapping_platform_list[0]].keys())
480
+
481
+ # For backwards compatibility when single platform, use old behavior
482
+ dep_sources = (
483
+ platform_deps[mapping_platform_list[0]]
484
+ if len(mapping_platform_list) == 1
485
+ else {
486
+ dep: platform_deps[mapping_platform_list[0]][dep]
487
+ for dep in common_deps
488
+ if dep in platform_deps[mapping_platform_list[0]]
489
+ }
490
+ )
388
491
 
389
492
  # Create or get dependencies table
390
493
  if "dependencies" not in config:
@@ -426,51 +529,52 @@ def _ensure_dependencies(config: dict, packages, distro: str):
426
529
  dependencies["cmake"] = dep_versions["cmake"]
427
530
 
428
531
  # Add package dependencies
532
+ channels = config.get("workspace", {}).get("channels", [])
533
+
534
+ # Add common dependencies (available on all platforms)
429
535
  if dep_sources:
430
536
  dependencies.add(tomlkit.nl())
431
- dependencies.add(tomlkit.comment("Workspace dependencies"))
537
+ if len(mapping_platform_list) > 1:
538
+ dependencies.add(
539
+ tomlkit.comment("Workspace dependencies (common across platforms)")
540
+ )
541
+ else:
542
+ dependencies.add(tomlkit.comment("Workspace dependencies"))
432
543
 
433
- # Get channels and platform for availability checking
434
- channels = config.get("workspace", {}).get("channels", [])
435
- current_platform = Platform.current()
544
+ # For common deps, check on first pixi platform as representative
545
+ first_pixi_platform = platforms[0]
546
+ first_platform = Platform(first_pixi_platform)
436
547
 
437
- # Get list of packages to check
438
548
  packages_to_check = [
439
549
  conda_dep
440
550
  for conda_dep in dep_sources.keys()
441
551
  if conda_dep not in dependencies
442
552
  ]
443
553
 
444
- # Check package availability if channels are configured
445
554
  availability = {}
446
555
  if channels and packages_to_check:
447
- typer.echo("Checking package availability in channels...")
556
+ typer.echo(
557
+ f"Checking common package availability for {first_pixi_platform}..."
558
+ )
448
559
  availability = _check_package_availability(
449
- packages_to_check, channels, current_platform
560
+ packages_to_check, channels, first_platform
450
561
  )
451
562
 
452
- # Add all dependencies in alphabetical order
453
563
  available_packages = []
454
564
  unavailable_packages = []
455
565
 
456
566
  for conda_dep in sorted(dep_sources.keys()):
457
567
  if conda_dep not in dependencies:
458
- # Check if we have availability info
459
- is_available = availability.get(
460
- conda_dep, True
461
- ) # Default to True if not checked
462
-
568
+ is_available = availability.get(conda_dep, True)
463
569
  if is_available:
464
570
  available_packages.append(conda_dep)
465
571
  else:
466
572
  unavailable_packages.append(conda_dep)
467
573
 
468
- # Add available packages
469
574
  for conda_dep in available_packages:
470
575
  version = dep_versions.get(conda_dep, "*")
471
576
  dependencies[conda_dep] = version
472
577
 
473
- # Add unavailable packages as comments
474
578
  if unavailable_packages:
475
579
  dependencies.add(tomlkit.nl())
476
580
  dependencies.add(
@@ -486,6 +590,159 @@ def _ensure_dependencies(config: dict, packages, distro: str):
486
590
 
487
591
  config["dependencies"] = dependencies
488
592
 
593
+ # Add platform-specific dependencies if multiple mapping platforms
594
+ # First, identify unix dependencies (available on both linux and osx, but not win)
595
+ unix_deps = {}
596
+ if len(mapping_platform_list) > 1:
597
+ has_linux = "linux" in mapping_platform_list
598
+ has_osx = "osx" in mapping_platform_list
599
+ has_win = "win64" in mapping_platform_list or "win" in mapping_platform_list
600
+
601
+ if has_linux and has_osx:
602
+ # Find deps that are on both linux and osx
603
+ linux_only = set(platform_deps.get("linux", {}).keys())
604
+ osx_only = set(platform_deps.get("osx", {}).keys())
605
+ unix_candidates = (linux_only & osx_only) - common_deps
606
+
607
+ # If we also have windows, only move to unix if NOT on windows
608
+ if has_win:
609
+ win_deps = set(platform_deps.get("win64", {}).keys()) | set(
610
+ platform_deps.get("win", {}).keys()
611
+ )
612
+ unix_deps_keys = unix_candidates - win_deps
613
+ else:
614
+ unix_deps_keys = unix_candidates
615
+
616
+ # Move to unix section
617
+ for dep in unix_deps_keys:
618
+ if dep in platform_deps.get("linux", {}):
619
+ unix_deps[dep] = platform_deps["linux"][dep]
620
+
621
+ # Add unix section if there are unix-specific dependencies
622
+ if unix_deps:
623
+ if "target" not in config:
624
+ config["target"] = tomlkit.table()
625
+ if "unix" not in config["target"]:
626
+ config["target"]["unix"] = tomlkit.table()
627
+ if "dependencies" not in config["target"]["unix"]:
628
+ config["target"]["unix"]["dependencies"] = tomlkit.table()
629
+
630
+ target_deps = config["target"]["unix"]["dependencies"]
631
+
632
+ if len(target_deps) == 0:
633
+ target_deps.add(
634
+ tomlkit.comment("Unix-specific dependencies (Linux and macOS)")
635
+ )
636
+
637
+ # Check availability on linux platform as representative
638
+ representative_pixi_platform = platform_groups.get(
639
+ "linux", platform_groups.get("osx", platforms)
640
+ )[0]
641
+ platform_obj = Platform(representative_pixi_platform)
642
+ packages_to_check = list(unix_deps.keys())
643
+
644
+ availability = {}
645
+ if channels and packages_to_check:
646
+ typer.echo("Checking package availability for unix...")
647
+ availability = _check_package_availability(
648
+ packages_to_check, channels, platform_obj
649
+ )
650
+
651
+ available_packages = []
652
+ unavailable_packages = []
653
+
654
+ for conda_dep in sorted(unix_deps.keys()):
655
+ if conda_dep not in target_deps:
656
+ is_available = availability.get(conda_dep, True)
657
+ if is_available:
658
+ available_packages.append(conda_dep)
659
+ else:
660
+ unavailable_packages.append(conda_dep)
661
+
662
+ for conda_dep in available_packages:
663
+ version = dep_versions.get(conda_dep, "*")
664
+ target_deps[conda_dep] = version
665
+
666
+ if unavailable_packages:
667
+ target_deps.add(tomlkit.nl())
668
+ target_deps.add(
669
+ tomlkit.comment("The following packages were not found:")
670
+ )
671
+ for conda_dep in unavailable_packages:
672
+ version = dep_versions.get(conda_dep, "*")
673
+ target_deps.add(
674
+ tomlkit.comment(f'{conda_dep} = "{version}" # NOT FOUND')
675
+ )
676
+
677
+ # Now add remaining platform-specific dependencies (not in common, not in unix)
678
+ if len(mapping_platform_list) > 1:
679
+ for mapping_platform in mapping_platform_list:
680
+ platform_specific_deps = {
681
+ dep: sources
682
+ for dep, sources in platform_deps[mapping_platform].items()
683
+ if dep not in common_deps and dep not in unix_deps
684
+ }
685
+
686
+ if platform_specific_deps:
687
+ # Create target section if needed
688
+ if "target" not in config:
689
+ config["target"] = tomlkit.table()
690
+ if mapping_platform not in config["target"]:
691
+ config["target"][mapping_platform] = tomlkit.table()
692
+ if "dependencies" not in config["target"][mapping_platform]:
693
+ config["target"][mapping_platform]["dependencies"] = tomlkit.table()
694
+
695
+ target_deps = config["target"][mapping_platform]["dependencies"]
696
+
697
+ # Add comment
698
+ if len(target_deps) == 0:
699
+ target_deps.add(
700
+ tomlkit.comment(
701
+ f"Platform-specific dependencies for {mapping_platform}"
702
+ )
703
+ )
704
+
705
+ # Check availability for this mapping platform
706
+ # Use the first pixi platform in the group as representative
707
+ representative_pixi_platform = platform_groups[mapping_platform][0]
708
+ platform_obj = Platform(representative_pixi_platform)
709
+ packages_to_check = list(platform_specific_deps.keys())
710
+
711
+ availability = {}
712
+ if channels and packages_to_check:
713
+ typer.echo(
714
+ f"Checking package availability for {mapping_platform}..."
715
+ )
716
+ availability = _check_package_availability(
717
+ packages_to_check, channels, platform_obj
718
+ )
719
+
720
+ available_packages = []
721
+ unavailable_packages = []
722
+
723
+ for conda_dep in sorted(platform_specific_deps.keys()):
724
+ if conda_dep not in target_deps:
725
+ is_available = availability.get(conda_dep, True)
726
+ if is_available:
727
+ available_packages.append(conda_dep)
728
+ else:
729
+ unavailable_packages.append(conda_dep)
730
+
731
+ for conda_dep in available_packages:
732
+ version = dep_versions.get(conda_dep, "*")
733
+ target_deps[conda_dep] = version
734
+
735
+ if unavailable_packages:
736
+ target_deps.add(tomlkit.nl())
737
+ target_deps.add(
738
+ tomlkit.comment("The following packages were not found:")
739
+ )
740
+ for conda_dep in unavailable_packages:
741
+ version = dep_versions.get(conda_dep, "*")
742
+ target_deps.add(
743
+ tomlkit.comment(f'{conda_dep} = "{version}" # NOT FOUND')
744
+ )
745
+
489
746
 
490
747
  def _ensure_tasks(config: dict):
491
748
  """Ensure common ROS tasks are defined."""
@@ -493,14 +750,27 @@ def _ensure_tasks(config: dict):
493
750
 
494
751
  # Define common ROS tasks if not present
495
752
  default_tasks = {
496
- "build": "colcon build",
497
- "test": "colcon test",
498
- "clean": "rm -rf build install log",
753
+ "build": {
754
+ "cmd": "colcon build",
755
+ "description": "Build the ROS workspace",
756
+ },
757
+ "test": {
758
+ "cmd": "colcon test",
759
+ "description": "Run tests for the workspace",
760
+ },
761
+ "clean": {
762
+ "cmd": "rm -rf build install log",
763
+ "description": "Clean build artifacts (build, install, log directories)",
764
+ },
499
765
  }
500
766
 
501
- for task_name, task_cmd in default_tasks.items():
767
+ for task_name, task_config in default_tasks.items():
502
768
  if task_name not in tasks:
503
- tasks[task_name] = task_cmd
769
+ # Create inline table for task configuration
770
+ task_table = tomlkit.inline_table()
771
+ task_table["cmd"] = task_config["cmd"]
772
+ task_table["description"] = task_config["description"]
773
+ tasks[task_name] = task_table
504
774
 
505
775
  config["tasks"] = tasks
506
776
 
pixi_ros/mappings.py CHANGED
@@ -255,6 +255,45 @@ def is_system_package(package_name: str) -> bool:
255
255
  return package_name in system_packages
256
256
 
257
257
 
258
+ def get_platforms() -> list[str]:
259
+ """
260
+ Get list of supported pixi platforms based on mapping files.
261
+
262
+ Extracts platform names from the mapping data and converts them to
263
+ standard pixi platform names.
264
+
265
+ Mapping files use: linux, osx, win64
266
+ Pixi uses: linux-64, osx-64, osx-arm64, win-64
267
+
268
+ Returns:
269
+ List of pixi platform names
270
+ """
271
+ mappings = get_mappings()
272
+ mapping_platforms = set()
273
+
274
+ # Iterate through mappings to find all platform keys
275
+ for package_mappings in mappings.values():
276
+ for channel_mapping in package_mappings.values():
277
+ if isinstance(channel_mapping, dict):
278
+ # This is a platform-specific mapping
279
+ mapping_platforms.update(channel_mapping.keys())
280
+
281
+ # Convert mapping platforms to pixi platforms
282
+ pixi_platforms = []
283
+ if "linux" in mapping_platforms:
284
+ pixi_platforms.append("linux-64")
285
+ if "osx" in mapping_platforms:
286
+ pixi_platforms.extend(["osx-64", "osx-arm64"])
287
+ if "win64" in mapping_platforms or "win" in mapping_platforms:
288
+ pixi_platforms.append("win-64")
289
+
290
+ return (
291
+ pixi_platforms
292
+ if pixi_platforms
293
+ else ["linux-64", "osx-64", "osx-arm64", "win-64"]
294
+ )
295
+
296
+
258
297
  def get_ros_distros() -> list[str]:
259
298
  """
260
299
  Get list of supported ROS distributions.
pixi_ros/package_xml.py CHANGED
@@ -33,6 +33,11 @@ class PackageXML:
33
33
  # Generic depends (shorthand for build, export, and exec)
34
34
  depends: list[str] = field(default_factory=list)
35
35
 
36
+ # Version constraints for dependencies
37
+ # Maps package name to version constraint string
38
+ # (e.g., ">=3.12.4", ">=1.8.0,<2.0.0")
39
+ dependency_versions: dict[str, str] = field(default_factory=dict)
40
+
36
41
  @classmethod
37
42
  def from_file(cls, path: Path) -> "PackageXML":
38
43
  """
@@ -90,6 +95,36 @@ class PackageXML:
90
95
  build_type = build_type_elem.text
91
96
 
92
97
  # Extract dependencies
98
+ def parse_version_constraint(elem) -> str | None:
99
+ """
100
+ Parse version constraint attributes from a dependency element.
101
+
102
+ Converts ROS package.xml version attributes to conda/pixi constraint syntax:
103
+ - version_lt="X" → <X
104
+ - version_lte="X" → <=X
105
+ - version_eq="X" → ==X
106
+ - version_gte="X" → >=X
107
+ - version_gt="X" → >X
108
+
109
+ Multiple constraints are combined with commas.
110
+ """
111
+ constraints = []
112
+
113
+ version_attrs = [
114
+ ("version_lt", "<"),
115
+ ("version_lte", "<="),
116
+ ("version_eq", "=="),
117
+ ("version_gte", ">="),
118
+ ("version_gt", ">"),
119
+ ]
120
+
121
+ for attr, op in version_attrs:
122
+ value = elem.get(attr)
123
+ if value:
124
+ constraints.append(f"{op}{value}")
125
+
126
+ return ",".join(constraints) if constraints else None
127
+
93
128
  def get_deps(tag: str) -> list[str]:
94
129
  """Extract all dependencies with the given tag."""
95
130
  deps = []
@@ -98,16 +133,41 @@ class PackageXML:
98
133
  deps.append(elem.text.strip())
99
134
  return deps
100
135
 
101
- # Parse all dependency types
102
- buildtool_depends = get_deps("buildtool_depend")
103
- build_depends = get_deps("build_depend")
104
- build_export_depends = get_deps("build_export_depend")
105
- exec_depends = get_deps("exec_depend")
106
- test_depends = get_deps("test_depend")
107
- depends = get_deps("depend")
136
+ def get_deps_with_versions(tag: str, version_map: dict[str, str]) -> list[str]:
137
+ """Extract dependencies and populate version constraints."""
138
+ deps = []
139
+ for elem in root.findall(tag):
140
+ if elem.text:
141
+ pkg_name = elem.text.strip()
142
+ deps.append(pkg_name)
143
+
144
+ # Parse version constraint if present
145
+ constraint = parse_version_constraint(elem)
146
+ if constraint:
147
+ # If package already has a constraint, combine them
148
+ if pkg_name in version_map:
149
+ version_map[pkg_name] = (
150
+ f"{version_map[pkg_name]},{constraint}"
151
+ )
152
+ else:
153
+ version_map[pkg_name] = constraint
154
+ return deps
155
+
156
+ # Parse all dependency types and collect version constraints
157
+ dependency_versions: dict[str, str] = {}
158
+ buildtool_depends = get_deps_with_versions(
159
+ "buildtool_depend", dependency_versions
160
+ )
161
+ build_depends = get_deps_with_versions("build_depend", dependency_versions)
162
+ build_export_depends = get_deps_with_versions(
163
+ "build_export_depend", dependency_versions
164
+ )
165
+ exec_depends = get_deps_with_versions("exec_depend", dependency_versions)
166
+ test_depends = get_deps_with_versions("test_depend", dependency_versions)
167
+ depends = get_deps_with_versions("depend", dependency_versions)
108
168
 
109
169
  # Format 2 compatibility
110
- run_depends = get_deps("run_depend")
170
+ run_depends = get_deps_with_versions("run_depend", dependency_versions)
111
171
 
112
172
  return cls(
113
173
  name=name_elem.text.strip(),
@@ -126,6 +186,7 @@ class PackageXML:
126
186
  test_depends=test_depends,
127
187
  run_depends=run_depends,
128
188
  depends=depends,
189
+ dependency_versions=dependency_versions,
129
190
  )
130
191
 
131
192
  def get_all_build_dependencies(self) -> list[str]:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pixi-ros
3
- Version: 0.1.2
3
+ Version: 0.3.0
4
4
  Summary: Pixi extension for ROS package management
5
5
  Project-URL: Homepage, https://github.com/ruben-arts/pixi-ros
6
6
  Project-URL: Repository, https://github.com/ruben-arts/pixi-ros
@@ -90,13 +90,16 @@ pixi shell
90
90
 
91
91
  ### Dependency Mapping
92
92
 
93
- `pixi-ros` reads all dependency types from `package.xml` files.
93
+ `pixi-ros` reads all dependency types from `package.xml` files.
94
94
  It then does a best effort mapping of ROS package names to conda packages.
95
95
 
96
96
  - **ROS packages**: `ros-{distro}-{package}` from robostack channels (e.g., `ros-humble-rclcpp`)
97
97
  - **System packages**: Mapped to conda-forge equivalents (e.g., `cmake`, `eigen`)
98
+ - **Platform-specific packages**: Different mappings per platform (e.g., OpenGL → `libgl-devel` on Linux, X11 packages on macOS)
98
99
 
99
- After the mapping, it validates package availability in the configured channels. This starts a connection with `https://prefix.dev` to check if packages exist.
100
+ The mapping rules are defined in YAML files (see `src/pixi_ros/data/conda-forge.yaml`) and can be customized by placing your own mapping files in `pixi-ros/*.yaml` or `~/.pixi-ros/*.yaml`.
101
+
102
+ After the mapping, it validates package availability in the configured channels for each target platform. This starts a connection with `https://prefix.dev` to check if packages exist.
100
103
 
101
104
  ### Example
102
105
 
@@ -117,6 +120,41 @@ ros-humble-rclcpp = "*"
117
120
  ros-humble-std-msgs = "*"
118
121
  ```
119
122
 
123
+ ### Version Constraints
124
+
125
+ `pixi-ros` supports version constraints from `package.xml` files and automatically applies them to the generated `pixi.toml`.
126
+
127
+ #### Supported Version Attributes
128
+
129
+ You can specify version requirements in your `package.xml` using standard ROS version attributes:
130
+
131
+ | package.xml attribute | pixi.toml constraint | Description |
132
+ |----------------------|----------------------|-------------|
133
+ | `version_eq="X.Y.Z"` | `==X.Y.Z` | Exactly version X.Y.Z |
134
+ | `version_gte="X.Y.Z"` | `>=X.Y.Z` | Version X.Y.Z or newer |
135
+ | `version_gt="X.Y.Z"` | `>X.Y.Z` | Newer than version X.Y.Z |
136
+ | `version_lte="X.Y.Z"` | `<=X.Y.Z` | Version X.Y.Z or older |
137
+ | `version_lt="X.Y.Z"` | `<X.Y.Z` | Older than version X.Y.Z |
138
+
139
+ Multiple constraints can be combined on the same dependency and will be joined with commas in the output.
140
+
141
+ Given a `package.xml` with version constraints:
142
+
143
+ ```xml
144
+ <depend version_gte="3.12.4">cmake</depend>
145
+ <build_depend version_gte="3.3.0" version_lt="4.0.0">eigen</build_depend>
146
+ <exec_depend version_eq="1.2.3">boost</exec_depend>
147
+ ```
148
+
149
+ `pixi-ros init` generates:
150
+
151
+ ```toml
152
+ [dependencies]
153
+ cmake = ">=3.12.4"
154
+ eigen = ">=3.3.0,<4.0.0"
155
+ boost = "==1.2.3"
156
+ ```
157
+
120
158
  ## Supported ROS Distributions
121
159
 
122
160
  - ROS 2 Humble: https://prefix.dev/robostack-humble
@@ -132,24 +170,102 @@ Initialize or update a ROS workspace's `pixi.toml`.
132
170
 
133
171
  ```bash
134
172
  pixi-ros init --distro <ros_distro>
173
+ pixi-ros init --distro humble --platform linux-64 --platform osx-arm64
135
174
  pixi-ros init
136
175
  ```
137
176
 
138
177
  **Options:**
139
- - `--distro`, `-d`: ROS distribution (optional)
178
+ - `--distro`, `-d`: ROS distribution (optional, will prompt if not provided)
179
+ - `--platform`, `-p`: Target platforms (optional, can be specified multiple times, will prompt if not provided)
180
+ - Available: `linux-64`, `osx-64`, `osx-arm64`, `win-64`
181
+ - Platforms come from the mapping files and determine which dependencies are available
140
182
 
141
183
  **What it does:**
142
184
  - Scans workspace for `package.xml` files
143
- - Reads all dependency types (build, exec, test)
144
- - Maps ROS dependencies to conda packages
185
+ - Reads all dependency types (build, exec, test) and version constraints
186
+ - Maps ROS dependencies to conda packages for each platform
187
+ - Applies version constraints from package.xml to pixi.toml dependencies
145
188
  - Configures robostack channels
146
- - Checks package availability
189
+ - Checks package availability per platform
147
190
  - Creates build tasks using colcon
148
191
  - Generates helpful `README_PIXI.md`
192
+ - Sets up platform-specific dependencies in `pixi.toml`
149
193
 
150
194
  **Running multiple times:**
151
195
  The command is idempotent - you can run it multiple times to update dependencies as your workspace changes.
152
196
 
197
+ ## Multi-Platform Support
198
+
199
+ `pixi-ros` supports generating cross-platform configurations. When you specify multiple platforms, it:
200
+
201
+ 1. **Analyzes dependencies per platform**: Some packages have platform-specific mappings (e.g., OpenGL requirements differ between Linux and macOS)
202
+
203
+ 2. **Organizes dependencies intelligently**:
204
+ - **Common dependencies** (available on all platforms) → `[dependencies]`
205
+ - **Unix dependencies** (available on Linux and macOS, but not Windows) → `[target.unix.dependencies]`
206
+ - **Platform-specific dependencies** → `[target.linux.dependencies]`, `[target.osx.dependencies]`, etc.
207
+
208
+ 3. **Sets up the correct platform list**: The `[workspace]` section gets the appropriate pixi platform names
209
+
210
+ ### Platform Naming
211
+
212
+ pixi-ros uses standard pixi platform names:
213
+ - `linux-64` - Linux x86_64
214
+ - `osx-64` - macOS Intel
215
+ - `osx-arm64` - macOS Apple Silicon (M1/M2/M3)
216
+ - `win-64` - Windows x86_64
217
+
218
+ Internally, mapping files use a simplified format (`linux`, `osx`, `win64`), but this is transparent to users. When you specify `osx-64` and `osx-arm64`, they both use the same `osx` mapping rules since package availability is typically the same for both architectures.
219
+
220
+ ### Example: Multi-Platform Setup
221
+
222
+ ```bash
223
+ pixi-ros init --distro humble --platform linux-64 --platform osx-arm64
224
+ ```
225
+
226
+ Generates:
227
+
228
+ ```toml
229
+ [workspace]
230
+ name = "my_workspace"
231
+ channels = [
232
+ "https://prefix.dev/robostack-humble",
233
+ "https://prefix.dev/conda-forge",
234
+ ]
235
+ platforms = ["linux-64", "osx-arm64"]
236
+
237
+ [dependencies]
238
+ # Common dependencies (available on all platforms)
239
+ ros-humble-rclcpp = "*"
240
+ ros-humble-std-msgs = "*"
241
+
242
+ [target.unix.dependencies]
243
+ # Unix-specific dependencies (Linux and macOS)
244
+ xorg-libx11 = "*"
245
+ xorg-libxext = "*"
246
+
247
+ [target.linux.dependencies]
248
+ # Linux-specific dependencies
249
+ libgl-devel = "*"
250
+ libopengl-devel = "*"
251
+ ```
252
+
253
+ ### Interactive Platform Selection
254
+
255
+ If you don't specify platforms, you'll be prompted:
256
+
257
+ ```bash
258
+ $ pixi-ros init --distro humble
259
+
260
+ Available target platforms:
261
+ 1. linux-64
262
+ 2. osx-64
263
+ 3. osx-arm64
264
+ 4. win-64
265
+
266
+ Select platforms (enter numbers or names, comma or space separated): 1 3
267
+ ```
268
+
153
269
  ## Philosophy
154
270
 
155
271
  `pixi-ros` aims to be a quick **gateway drug**. It:
@@ -0,0 +1,16 @@
1
+ pixi_ros/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
+ pixi_ros/cli.py,sha256=85lRSSsnSWAxqhkwjT0OiObYdTu84YBJXO576k0fOlY,4351
3
+ pixi_ros/config.py,sha256=JevtFXh96UJj7aMUWp18J1xcPeQPoLzh3hdiu5uPR0s,834
4
+ pixi_ros/init.py,sha256=wX7VqJFuh0q3OGNZ7_BLVM3BYyTdbI7_BhzHJfJJLIw,30835
5
+ pixi_ros/mappings.py,sha256=AlXT_VsPV7KeEQxHkbBrnc79LXwizrCk7oG9-fYX5MI,10376
6
+ pixi_ros/package_xml.py,sha256=XnDkKuccSar1ndmvACZMZ7c2yUNXz7CcfQRwluZolqQ,8815
7
+ pixi_ros/utils.py,sha256=uGgB8CYiM_3KfBtqvUKqkEXXTffv8FkkaIC230peHUY,2026
8
+ pixi_ros/workspace.py,sha256=N5Aqcl77J8aLrEDr4T-XR9V5fBjZ1KQHXd4dkbgX8HU,6838
9
+ pixi_ros/data/README.md,sha256=Tdc2sTUuvoyEaHYlmM_C1pf3qr0o0P5Lu2ZUQ88tUjI,1602
10
+ pixi_ros/data/README_PIXI.md.template,sha256=q7g65oHmrEqKTtqOT7lgX6l9RI69w64B0DCLwhf8ocM,3076
11
+ pixi_ros/data/conda-forge.yaml,sha256=DeMfdzEuFCFXLmceV6ENmGpSBo6tMZh-Gx-ZkEAakT8,18941
12
+ pixi_ros-0.3.0.dist-info/METADATA,sha256=fDBOMPyY5DSt4N87s-pugMV0JVCmOOdDU76x8HyND0M,10714
13
+ pixi_ros-0.3.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
14
+ pixi_ros-0.3.0.dist-info/entry_points.txt,sha256=DpBwU4Djcej8gT42q8Ccuv-R9pdmGHyFV5p57_ogqfQ,47
15
+ pixi_ros-0.3.0.dist-info/licenses/LICENSE,sha256=pAZXnNE2dxxwXFIduGyn1gpvPefJtUYOYZOi3yeGG94,1068
16
+ pixi_ros-0.3.0.dist-info/RECORD,,
@@ -1,16 +0,0 @@
1
- pixi_ros/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- pixi_ros/cli.py,sha256=eLT1GNfq3JY8yzJrxDkNjeUtKI5T8T_57XaDMw_XX-s,2145
3
- pixi_ros/config.py,sha256=JevtFXh96UJj7aMUWp18J1xcPeQPoLzh3hdiu5uPR0s,834
4
- pixi_ros/init.py,sha256=Y4k2y_NZOwt0UQ4VMOIWr3Eyg3i2IPU83xtQPd1uW2U,19107
5
- pixi_ros/mappings.py,sha256=WCo1ftPe8knJROy2NyLuPtk0R-zDGWTgCcQnGp5LkFA,9139
6
- pixi_ros/package_xml.py,sha256=a1_zJVc73eMOt67BIk2ITnxLPN1RIUOs0oL42q6CLvI,6279
7
- pixi_ros/utils.py,sha256=uGgB8CYiM_3KfBtqvUKqkEXXTffv8FkkaIC230peHUY,2026
8
- pixi_ros/workspace.py,sha256=N5Aqcl77J8aLrEDr4T-XR9V5fBjZ1KQHXd4dkbgX8HU,6838
9
- pixi_ros/data/README.md,sha256=Tdc2sTUuvoyEaHYlmM_C1pf3qr0o0P5Lu2ZUQ88tUjI,1602
10
- pixi_ros/data/README_PIXI.md.template,sha256=q7g65oHmrEqKTtqOT7lgX6l9RI69w64B0DCLwhf8ocM,3076
11
- pixi_ros/data/conda-forge.yaml,sha256=O3r31jNNU-lPCgvFlmhytTp_R1UMgIBRwDaeSebI7wI,18900
12
- pixi_ros-0.1.2.dist-info/METADATA,sha256=tLDuzO87xAZwkHWeSqMyj8IC2hIxXn3q1BeYZM2Dxfs,6521
13
- pixi_ros-0.1.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
14
- pixi_ros-0.1.2.dist-info/entry_points.txt,sha256=DpBwU4Djcej8gT42q8Ccuv-R9pdmGHyFV5p57_ogqfQ,47
15
- pixi_ros-0.1.2.dist-info/licenses/LICENSE,sha256=pAZXnNE2dxxwXFIduGyn1gpvPefJtUYOYZOi3yeGG94,1068
16
- pixi_ros-0.1.2.dist-info/RECORD,,