pixi-ros 0.3.0-py3-none-any.whl → 0.4.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pixi_ros/cli.py +42 -29
- pixi_ros/data/conda-forge.yaml +4 -0
- pixi_ros/init.py +324 -198
- pixi_ros/mappings.py +39 -30
- pixi_ros/validator.py +245 -0
- {pixi_ros-0.3.0.dist-info → pixi_ros-0.4.0.dist-info}/METADATA +83 -26
- pixi_ros-0.4.0.dist-info/RECORD +17 -0
- pixi_ros-0.3.0.dist-info/RECORD +0 -16
- {pixi_ros-0.3.0.dist-info → pixi_ros-0.4.0.dist-info}/WHEEL +0 -0
- {pixi_ros-0.3.0.dist-info → pixi_ros-0.4.0.dist-info}/entry_points.txt +0 -0
- {pixi_ros-0.3.0.dist-info → pixi_ros-0.4.0.dist-info}/licenses/LICENSE +0 -0
pixi_ros/init.py
CHANGED
@@ -4,14 +4,19 @@ from pathlib import Path
 
 import tomlkit
 import typer
-from rattler import Channel, Gateway, Platform
+from rattler import Platform
 from rich.console import Console
 from rich.panel import Panel
 from rich.table import Table
 from rich.text import Text
 
-from pixi_ros.mappings import expand_gl_requirements, map_ros_to_conda
+from pixi_ros.mappings import (
+    expand_gl_requirements,
+    get_mappings,
+    map_ros_to_conda,
+)
 from pixi_ros.utils import detect_cmake_version_requirement
+from pixi_ros.validator import PackageSource, RosDistroValidator
 from pixi_ros.workspace import discover_packages, find_workspace_root
 
 console = Console()
@@ -36,12 +41,6 @@ def init_workspace(
     Raises:
         typer.Exit: If validation fails or workspace not found
     """
-    # Validate distro
-    if not validate_distro(distro):
-        typer.echo(f"Error: Unsupported ROS distribution '{distro}'", err=True)
-        typer.echo("Supported distros: humble, iron, jazzy, rolling", err=True)
-        raise typer.Exit(code=1)
-
     # Find workspace root
     if workspace_path is None:
         workspace_path = find_workspace_root()
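Aside: 0.4.0 drops this hardcoded distro allow-list; per the next hunk, a RosDistroValidator now fetches the ROS distribution index at runtime. A minimal sketch of that idea, assuming the well-known public rosdistro index URL and PyYAML — the real implementation lives in the new pixi_ros/validator.py, which this diff does not show:

# Sketch only - not pixi-ros's actual code. Assumes the public rosdistro
# index (index-v4.yaml) and PyYAML; the real logic is in pixi_ros/validator.py.
import urllib.request

import yaml

ROSDISTRO_INDEX_URL = (
    "https://raw.githubusercontent.com/ros/rosdistro/master/index-v4.yaml"
)


def distro_exists(distro: str) -> bool:
    """Validate a distro name against the live index instead of a hardcoded list."""
    with urllib.request.urlopen(ROSDISTRO_INDEX_URL, timeout=30) as resp:
        index = yaml.safe_load(resp.read())
    return distro in index.get("distributions", {})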
@@ -88,13 +87,32 @@ def init_workspace(
     typer.echo(f"Creating new {pixi_toml_path}")
     pixi_config = tomlkit.document()
 
+    # Create validator for package validation
+    validator = None
+    with console.status(
+        f"[cyan]Fetching ROS {distro} distribution index...[/cyan]",
+        spinner="dots",
+    ):
+        validator = RosDistroValidator(distro)
+
+    if validator._init_error:
+        typer.echo(
+            f"Warning: Could not initialize ROS distro validator: "
+            f"{validator._init_error}",
+            err=True,
+        )
+        typer.echo("Continuing with fallback package resolution...", err=True)
+        validator = None
+
     # Display discovered dependencies
-    _display_dependencies(packages, distro)
+    not_found_packages = _display_dependencies(packages, distro, validator)
 
     # Update configuration
     _ensure_workspace_section(pixi_config, workspace_path, platforms)
     _ensure_channels(pixi_config, distro)
-    _ensure_dependencies(pixi_config, packages, distro, platforms)
+    _ensure_dependencies(
+        pixi_config, packages, distro, platforms, validator, not_found_packages
+    )
     _ensure_tasks(pixi_config)
     _ensure_activation(pixi_config)
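The validator's interface is only visible through its call sites in this diff: construction may fail softly via `_init_error`, and `validate_package` returns an object with `.source` and `.conda_packages`. A hedged reconstruction of that surface follows; any names beyond those call sites are guesses, and the real definitions are in the new pixi_ros/validator.py (+245 lines, not shown):

# Inferred interface sketch - not the actual pixi_ros/validator.py.
from dataclasses import dataclass, field
from enum import Enum, auto


class PackageSource(Enum):
    """Where a ROS dependency resolved from (members inferred from call sites)."""
    WORKSPACE = auto()
    MAPPING = auto()
    ROS_DISTRO = auto()
    CONDA_FORGE = auto()
    NOT_FOUND = auto()


@dataclass
class ValidationResult:
    """Shape implied by `result.source` / `result.conda_packages` usage."""
    source: PackageSource
    conda_packages: list[str] = field(default_factory=list)


class RosDistroValidator:
    def __init__(self, distro: str):
        # The caller checks `_init_error` and falls back to offline resolution,
        # so construction is expected to swallow network failures. (Type assumed.)
        self.distro = distro
        self._init_error: Exception | None = None

    def validate_package(
        self, ros_dep: str, workspace_pkgs: set[str], mappings: dict, platform: str
    ) -> ValidationResult:
        ...  # resolution order: workspace -> mappings -> ROS distro -> conda-forge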
@@ -153,16 +171,97 @@ def init_workspace(
         raise typer.Exit(code=1) from e
 
 
-def _display_dependencies(packages, distro: str):
-    """Display discovered dependencies in a rich table."""
+def _display_dependencies(packages, distro: str, validator=None):
+    """Display discovered dependencies in a rich table.
+
+    Returns:
+        Dict of NOT_FOUND packages: {ros_package: (source_pkgs, conda_pkgs)}
+    """
     if not packages:
-        return
+        return {}
 
     workspace_pkg_names = {pkg.name for pkg in packages}
 
-    #
-
-
+    # First, collect all unique dependencies to avoid duplicate validation
+    all_unique_deps: dict[str, set[str]] = {}  # {ros_dep: set of source packages}
+    # {ros_dep: set of dep types (Build/Runtime/Test)}
+    dep_types: dict[str, set[str]] = {}
+
+    for pkg in packages:
+        for ros_dep in pkg.get_all_build_dependencies():
+            if ros_dep not in workspace_pkg_names:
+                if ros_dep not in all_unique_deps:
+                    all_unique_deps[ros_dep] = set()
+                    dep_types[ros_dep] = set()
+                all_unique_deps[ros_dep].add(pkg.name)
+                dep_types[ros_dep].add("Build")
+
+        for ros_dep in pkg.get_all_runtime_dependencies():
+            if ros_dep not in workspace_pkg_names:
+                if ros_dep not in all_unique_deps:
+                    all_unique_deps[ros_dep] = set()
+                    dep_types[ros_dep] = set()
+                all_unique_deps[ros_dep].add(pkg.name)
+                dep_types[ros_dep].add("Runtime")
+
+        for ros_dep in pkg.get_all_test_dependencies():
+            if ros_dep not in workspace_pkg_names:
+                if ros_dep not in all_unique_deps:
+                    all_unique_deps[ros_dep] = set()
+                    dep_types[ros_dep] = set()
+                all_unique_deps[ros_dep].add(pkg.name)
+                dep_types[ros_dep].add("Test")
+
+    # Validate each unique dependency once
+    # {ros_dep: (conda_packages, source_label)}
+    validation_results: dict[str, tuple[list[str], str]] = {}
+    validation_stats = {
+        "workspace": 0,
+        "mapping": 0,
+        "ros_distro": 0,
+        "conda_forge": 0,
+        "not_found": 0,
+    }
+    not_found_packages: dict[str, tuple[set[str], list[str]]] = {}
+
+    for ros_dep, source_pkgs in all_unique_deps.items():
+        source_label = "Fallback"
+        if validator:
+            mappings = get_mappings()
+            result = validator.validate_package(
+                ros_dep, workspace_pkg_names, mappings, str(Platform.current())
+            )
+            if result.source == PackageSource.WORKSPACE:
+                validation_stats["workspace"] += 1
+                source_label = "[dim]Workspace[/dim]"
+            elif result.source == PackageSource.MAPPING:
+                validation_stats["mapping"] += 1
+                source_label = "[cyan]Mapping[/cyan]"
+            elif result.source == PackageSource.ROS_DISTRO:
+                validation_stats["ros_distro"] += 1
+                source_label = f"[green]ROS {distro}[/green]"
+            elif result.source == PackageSource.CONDA_FORGE:
+                validation_stats["conda_forge"] += 1
+                source_label = "[blue]conda-forge[/blue]"
+            elif result.source == PackageSource.NOT_FOUND:
+                validation_stats["not_found"] += 1
+                source_label = "[red]NOT FOUND[/red]"
+                # Track NOT_FOUND packages
+                not_found_packages[ros_dep] = (
+                    source_pkgs.copy(),
+                    result.conda_packages,
+                )
+
+        conda_packages = map_ros_to_conda(
+            ros_dep,
+            distro,
+            validator=validator,
+            workspace_packages=workspace_pkg_names,
+        )
+        validation_results[ros_dep] = (conda_packages, source_label)
+
+    # Now organize by package and type for display
+    pkg_deps: dict[str, dict[str, dict[str, tuple[list[str], str]]]] = {}
 
     for pkg in packages:
         pkg_deps[pkg.name] = {"Build": {}, "Runtime": {}, "Test": {}}
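The three collection loops above are intentionally repetitive; a compact equivalent with collections.defaultdict shows the underlying pattern (deduplicate dependencies across the workspace so each one is validated exactly once). `Pkg` is a stand-in for the real package objects and their get_all_*_dependencies() accessors:

from collections import defaultdict
from dataclasses import dataclass, field


@dataclass
class Pkg:  # stand-in for the real workspace package type
    name: str
    build: list[str] = field(default_factory=list)
    run: list[str] = field(default_factory=list)
    test: list[str] = field(default_factory=list)


def collect_unique_deps(packages: list[Pkg], workspace: set[str]):
    all_unique_deps: dict[str, set[str]] = defaultdict(set)  # dep -> requesting pkgs
    dep_types: dict[str, set[str]] = defaultdict(set)        # dep -> {Build,Runtime,Test}
    for pkg in packages:
        for dep_type, deps in (("Build", pkg.build), ("Runtime", pkg.run), ("Test", pkg.test)):
            for dep in deps:
                if dep not in workspace:  # workspace members are built from source
                    all_unique_deps[dep].add(pkg.name)
                    dep_types[dep].add(dep_type)
    return all_unique_deps, dep_types


pkgs = [Pkg("a", build=["rclcpp"], test=["ament_lint"]), Pkg("b", run=["rclcpp"])]
uniq, types = collect_unique_deps(pkgs, workspace={"a", "b"})
print(dict(uniq))   # {'rclcpp': {'a', 'b'}, 'ament_lint': {'a'}}
print(dict(types))  # {'rclcpp': {'Build', 'Runtime'}, 'ament_lint': {'Test'}}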
@@ -171,25 +270,37 @@ def _display_dependencies(packages, distro: str):
         for ros_dep in pkg.get_all_build_dependencies():
             if ros_dep in workspace_pkg_names:
                 continue
-
-            if
-
+
+            if ros_dep in validation_results:
+                conda_packages, source_label = validation_results[ros_dep]
+                if conda_packages:
+                    pkg_deps[pkg.name]["Build"][ros_dep] = (
+                        conda_packages,
+                        source_label,
+                    )
 
         # Runtime dependencies
         for ros_dep in pkg.get_all_runtime_dependencies():
             if ros_dep in workspace_pkg_names:
                 continue
-
-            if
-
+
+            if ros_dep in validation_results:
+                conda_packages, source_label = validation_results[ros_dep]
+                if conda_packages:
+                    pkg_deps[pkg.name]["Runtime"][ros_dep] = (
+                        conda_packages,
+                        source_label,
+                    )
 
         # Test dependencies
         for ros_dep in pkg.get_all_test_dependencies():
             if ros_dep in workspace_pkg_names:
                 continue
-
-            if
-
+
+            if ros_dep in validation_results:
+                conda_packages, source_label = validation_results[ros_dep]
+                if conda_packages:
+                    pkg_deps[pkg.name]["Test"][ros_dep] = (conda_packages, source_label)
 
     # Check if any external dependencies exist
     has_deps = any(
@@ -211,9 +322,18 @@ def _display_dependencies(packages, distro: str):
        all_deps = []
        for dep_type in ["Build", "Runtime", "Test"]:
            for ros_dep in sorted(pkg_info[dep_type].keys()):
-
-
-
+                dep_info = pkg_info[dep_type][ros_dep]
+                # Handle both tuple format (with source) and list format
+                # (backward compat)
+                if isinstance(dep_info, tuple):
+                    conda_pkgs, source = dep_info
+                else:
+                    conda_pkgs = dep_info
+                    source = "Fallback"
+                conda_str = (
+                    ", ".join(conda_pkgs) if conda_pkgs else "[red]NOT FOUND[/red]"
+                )
+                all_deps.append((ros_dep, dep_type, conda_str, source))
 
        # Skip packages with no external dependencies
        if not all_deps:
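The isinstance check above keeps older entries that stored a bare conda-package list working next to the new (packages, source) tuples; the normalization in isolation:

# Accept the new (conda_pkgs, source) tuple or the legacy bare list,
# and always hand back a tuple.
def normalize(dep_info):
    if isinstance(dep_info, tuple):
        return dep_info               # new format: (conda_pkgs, source_label)
    return dep_info, "Fallback"       # legacy format: just the package list


print(normalize((["ros-jazzy-rclcpp"], "Mapping")))  # (['ros-jazzy-rclcpp'], 'Mapping')
print(normalize(["ros-jazzy-rclcpp"]))               # (['ros-jazzy-rclcpp'], 'Fallback')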
@@ -228,14 +348,58 @@ def _display_dependencies(packages, distro: str):
        table.add_column("ROS Dependency", style="yellow")
        table.add_column("Type", style="blue")
        table.add_column("Conda Packages", style="green")
+        if validator:
+            table.add_column("Source", style="magenta")
 
        # Add all dependencies for this package
-        for
-
+        for dep_data in all_deps:
+            if validator:
+                ros_dep, dep_type, conda_str, source = dep_data
+                table.add_row(ros_dep, dep_type, conda_str, source)
+            else:
+                ros_dep, dep_type, conda_str, _ = dep_data
+                table.add_row(ros_dep, dep_type, conda_str)
 
        console.print(table)
        console.print("")
 
+    # Display validation summary if validator was used
+    if validator:
+        console.print("\n[bold cyan]Validation Summary:[/bold cyan]")
+        total_deps = sum(validation_stats.values())
+
+        if validation_stats["workspace"] > 0:
+            console.print(
+                f" [green]✓[/green] {validation_stats['workspace']} "
+                f"workspace packages (skipped)"
+            )
+        if validation_stats["mapping"] > 0:
+            console.print(
+                f" [green]✓[/green] {validation_stats['mapping']} "
+                f"packages from mappings"
+            )
+        if validation_stats["ros_distro"] > 0:
+            console.print(
+                f" [green]✓[/green] {validation_stats['ros_distro']} "
+                f"packages from ROS {distro} distro"
+            )
+        if validation_stats["conda_forge"] > 0:
+            console.print(
+                f" [green]✓[/green] {validation_stats['conda_forge']} "
+                f"packages from conda-forge (auto-detected)"
+            )
+        if validation_stats["not_found"] > 0:
+            console.print(
+                f" [yellow]⚠[/yellow] {validation_stats['not_found']} "
+                f"packages NOT FOUND (will be commented out)"
+            )
+
+        total_external = total_deps - validation_stats["workspace"]
+        console.print(f"\n Total external dependencies: {total_external}")
+        console.print("")
+
+    return not_found_packages
+
 
 def _ensure_workspace_section(
     config: dict, workspace_path: Path, platforms: list[str] | None = None
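rich requires each add_row call to supply exactly as many values as the table declares columns, which is why the "Source" column and its cell are both gated on `validator`. A standalone demo of that optional-column pattern:

from rich.console import Console
from rich.table import Table

console = Console()
show_source = True  # plays the role of `validator` being non-None

table = Table(title="deps")
table.add_column("ROS Dependency", style="yellow")
table.add_column("Conda Packages", style="green")
if show_source:
    table.add_column("Source", style="magenta")

row = ("rclcpp", "ros-jazzy-rclcpp", "[green]ROS jazzy[/green]")
if show_source:
    table.add_row(*row)
else:
    table.add_row(*row[:2])  # drop the source cell to keep arity in sync

console.print(table)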
@@ -296,65 +460,20 @@ def _ensure_channels(config: dict, distro: str):
         workspace["channels"] = channels
 
 
-def _check_package_availability(
-    packages: list[str], channels: list[str], platform: Platform
-) -> dict[str, bool]:
-    """
-    Check if packages are available in the given channels.
-
-    Args:
-        packages: List of conda package names to check
-        channels: List of channel URLs
-        platform: Platform to check for
-
-    Returns:
-        Dictionary mapping package names to availability (True/False)
-    """
-    import asyncio
-
-    availability = dict.fromkeys(packages, False)
-
-    try:
-        # Create gateway for fetching repo data
-        gateway = Gateway()
-
-        # Convert channel URLs to Channel objects
-        channel_objects = [Channel(url) for url in channels]
-
-        # Query all channels at once (gateway.query is async)
-        repo_data_by_channel = asyncio.run(
-            gateway.query(
-                channel_objects,
-                [platform],
-                specs=packages,  # Correct parameter name
-                recursive=False,  # Don't fetch dependencies
-            )
-        )
-
-        # repo_data_by_channel is a list of lists (one per channel)
-        # Check all channels for each package
-        for channel_records in repo_data_by_channel:
-            for record in channel_records:
-                # Check if any of our packages match this record
-                for package_name in packages:
-                    if record.name.normalized == package_name.lower():
-                        availability[package_name] = True
-
-    except Exception as e:
-        # If query fails, log the error but continue (all marked as unavailable)
-        console.print(
-            f"[yellow]Warning: Could not check package availability: {e}[/yellow]"
-        )
-
-    return availability
-
-
 def _ensure_dependencies(
-    config: dict, packages, distro: str, platforms: list[str] | None = None
+    config: dict,
+    packages,
+    distro: str,
+    platforms: list[str] | None = None,
+    validator=None,
+    not_found_from_display: dict[str, tuple[set[str], list[str]]] | None = None,
 ):
     """
     Ensure all ROS dependencies are present with comments showing source.
 
+    Args:
+        not_found_from_display: NOT_FOUND packages from _display_dependencies
+
     Generates platform-specific dependencies if multiple platforms are specified.
     Common dependencies (available on all platforms) go in [dependencies],
     platform-specific ones go in [target.{platform}.dependencies].
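Design note: the deleted `_check_package_availability` hit the channels' repodata through py-rattler once per platform group while writing pixi.toml; 0.4.0 replaces those repeated queries with the single validation pass in `_display_dependencies`, whose NOT_FOUND results are threaded in via the new `not_found_from_display` parameter. For reference, the deleted helper condenses to the following sketch, using the same rattler calls as the removed code above:

# Condensed form of the deleted helper, kept for reference only.
import asyncio

from rattler import Channel, Gateway, Platform


def available(packages: list[str], channels: list[str], platform: Platform) -> dict[str, bool]:
    gateway = Gateway()
    # One query across all channels for the given platform; recursive=False
    # avoids pulling in the packages' own dependencies.
    records = asyncio.run(
        gateway.query(
            [Channel(url) for url in channels],
            [platform],
            specs=packages,
            recursive=False,
        )
    )
    found = {r.name.normalized for channel_records in records for r in channel_records}
    return {p: p.lower() in found for p in packages}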
@@ -386,7 +505,12 @@ def _ensure_dependencies(
     # Map ROS package to conda packages
     # Note: We use the first platform for mapping since version constraints
     # should be the same across platforms for a given ROS package
-    conda_packages = map_ros_to_conda(ros_dep, distro)
+    conda_packages = map_ros_to_conda(
+        ros_dep,
+        distro,
+        validator=validator,
+        workspace_packages=workspace_pkg_names,
+    )
 
     # Apply version constraint to all mapped conda packages
     for conda_dep in conda_packages:
@@ -423,6 +547,22 @@ def _ensure_dependencies(
         mapping_platform: {} for mapping_platform in platform_groups.keys()
     }
 
+    # Track NOT_FOUND packages per platform
+    # Structure: mapping_platform -> ros_package -> (set of sources, conda_packages)
+    not_found_packages: dict[str, dict[str, tuple[set[str], list[str]]]] = {
+        mapping_platform: {} for mapping_platform in platform_groups.keys()
+    }
+
+    # Use NOT_FOUND data from _display_dependencies (already validated there)
+    if not_found_from_display:
+        # Populate not_found_packages for all platforms with the data from display
+        for mapping_platform in platform_groups.keys():
+            for ros_dep, (source_pkgs, conda_pkgs) in not_found_from_display.items():
+                not_found_packages[mapping_platform][ros_dep] = (
+                    source_pkgs.copy(),
+                    conda_pkgs,
+                )
+
     # Collect dependencies from each package, mapped for each platform
     for mapping_platform in platform_groups.keys():
         for pkg in packages:
@@ -431,9 +571,17 @@ def _ensure_dependencies(
                 if ros_dep in workspace_pkg_names:
                     continue
 
-                #
+                # Skip NOT_FOUND packages (already tracked from display)
+                if not_found_from_display and ros_dep in not_found_from_display:
+                    continue
+
+                # Map to conda packages WITHOUT validator (already validated in display)
                 conda_packages = map_ros_to_conda(
-                    ros_dep, distro, platform_override=mapping_platform
+                    ros_dep,
+                    distro,
+                    platform_override=mapping_platform,
+                    validator=None,
+                    workspace_packages=workspace_pkg_names,
                 )
 
                 # Skip if no conda packages were returned
@@ -478,6 +626,16 @@ def _ensure_dependencies(
         # Single mapping platform - all deps are "common"
         common_deps = set(platform_deps[mapping_platform_list[0]].keys())
 
+    # Determine common NOT_FOUND packages (present in all mapping platforms)
+    common_not_found = set()
+    if len(mapping_platform_list) > 1:
+        common_not_found = set(not_found_packages[mapping_platform_list[0]].keys())
+        for mapping_platform in mapping_platform_list[1:]:
+            common_not_found &= set(not_found_packages[mapping_platform].keys())
+
+    # Will be set later when processing unix deps
+    unix_not_found = set()
+
     # For backwards compatibility when single platform, use old behavior
     dep_sources = (
         platform_deps[mapping_platform_list[0]]
@@ -497,6 +655,9 @@ def _ensure_dependencies(
     # Add base ROS dependencies with comment
     base_deps = {
         f"ros-{distro}-ros-base": "*",
+    }
+
+    build_deps = {
         "pkg-config": "*",
         "compilers": "*",
         "make": "*",
@@ -511,8 +672,14 @@ def _ensure_dependencies(
         if dep not in dependencies:
             dependencies[dep] = version
 
+    for dep, version in build_deps.items():
+        if dep not in dependencies:
+            dependencies[dep] = version
+
     # Add ros2cli packages
-    ros2cli_deps = map_ros_to_conda("ros2cli", distro)
+    ros2cli_deps = map_ros_to_conda(
+        "ros2cli", distro, validator=validator, workspace_packages=workspace_pkg_names
+    )
     if ros2cli_deps:
         for conda_pkg in ros2cli_deps:
             if conda_pkg and conda_pkg not in dependencies:
@@ -529,8 +696,6 @@ def _ensure_dependencies(
         dependencies["cmake"] = dep_versions["cmake"]
 
     # Add package dependencies
-    channels = config.get("workspace", {}).get("channels", [])
-
     # Add common dependencies (available on all platforms)
     if dep_sources:
         dependencies.add(tomlkit.nl())
@@ -541,52 +706,33 @@ def _ensure_dependencies(
     else:
         dependencies.add(tomlkit.comment("Workspace dependencies"))
 
-    #
-    first_pixi_platform = platforms[0]
-    first_platform = Platform(first_pixi_platform)
-
-    packages_to_check = [
-        conda_dep
-        for conda_dep in dep_sources.keys()
-        if conda_dep not in dependencies
-    ]
-
-    availability = {}
-    if channels and packages_to_check:
-        typer.echo(
-            f"Checking common package availability for {first_pixi_platform}..."
-        )
-        availability = _check_package_availability(
-            packages_to_check, channels, first_platform
-        )
-
-    available_packages = []
-    unavailable_packages = []
-
+    # Add all dependencies (validation already checked availability)
     for conda_dep in sorted(dep_sources.keys()):
         if conda_dep not in dependencies:
-
-
-            available_packages.append(conda_dep)
-        else:
-            unavailable_packages.append(conda_dep)
-
-    for conda_dep in available_packages:
-        version = dep_versions.get(conda_dep, "*")
-        dependencies[conda_dep] = version
+            version = dep_versions.get(conda_dep, "*")
+            dependencies[conda_dep] = version
 
-
+    # Add NOT_FOUND packages as comments (common across all platforms)
+    if len(mapping_platform_list) > 1:
+        if common_not_found:
             dependencies.add(tomlkit.nl())
-            dependencies.add(
-
-
-
-
-
-
-
-
+            dependencies.add(tomlkit.comment("The following packages were not found:"))
+            for ros_pkg in sorted(common_not_found):
+                _, conda_pkgs = not_found_packages[mapping_platform_list[0]][ros_pkg]
+                # Use conda package name if available, otherwise ros package name
+                pkg_name = conda_pkgs[0] if conda_pkgs else ros_pkg
+                dependencies.add(tomlkit.comment(f'{pkg_name} = "*"'))
+    else:
+        # Single platform - add all NOT_FOUND packages
+        mapping_platform = mapping_platform_list[0]
+        if not_found_packages[mapping_platform]:
+            dependencies.add(tomlkit.nl())
+            dependencies.add(tomlkit.comment("The following packages were not found:"))
+            for ros_pkg in sorted(not_found_packages[mapping_platform].keys()):
+                _, conda_pkgs = not_found_packages[mapping_platform][ros_pkg]
+                # Use conda package name if available, otherwise ros package name
+                pkg_name = conda_pkgs[0] if conda_pkgs else ros_pkg
+                dependencies.add(tomlkit.comment(f'{pkg_name} = "*"'))
 
     config["dependencies"] = dependencies
 
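Unresolvable packages are emitted as TOML comments, so the generated pixi.toml stays parseable while still recording what could not be mapped. A small self-contained example of the same tomlkit comment/newline API used above; the unresolved package name is hypothetical:

import tomlkit

doc = tomlkit.document()
deps = tomlkit.table()
deps["ros-jazzy-ros-base"] = "*"
deps.add(tomlkit.nl())
deps.add(tomlkit.comment("The following packages were not found:"))
deps.add(tomlkit.comment('some-unresolved-pkg = "*"'))  # hypothetical name
doc["dependencies"] = deps

print(tomlkit.dumps(doc))
# Rendered output (approximately):
# [dependencies]
# ros-jazzy-ros-base = "*"
#
# # The following packages were not found:
# # some-unresolved-pkg = "*"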
@@ -634,45 +780,37 @@ def _ensure_dependencies(
             tomlkit.comment("Unix-specific dependencies (Linux and macOS)")
         )
 
-        #
-        representative_pixi_platform = platform_groups.get(
-            "linux", platform_groups.get("osx", platforms)
-        )[0]
-        platform_obj = Platform(representative_pixi_platform)
-        packages_to_check = list(unix_deps.keys())
-
-        availability = {}
-        if channels and packages_to_check:
-            typer.echo("Checking package availability for unix...")
-            availability = _check_package_availability(
-                packages_to_check, channels, platform_obj
-            )
-
-        available_packages = []
-        unavailable_packages = []
-
+        # Add unix-specific dependencies
         for conda_dep in sorted(unix_deps.keys()):
             if conda_dep not in target_deps:
-
-
-                available_packages.append(conda_dep)
-            else:
-                unavailable_packages.append(conda_dep)
+                version = dep_versions.get(conda_dep, "*")
+                target_deps[conda_dep] = version
 
-
-
-
+        # Calculate and add unix-specific NOT_FOUND packages as comments
+        if has_linux and has_osx:
+            linux_not_found = set(not_found_packages.get("linux", {}).keys())
+            osx_not_found = set(not_found_packages.get("osx", {}).keys())
+            unix_not_found_candidates = (
+                linux_not_found & osx_not_found
+            ) - common_not_found
+
+            if has_win:
+                win_not_found = set(
+                    not_found_packages.get("win64", {}).keys()
+                ) | set(not_found_packages.get("win", {}).keys())
+                unix_not_found = unix_not_found_candidates - win_not_found
+            else:
+                unix_not_found = unix_not_found_candidates
 
-
-
-        target_deps.add(
-            tomlkit.comment("The following packages were not found:")
-        )
-        for conda_dep in unavailable_packages:
-            version = dep_versions.get(conda_dep, "*")
+        if unix_not_found:
+            target_deps.add(tomlkit.nl())
             target_deps.add(
-                tomlkit.comment(
+                tomlkit.comment("The following packages were not found:")
             )
+            for ros_pkg in sorted(unix_not_found):
+                _, conda_pkgs = not_found_packages["linux"][ros_pkg]
+                pkg_name = conda_pkgs[0] if conda_pkgs else ros_pkg
+                target_deps.add(tomlkit.comment(f'{pkg_name} = "*"'))
 
     # Now add remaining platform-specific dependencies (not in common, not in unix)
     if len(mapping_platform_list) > 1:
@@ -702,46 +840,34 @@ def _ensure_dependencies(
                 )
             )
 
-            #
-            # Use the first pixi platform in the group as representative
-            representative_pixi_platform = platform_groups[mapping_platform][0]
-            platform_obj = Platform(representative_pixi_platform)
-            packages_to_check = list(platform_specific_deps.keys())
-
-            availability = {}
-            if channels and packages_to_check:
-                typer.echo(
-                    f"Checking package availability for {mapping_platform}..."
-                )
-                availability = _check_package_availability(
-                    packages_to_check, channels, platform_obj
-                )
-
-            available_packages = []
-            unavailable_packages = []
-
+            # Add platform-specific dependencies
             for conda_dep in sorted(platform_specific_deps.keys()):
                 if conda_dep not in target_deps:
-
-
-                    available_packages.append(conda_dep)
-                else:
-                    unavailable_packages.append(conda_dep)
+                    version = dep_versions.get(conda_dep, "*")
+                    target_deps[conda_dep] = version
-
-            for conda_dep in available_packages:
-                version = dep_versions.get(conda_dep, "*")
-                target_deps[conda_dep] = version
 
-
+            # Add platform-specific NOT_FOUND packages as comments
+            # Determine which NOT_FOUND packages are platform-specific
+            common_set = (
+                common_not_found if len(mapping_platform_list) > 1 else set()
+            )
+            platform_not_found = {
+                ros_pkg: (sources, conda_pkgs)
+                for ros_pkg, (sources, conda_pkgs) in not_found_packages[
+                    mapping_platform
+                ].items()
+                if ros_pkg not in common_set and ros_pkg not in unix_not_found
+            }
+
+            if platform_not_found:
                 target_deps.add(tomlkit.nl())
                 target_deps.add(
                     tomlkit.comment("The following packages were not found:")
                 )
-                for
-
-
-
-                )
+                for ros_pkg in sorted(platform_not_found.keys()):
+                    _, conda_pkgs = platform_not_found[ros_pkg]
+                    pkg_name = conda_pkgs[0] if conda_pkgs else ros_pkg
+                    target_deps.add(tomlkit.comment(f'{pkg_name} = "*"'))
 
 
 def _ensure_tasks(config: dict):
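Taken together, the NOT_FOUND bookkeeping in the last few hunks is plain set algebra: common = the intersection across all mapping platforms; unix = (linux ∩ osx) minus common, and minus windows when windows is targeted; whatever remains per platform is platform-specific. A toy run of that classification:

# Set-algebra sketch of the NOT_FOUND classification, with made-up package names.
not_found = {
    "linux": {"pkg_everywhere", "pkg_unix", "pkg_linux_only"},
    "osx": {"pkg_everywhere", "pkg_unix"},
    "win64": {"pkg_everywhere"},
}

platforms = list(not_found)
common = set.intersection(*(not_found[p] for p in platforms))  # missing on all platforms
unix = (not_found["linux"] & not_found["osx"]) - common        # missing on linux+osx only
unix -= not_found["win64"]  # mirrors the has_win branch; no-op here since common already covers it
per_platform = {p: not_found[p] - common - unix for p in platforms}

print(common)        # {'pkg_everywhere'}
print(unix)          # {'pkg_unix'}
print(per_platform)  # {'linux': {'pkg_linux_only'}, 'osx': set(), 'win64': set()}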