pixi-ros 0.2.0__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pixi_ros/cli.py +44 -31
- pixi_ros/data/conda-forge.yaml +6 -0
- pixi_ros/init.py +395 -206
- pixi_ros/mappings.py +39 -26
- pixi_ros/package_xml.py +69 -8
- pixi_ros/validator.py +245 -0
- {pixi_ros-0.2.0.dist-info → pixi_ros-0.4.0.dist-info}/METADATA +118 -25
- pixi_ros-0.4.0.dist-info/RECORD +17 -0
- pixi_ros-0.2.0.dist-info/RECORD +0 -16
- {pixi_ros-0.2.0.dist-info → pixi_ros-0.4.0.dist-info}/WHEEL +0 -0
- {pixi_ros-0.2.0.dist-info → pixi_ros-0.4.0.dist-info}/entry_points.txt +0 -0
- {pixi_ros-0.2.0.dist-info → pixi_ros-0.4.0.dist-info}/licenses/LICENSE +0 -0
pixi_ros/init.py
CHANGED
@@ -4,14 +4,19 @@ from pathlib import Path
 
 import tomlkit
 import typer
-from rattler import Channel, Gateway, Platform
+from rattler import Platform
 from rich.console import Console
 from rich.panel import Panel
 from rich.table import Table
 from rich.text import Text
 
-from pixi_ros.mappings import map_ros_to_conda
+from pixi_ros.mappings import (
+    expand_gl_requirements,
+    get_mappings,
+    map_ros_to_conda,
+)
 from pixi_ros.utils import detect_cmake_version_requirement
+from pixi_ros.validator import PackageSource, RosDistroValidator
 from pixi_ros.workspace import discover_packages, find_workspace_root
 
 console = Console()
@@ -36,12 +41,6 @@ def init_workspace(
     Raises:
         typer.Exit: If validation fails or workspace not found
     """
-    # Validate distro
-    if not validate_distro(distro):
-        typer.echo(f"Error: Unsupported ROS distribution '{distro}'", err=True)
-        typer.echo("Supported distros: humble, iron, jazzy, rolling", err=True)
-        raise typer.Exit(code=1)
-
     # Find workspace root
     if workspace_path is None:
         workspace_path = find_workspace_root()
@@ -88,13 +87,32 @@ def init_workspace(
         typer.echo(f"Creating new {pixi_toml_path}")
         pixi_config = tomlkit.document()
 
+    # Create validator for package validation
+    validator = None
+    with console.status(
+        f"[cyan]Fetching ROS {distro} distribution index...[/cyan]",
+        spinner="dots",
+    ):
+        validator = RosDistroValidator(distro)
+
+    if validator._init_error:
+        typer.echo(
+            f"Warning: Could not initialize ROS distro validator: "
+            f"{validator._init_error}",
+            err=True,
+        )
+        typer.echo("Continuing with fallback package resolution...", err=True)
+        validator = None
+
     # Display discovered dependencies
-    _display_dependencies(packages, distro)
+    not_found_packages = _display_dependencies(packages, distro, validator)
 
     # Update configuration
     _ensure_workspace_section(pixi_config, workspace_path, platforms)
     _ensure_channels(pixi_config, distro)
-    _ensure_dependencies(pixi_config, packages, distro, platforms)
+    _ensure_dependencies(
+        pixi_config, packages, distro, platforms, validator, not_found_packages
+    )
     _ensure_tasks(pixi_config)
     _ensure_activation(pixi_config)
 
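
A minimal sketch of the validation flow this hunk introduces, using only the calls visible in this diff (RosDistroValidator, _init_error, validate_package, PackageSource, map_ros_to_conda); the distro string and package name are illustrative:

    from rattler import Platform

    from pixi_ros.mappings import get_mappings, map_ros_to_conda
    from pixi_ros.validator import PackageSource, RosDistroValidator

    validator = RosDistroValidator("jazzy")  # illustrative distro
    if validator._init_error:
        # Index fetch failed: degrade to mapping-file resolution only
        validator = None

    if validator is not None:
        result = validator.validate_package(
            "rclcpp", set(), get_mappings(), str(Platform.current())
        )
        if result.source != PackageSource.NOT_FOUND:
            # result.conda_packages holds the resolved conda package names
            conda_pkgs = map_ros_to_conda(
                "rclcpp", "jazzy", validator=validator, workspace_packages=set()
            )
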
@@ -153,16 +171,97 @@ def init_workspace(
         raise typer.Exit(code=1) from e
 
 
-def _display_dependencies(packages, distro: str):
-    """Display discovered dependencies in a rich table."""
+def _display_dependencies(packages, distro: str, validator=None):
+    """Display discovered dependencies in a rich table.
+
+    Returns:
+        Dict of NOT_FOUND packages: {ros_package: (source_pkgs, conda_pkgs)}
+    """
     if not packages:
-        return
+        return {}
 
     workspace_pkg_names = {pkg.name for pkg in packages}
 
-    # Organize dependencies by package and type
-
-    pkg_deps = {}
+    # First, collect all unique dependencies to avoid duplicate validation
+    all_unique_deps: dict[str, set[str]] = {}  # {ros_dep: set of source packages}
+    # {ros_dep: set of dep types (Build/Runtime/Test)}
+    dep_types: dict[str, set[str]] = {}
+
+    for pkg in packages:
+        for ros_dep in pkg.get_all_build_dependencies():
+            if ros_dep not in workspace_pkg_names:
+                if ros_dep not in all_unique_deps:
+                    all_unique_deps[ros_dep] = set()
+                    dep_types[ros_dep] = set()
+                all_unique_deps[ros_dep].add(pkg.name)
+                dep_types[ros_dep].add("Build")
+
+        for ros_dep in pkg.get_all_runtime_dependencies():
+            if ros_dep not in workspace_pkg_names:
+                if ros_dep not in all_unique_deps:
+                    all_unique_deps[ros_dep] = set()
+                    dep_types[ros_dep] = set()
+                all_unique_deps[ros_dep].add(pkg.name)
+                dep_types[ros_dep].add("Runtime")
+
+        for ros_dep in pkg.get_all_test_dependencies():
+            if ros_dep not in workspace_pkg_names:
+                if ros_dep not in all_unique_deps:
+                    all_unique_deps[ros_dep] = set()
+                    dep_types[ros_dep] = set()
+                all_unique_deps[ros_dep].add(pkg.name)
+                dep_types[ros_dep].add("Test")
+
+    # Validate each unique dependency once
+    # {ros_dep: (conda_packages, source_label)}
+    validation_results: dict[str, tuple[list[str], str]] = {}
+    validation_stats = {
+        "workspace": 0,
+        "mapping": 0,
+        "ros_distro": 0,
+        "conda_forge": 0,
+        "not_found": 0,
+    }
+    not_found_packages: dict[str, tuple[set[str], list[str]]] = {}
+
+    for ros_dep, source_pkgs in all_unique_deps.items():
+        source_label = "Fallback"
+        if validator:
+            mappings = get_mappings()
+            result = validator.validate_package(
+                ros_dep, workspace_pkg_names, mappings, str(Platform.current())
+            )
+            if result.source == PackageSource.WORKSPACE:
+                validation_stats["workspace"] += 1
+                source_label = "[dim]Workspace[/dim]"
+            elif result.source == PackageSource.MAPPING:
+                validation_stats["mapping"] += 1
+                source_label = "[cyan]Mapping[/cyan]"
+            elif result.source == PackageSource.ROS_DISTRO:
+                validation_stats["ros_distro"] += 1
+                source_label = f"[green]ROS {distro}[/green]"
+            elif result.source == PackageSource.CONDA_FORGE:
+                validation_stats["conda_forge"] += 1
+                source_label = "[blue]conda-forge[/blue]"
+            elif result.source == PackageSource.NOT_FOUND:
+                validation_stats["not_found"] += 1
+                source_label = "[red]NOT FOUND[/red]"
+                # Track NOT_FOUND packages
+                not_found_packages[ros_dep] = (
+                    source_pkgs.copy(),
+                    result.conda_packages,
+                )
+
+        conda_packages = map_ros_to_conda(
+            ros_dep,
+            distro,
+            validator=validator,
+            workspace_packages=workspace_pkg_names,
+        )
+        validation_results[ros_dep] = (conda_packages, source_label)
+
+    # Now organize by package and type for display
+    pkg_deps: dict[str, dict[str, dict[str, tuple[list[str], str]]]] = {}
 
     for pkg in packages:
         pkg_deps[pkg.name] = {"Build": {}, "Runtime": {}, "Test": {}}
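
The three collection loops added above share one pattern; a condensed equivalent (a hypothetical refactor, not part of this release) using dict.setdefault:

    for pkg in packages:
        for getter, dep_type in (
            (pkg.get_all_build_dependencies, "Build"),
            (pkg.get_all_runtime_dependencies, "Runtime"),
            (pkg.get_all_test_dependencies, "Test"),
        ):
            for ros_dep in getter():
                if ros_dep in workspace_pkg_names:
                    continue
                # setdefault replaces the repeated membership checks
                all_unique_deps.setdefault(ros_dep, set()).add(pkg.name)
                dep_types.setdefault(ros_dep, set()).add(dep_type)
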
@@ -171,25 +270,37 @@ def _display_dependencies(packages, distro: str):
         for ros_dep in pkg.get_all_build_dependencies():
             if ros_dep in workspace_pkg_names:
                 continue
-            conda_packages = map_ros_to_conda(ros_dep, distro)
-            if conda_packages:
-                pkg_deps[pkg.name]["Build"][ros_dep] = conda_packages
+
+            if ros_dep in validation_results:
+                conda_packages, source_label = validation_results[ros_dep]
+                if conda_packages:
+                    pkg_deps[pkg.name]["Build"][ros_dep] = (
+                        conda_packages,
+                        source_label,
+                    )
 
         # Runtime dependencies
         for ros_dep in pkg.get_all_runtime_dependencies():
             if ros_dep in workspace_pkg_names:
                 continue
-            conda_packages = map_ros_to_conda(ros_dep, distro)
-            if conda_packages:
-                pkg_deps[pkg.name]["Runtime"][ros_dep] = conda_packages
+
+            if ros_dep in validation_results:
+                conda_packages, source_label = validation_results[ros_dep]
+                if conda_packages:
+                    pkg_deps[pkg.name]["Runtime"][ros_dep] = (
+                        conda_packages,
+                        source_label,
+                    )
 
         # Test dependencies
         for ros_dep in pkg.get_all_test_dependencies():
             if ros_dep in workspace_pkg_names:
                 continue
-            conda_packages = map_ros_to_conda(ros_dep, distro)
-            if conda_packages:
-                pkg_deps[pkg.name]["Test"][ros_dep] = conda_packages
+
+            if ros_dep in validation_results:
+                conda_packages, source_label = validation_results[ros_dep]
+                if conda_packages:
+                    pkg_deps[pkg.name]["Test"][ros_dep] = (conda_packages, source_label)
 
     # Check if any external dependencies exist
     has_deps = any(
@@ -211,9 +322,18 @@ def _display_dependencies(packages, distro: str):
         all_deps = []
         for dep_type in ["Build", "Runtime", "Test"]:
             for ros_dep in sorted(pkg_info[dep_type].keys()):
-                conda_pkgs = pkg_info[dep_type][ros_dep]
-                conda_str = ", ".join(conda_pkgs)
-                all_deps.append((ros_dep, dep_type, conda_str))
+                dep_info = pkg_info[dep_type][ros_dep]
+                # Handle both tuple format (with source) and list format
+                # (backward compat)
+                if isinstance(dep_info, tuple):
+                    conda_pkgs, source = dep_info
+                else:
+                    conda_pkgs = dep_info
+                    source = "Fallback"
+                conda_str = (
+                    ", ".join(conda_pkgs) if conda_pkgs else "[red]NOT FOUND[/red]"
+                )
+                all_deps.append((ros_dep, dep_type, conda_str, source))
 
         # Skip packages with no external dependencies
         if not all_deps:
@@ -228,14 +348,58 @@ def _display_dependencies(packages, distro: str):
         table.add_column("ROS Dependency", style="yellow")
         table.add_column("Type", style="blue")
         table.add_column("Conda Packages", style="green")
+        if validator:
+            table.add_column("Source", style="magenta")
 
         # Add all dependencies for this package
-        for ros_dep, dep_type, conda_str in all_deps:
-            table.add_row(ros_dep, dep_type, conda_str)
+        for dep_data in all_deps:
+            if validator:
+                ros_dep, dep_type, conda_str, source = dep_data
+                table.add_row(ros_dep, dep_type, conda_str, source)
+            else:
+                ros_dep, dep_type, conda_str, _ = dep_data
+                table.add_row(ros_dep, dep_type, conda_str)
 
         console.print(table)
         console.print("")
 
+    # Display validation summary if validator was used
+    if validator:
+        console.print("\n[bold cyan]Validation Summary:[/bold cyan]")
+        total_deps = sum(validation_stats.values())
+
+        if validation_stats["workspace"] > 0:
+            console.print(
+                f" [green]✓[/green] {validation_stats['workspace']} "
+                f"workspace packages (skipped)"
+            )
+        if validation_stats["mapping"] > 0:
+            console.print(
+                f" [green]✓[/green] {validation_stats['mapping']} "
+                f"packages from mappings"
+            )
+        if validation_stats["ros_distro"] > 0:
+            console.print(
+                f" [green]✓[/green] {validation_stats['ros_distro']} "
+                f"packages from ROS {distro} distro"
+            )
+        if validation_stats["conda_forge"] > 0:
+            console.print(
+                f" [green]✓[/green] {validation_stats['conda_forge']} "
+                f"packages from conda-forge (auto-detected)"
+            )
+        if validation_stats["not_found"] > 0:
+            console.print(
+                f" [yellow]⚠[/yellow] {validation_stats['not_found']} "
+                f"packages NOT FOUND (will be commented out)"
+            )
+
+        total_external = total_deps - validation_stats["workspace"]
+        console.print(f"\n Total external dependencies: {total_external}")
+        console.print("")
+
+    return not_found_packages
+
 
 def _ensure_workspace_section(
     config: dict, workspace_path: Path, platforms: list[str] | None = None
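
An illustrative run of the summary block above (package counts invented):

    Validation Summary:
     ✓ 14 packages from mappings
     ✓ 9 packages from ROS jazzy distro
     ✓ 2 packages from conda-forge (auto-detected)
     ⚠ 1 packages NOT FOUND (will be commented out)

     Total external dependencies: 26
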
@@ -254,8 +418,8 @@ def _ensure_workspace_section(
     if "channels" not in workspace:
        workspace["channels"] = []
 
-    # Set platforms
-    if "platforms" not in workspace:
+    # Set or extend platforms
+    if "platforms" not in workspace:
         if platforms:
             # Platforms are already in pixi format (linux-64, osx-64, etc.)
             workspace["platforms"] = platforms
@@ -263,6 +427,18 @@ def _ensure_workspace_section(
             # Only add the current platform by default
             current_platform = str(Platform.current())
             workspace["platforms"] = [current_platform]
+    elif platforms:
+        # Extend existing platforms list with new ones (avoiding duplicates)
+        existing_platforms = workspace["platforms"]
+        if not isinstance(existing_platforms, list):
+            existing_platforms = [existing_platforms]
+
+        # Add new platforms that aren't already in the list
+        for platform in platforms:
+            if platform not in existing_platforms:
+                existing_platforms.append(platform)
+
+        workspace["platforms"] = existing_platforms
 
 
 def _ensure_channels(config: dict, distro: str):
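
The merge added above keeps existing entries and appends new ones in order; a small behavioral sketch (values illustrative):

    workspace = {"platforms": ["linux-64"]}
    platforms = ["linux-64", "osx-arm64"]

    existing_platforms = workspace["platforms"]
    for platform in platforms:
        if platform not in existing_platforms:
            existing_platforms.append(platform)

    assert existing_platforms == ["linux-64", "osx-arm64"]
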
@@ -284,65 +460,20 @@ def _ensure_channels(config: dict, distro: str):
         workspace["channels"] = channels
 
 
-def _check_package_availability(
-    packages: list[str], channels: list[str], platform: Platform
-) -> dict[str, bool]:
-    """
-    Check if packages are available in the given channels.
-
-    Args:
-        packages: List of conda package names to check
-        channels: List of channel URLs
-        platform: Platform to check for
-
-    Returns:
-        Dictionary mapping package names to availability (True/False)
-    """
-    import asyncio
-
-    availability = dict.fromkeys(packages, False)
-
-    try:
-        # Create gateway for fetching repo data
-        gateway = Gateway()
-
-        # Convert channel URLs to Channel objects
-        channel_objects = [Channel(url) for url in channels]
-
-        # Query all channels at once (gateway.query is async)
-        repo_data_by_channel = asyncio.run(
-            gateway.query(
-                channel_objects,
-                [platform],
-                specs=packages,  # Correct parameter name
-                recursive=False,  # Don't fetch dependencies
-            )
-        )
-
-        # repo_data_by_channel is a list of lists (one per channel)
-        # Check all channels for each package
-        for channel_records in repo_data_by_channel:
-            for record in channel_records:
-                # Check if any of our packages match this record
-                for package_name in packages:
-                    if record.name.normalized == package_name.lower():
-                        availability[package_name] = True
-
-    except Exception as e:
-        # If query fails, log the error but continue (all marked as unavailable)
-        console.print(
-            f"[yellow]Warning: Could not check package availability: {e}[/yellow]"
-        )
-
-    return availability
-
-
 def _ensure_dependencies(
-    config: dict, packages, distro: str, platforms: list[str] | None = None
+    config: dict,
+    packages,
+    distro: str,
+    platforms: list[str] | None = None,
+    validator=None,
+    not_found_from_display: dict[str, tuple[set[str], list[str]]] | None = None,
 ):
     """
     Ensure all ROS dependencies are present with comments showing source.
 
+    Args:
+        not_found_from_display: NOT_FOUND packages from _display_dependencies
+
     Generates platform-specific dependencies if multiple platforms are specified.
     Common dependencies (available on all platforms) go in [dependencies],
     platform-specific ones go in [target.{platform}.dependencies].
@@ -365,6 +496,33 @@ def _ensure_dependencies(
         if cmake_version:
             dep_versions["cmake"] = cmake_version
 
+        # Collect version constraints from package.xml
+        for ros_dep, version_constraint in pkg.dependency_versions.items():
+            # Skip workspace packages
+            if ros_dep in workspace_pkg_names:
+                continue
+
+            # Map ROS package to conda packages
+            # Note: We use the first platform for mapping since version constraints
+            # should be the same across platforms for a given ROS package
+            conda_packages = map_ros_to_conda(
+                ros_dep,
+                distro,
+                validator=validator,
+                workspace_packages=workspace_pkg_names,
+            )
+
+            # Apply version constraint to all mapped conda packages
+            for conda_dep in conda_packages:
+                if conda_dep and not conda_dep.startswith("REQUIRE_"):
+                    # If package already has a constraint, combine them
+                    if conda_dep in dep_versions:
+                        dep_versions[conda_dep] = (
+                            f"{dep_versions[conda_dep]},{version_constraint}"
+                        )
+                    else:
+                        dep_versions[conda_dep] = version_constraint
+
     # Platforms come from CLI as pixi platform names (linux-64, osx-64, etc.)
     # Map them to mapping platform names for querying the mapping files
     pixi_to_mapping = {
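
Constraints are merged by comma-joining conda version specs; a worked example (package name and versions invented):

    dep_versions = {"libfoo": ">=1.0"}
    version_constraint = "<2.0"

    dep_versions["libfoo"] = f"{dep_versions['libfoo']},{version_constraint}"
    assert dep_versions["libfoo"] == ">=1.0,<2.0"
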
@@ -389,6 +547,22 @@ def _ensure_dependencies(
         mapping_platform: {} for mapping_platform in platform_groups.keys()
     }
 
+    # Track NOT_FOUND packages per platform
+    # Structure: mapping_platform -> ros_package -> (set of sources, conda_packages)
+    not_found_packages: dict[str, dict[str, tuple[set[str], list[str]]]] = {
+        mapping_platform: {} for mapping_platform in platform_groups.keys()
+    }
+
+    # Use NOT_FOUND data from _display_dependencies (already validated there)
+    if not_found_from_display:
+        # Populate not_found_packages for all platforms with the data from display
+        for mapping_platform in platform_groups.keys():
+            for ros_dep, (source_pkgs, conda_pkgs) in not_found_from_display.items():
+                not_found_packages[mapping_platform][ros_dep] = (
+                    source_pkgs.copy(),
+                    conda_pkgs,
+                )
+
     # Collect dependencies from each package, mapped for each platform
     for mapping_platform in platform_groups.keys():
         for pkg in packages:
@@ -397,9 +571,17 @@ def _ensure_dependencies(
                 if ros_dep in workspace_pkg_names:
                     continue
 
-                # Map ROS package to conda packages
+                # Skip NOT_FOUND packages (already tracked from display)
+                if not_found_from_display and ros_dep in not_found_from_display:
+                    continue
+
+                # Map to conda packages WITHOUT validator (already validated in display)
                 conda_packages = map_ros_to_conda(
-                    ros_dep, distro, platform_override=mapping_platform
+                    ros_dep,
+                    distro,
+                    platform_override=mapping_platform,
+                    validator=None,
+                    workspace_packages=workspace_pkg_names,
                 )
 
                 # Skip if no conda packages were returned
@@ -444,12 +626,26 @@ def _ensure_dependencies(
         # Single mapping platform - all deps are "common"
         common_deps = set(platform_deps[mapping_platform_list[0]].keys())
 
+    # Determine common NOT_FOUND packages (present in all mapping platforms)
+    common_not_found = set()
+    if len(mapping_platform_list) > 1:
+        common_not_found = set(not_found_packages[mapping_platform_list[0]].keys())
+        for mapping_platform in mapping_platform_list[1:]:
+            common_not_found &= set(not_found_packages[mapping_platform].keys())
+
+    # Will be set later when processing unix deps
+    unix_not_found = set()
+
     # For backwards compatibility when single platform, use old behavior
-    dep_sources = (
-        platform_deps[mapping_platform_list[0]]
-        if len(mapping_platform_list) == 1
-        else {dep: platform_deps[mapping_platform_list[0]][dep] for dep in common_deps}
-    )
+    dep_sources = (
+        platform_deps[mapping_platform_list[0]]
+        if len(mapping_platform_list) == 1
+        else {
+            dep: platform_deps[mapping_platform_list[0]][dep]
+            for dep in common_deps
+            if dep in platform_deps[mapping_platform_list[0]]
+        }
+    )
 
     # Create or get dependencies table
     if "dependencies" not in config:
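
"Common" NOT_FOUND packages are the set intersection across all mapping platforms; a small sketch (names invented):

    not_found_packages = {
        "linux": {"pkg_a": (set(), []), "pkg_b": (set(), [])},
        "osx": {"pkg_a": (set(), [])},
    }
    common_not_found = set(not_found_packages["linux"]) & set(not_found_packages["osx"])
    assert common_not_found == {"pkg_a"}
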
@@ -459,6 +655,9 @@ def _ensure_dependencies(
     # Add base ROS dependencies with comment
     base_deps = {
         f"ros-{distro}-ros-base": "*",
+    }
+
+    build_deps = {
         "pkg-config": "*",
         "compilers": "*",
         "make": "*",
@@ -473,8 +672,14 @@ def _ensure_dependencies(
         if dep not in dependencies:
             dependencies[dep] = version
 
+    for dep, version in build_deps.items():
+        if dep not in dependencies:
+            dependencies[dep] = version
+
     # Add ros2cli packages
-    ros2cli_deps = map_ros_to_conda("ros2cli", distro)
+    ros2cli_deps = map_ros_to_conda(
+        "ros2cli", distro, validator=validator, workspace_packages=workspace_pkg_names
+    )
     if ros2cli_deps:
         for conda_pkg in ros2cli_deps:
             if conda_pkg and conda_pkg not in dependencies:
@@ -491,60 +696,43 @@ def _ensure_dependencies(
         dependencies["cmake"] = dep_versions["cmake"]
 
     # Add package dependencies
-    channels = config.get("workspace", {}).get("channels", [])
-
     # Add common dependencies (available on all platforms)
     if dep_sources:
         dependencies.add(tomlkit.nl())
         if len(mapping_platform_list) > 1:
-            dependencies.add(tomlkit.comment("Workspace dependencies (common across platforms)"))
+            dependencies.add(
+                tomlkit.comment("Workspace dependencies (common across platforms)")
+            )
         else:
             dependencies.add(tomlkit.comment("Workspace dependencies"))
 
-        # Check availability on a representative platform
-        first_pixi_platform = platforms[0]
-        first_platform = Platform(first_pixi_platform)
-
-        packages_to_check = [
-            conda_dep
-            for conda_dep in dep_sources.keys()
-            if conda_dep not in dependencies
-        ]
-
-        availability = {}
-        if channels and packages_to_check:
-            typer.echo(f"Checking common package availability for {first_pixi_platform}...")
-            availability = _check_package_availability(
-                packages_to_check, channels, first_platform
-            )
-
-        available_packages = []
-        unavailable_packages = []
-
+        # Add all dependencies (validation already checked availability)
         for conda_dep in sorted(dep_sources.keys()):
             if conda_dep not in dependencies:
-                # Default to available if the check was skipped
-                if availability.get(conda_dep, True):
-                    available_packages.append(conda_dep)
-                else:
-                    unavailable_packages.append(conda_dep)
-
-        for conda_dep in available_packages:
-            version = dep_versions.get(conda_dep, "*")
-            dependencies[conda_dep] = version
+                version = dep_versions.get(conda_dep, "*")
+                dependencies[conda_dep] = version
 
-        if unavailable_packages:
+        # Add NOT_FOUND packages as comments (common across all platforms)
+        if len(mapping_platform_list) > 1:
+            if common_not_found:
                 dependencies.add(tomlkit.nl())
-            dependencies.add(
-                tomlkit.comment("The following packages were not found:")
-            )
-            for conda_dep in unavailable_packages:
-                version = dep_versions.get(conda_dep, "*")
-                dependencies.add(
-                    tomlkit.comment(f'{conda_dep} = "{version}"')
-                )
-
-
+                dependencies.add(tomlkit.comment("The following packages were not found:"))
+                for ros_pkg in sorted(common_not_found):
+                    _, conda_pkgs = not_found_packages[mapping_platform_list[0]][ros_pkg]
+                    # Use conda package name if available, otherwise ros package name
+                    pkg_name = conda_pkgs[0] if conda_pkgs else ros_pkg
+                    dependencies.add(tomlkit.comment(f'{pkg_name} = "*"'))
+        else:
+            # Single platform - add all NOT_FOUND packages
+            mapping_platform = mapping_platform_list[0]
+            if not_found_packages[mapping_platform]:
+                dependencies.add(tomlkit.nl())
+                dependencies.add(tomlkit.comment("The following packages were not found:"))
+                for ros_pkg in sorted(not_found_packages[mapping_platform].keys()):
+                    _, conda_pkgs = not_found_packages[mapping_platform][ros_pkg]
+                    # Use conda package name if available, otherwise ros package name
+                    pkg_name = conda_pkgs[0] if conda_pkgs else ros_pkg
+                    dependencies.add(tomlkit.comment(f'{pkg_name} = "*"'))
 
     config["dependencies"] = dependencies
 
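
An illustrative pixi.toml fragment produced by the commenting logic above (package names invented):

    [dependencies]
    ros-jazzy-ros-base = "*"
    pkg-config = "*"

    # The following packages were not found:
    # some-missing-pkg = "*"
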
@@ -564,7 +752,9 @@ def _ensure_dependencies(
 
         # If we also have windows, only move to unix if NOT on windows
         if has_win:
-            win_deps = set(platform_deps.get("win64", {}).keys()) | set(platform_deps.get("win", {}).keys())
+            win_deps = set(platform_deps.get("win64", {}).keys()) | set(
+                platform_deps.get("win", {}).keys()
+            )
             unix_deps_keys = unix_candidates - win_deps
         else:
             unix_deps_keys = unix_candidates
@@ -590,43 +780,37 @@ def _ensure_dependencies(
             tomlkit.comment("Unix-specific dependencies (Linux and macOS)")
         )
 
-        # Check availability on a representative unix platform
-        representative_pixi_platform = platform_groups.get("linux", platform_groups.get("osx", platforms))[0]
-        platform_obj = Platform(representative_pixi_platform)
-        packages_to_check = list(unix_deps.keys())
-
-        availability = {}
-        if channels and packages_to_check:
-            typer.echo("Checking package availability for unix...")
-            availability = _check_package_availability(
-                packages_to_check, channels, platform_obj
-            )
-
-        available_packages = []
-        unavailable_packages = []
-
+        # Add unix-specific dependencies
         for conda_dep in sorted(unix_deps.keys()):
             if conda_dep not in target_deps:
-                # Default to available if the check was skipped
-                if availability.get(conda_dep, True):
-                    available_packages.append(conda_dep)
-                else:
-                    unavailable_packages.append(conda_dep)
+                version = dep_versions.get(conda_dep, "*")
+                target_deps[conda_dep] = version
 
-        for conda_dep in available_packages:
-            version = dep_versions.get(conda_dep, "*")
-            target_deps[conda_dep] = version
+        # Calculate and add unix-specific NOT_FOUND packages as comments
+        if has_linux and has_osx:
+            linux_not_found = set(not_found_packages.get("linux", {}).keys())
+            osx_not_found = set(not_found_packages.get("osx", {}).keys())
+            unix_not_found_candidates = (
+                linux_not_found & osx_not_found
+            ) - common_not_found
+
+            if has_win:
+                win_not_found = set(
+                    not_found_packages.get("win64", {}).keys()
+                ) | set(not_found_packages.get("win", {}).keys())
+                unix_not_found = unix_not_found_candidates - win_not_found
+            else:
+                unix_not_found = unix_not_found_candidates
 
-
-        if unavailable_packages:
-            target_deps.add(
-                tomlkit.comment("The following packages were not found:")
-            )
-            for conda_dep in unavailable_packages:
-                version = dep_versions.get(conda_dep, "*")
+        if unix_not_found:
+            target_deps.add(tomlkit.nl())
             target_deps.add(
-                tomlkit.comment(f'{conda_dep} = "{version}"')
+                tomlkit.comment("The following packages were not found:")
             )
+            for ros_pkg in sorted(unix_not_found):
+                _, conda_pkgs = not_found_packages["linux"][ros_pkg]
+                pkg_name = conda_pkgs[0] if conda_pkgs else ros_pkg
+                target_deps.add(tomlkit.comment(f'{pkg_name} = "*"'))
 
     # Now add remaining platform-specific dependencies (not in common, not in unix)
     if len(mapping_platform_list) > 1:
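
The unix bucket above is plain set algebra: packages missing on both linux and osx, minus those missing everywhere, minus those also missing on windows when windows is targeted. A worked example (names invented):

    linux_nf = {"pkg_a", "pkg_b"}
    osx_nf = {"pkg_a", "pkg_b"}
    common_nf = {"pkg_a"}  # missing on every target platform
    win_nf = {"pkg_c"}

    unix_nf = ((linux_nf & osx_nf) - common_nf) - win_nf
    assert unix_nf == {"pkg_b"}
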
@@ -651,47 +835,39 @@ def _ensure_dependencies(
         # Add comment
         if len(target_deps) == 0:
             target_deps.add(
-                tomlkit.comment(f"Platform-specific dependencies for {mapping_platform}")
-            )
-
-        # Check availability for this mapping platform
-        # Use the first pixi platform in the group as representative
-        representative_pixi_platform = platform_groups[mapping_platform][0]
-        platform_obj = Platform(representative_pixi_platform)
-        packages_to_check = list(platform_specific_deps.keys())
-
-        availability = {}
-        if channels and packages_to_check:
-            typer.echo(f"Checking package availability for {mapping_platform}...")
-            availability = _check_package_availability(
-                packages_to_check, channels, platform_obj
+                tomlkit.comment(
+                    f"Platform-specific dependencies for {mapping_platform}"
+                )
             )
 
-        available_packages = []
-        unavailable_packages = []
-
+        # Add platform-specific dependencies
         for conda_dep in sorted(platform_specific_deps.keys()):
             if conda_dep not in target_deps:
-                # Default to available if the check was skipped
-                if availability.get(conda_dep, True):
-                    available_packages.append(conda_dep)
-                else:
-                    unavailable_packages.append(conda_dep)
-
-        for conda_dep in available_packages:
-            version = dep_versions.get(conda_dep, "*")
-            target_deps[conda_dep] = version
+                version = dep_versions.get(conda_dep, "*")
+                target_deps[conda_dep] = version
 
-        if unavailable_packages:
+        # Add platform-specific NOT_FOUND packages as comments
+        # Determine which NOT_FOUND packages are platform-specific
+        common_set = (
+            common_not_found if len(mapping_platform_list) > 1 else set()
+        )
+        platform_not_found = {
+            ros_pkg: (sources, conda_pkgs)
+            for ros_pkg, (sources, conda_pkgs) in not_found_packages[
+                mapping_platform
+            ].items()
+            if ros_pkg not in common_set and ros_pkg not in unix_not_found
+        }
+
+        if platform_not_found:
             target_deps.add(tomlkit.nl())
             target_deps.add(
                 tomlkit.comment("The following packages were not found:")
             )
-            for conda_dep in unavailable_packages:
-                version = dep_versions.get(conda_dep, "*")
-                target_deps.add(
-                    tomlkit.comment(f'{conda_dep} = "{version}"')
-                )
+            for ros_pkg in sorted(platform_not_found.keys()):
+                _, conda_pkgs = platform_not_found[ros_pkg]
+                pkg_name = conda_pkgs[0] if conda_pkgs else ros_pkg
+                target_deps.add(tomlkit.comment(f'{pkg_name} = "*"'))
 
 
 def _ensure_tasks(config: dict):
@@ -700,14 +876,27 @@ def _ensure_tasks(config: dict):
 
     # Define common ROS tasks if not present
     default_tasks = {
-        "build": "colcon build",
-        "test": "colcon test",
-        "clean": "rm -rf build install log",
+        "build": {
+            "cmd": "colcon build",
+            "description": "Build the ROS workspace",
+        },
+        "test": {
+            "cmd": "colcon test",
+            "description": "Run tests for the workspace",
+        },
+        "clean": {
+            "cmd": "rm -rf build install log",
+            "description": "Clean build artifacts (build, install, log directories)",
+        },
     }
 
-    for task_name, task_cmd in default_tasks.items():
+    for task_name, task_config in default_tasks.items():
         if task_name not in tasks:
-            tasks[task_name] = task_cmd
+            # Create inline table for task configuration
+            task_table = tomlkit.inline_table()
+            task_table["cmd"] = task_config["cmd"]
+            task_table["description"] = task_config["description"]
+            tasks[task_name] = task_table
 
     config["tasks"] = tasks
 
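
A minimal sketch of what the inline-table tasks serialize to, assuming tomlkit's documented API (the cmd/description values are the ones added in this diff):

    import tomlkit

    doc = tomlkit.document()
    tasks = tomlkit.table()

    build = tomlkit.inline_table()
    build["cmd"] = "colcon build"
    build["description"] = "Build the ROS workspace"
    tasks["build"] = build

    doc["tasks"] = tasks
    print(tomlkit.dumps(doc))
    # Prints roughly:
    # [tasks]
    # build = {cmd = "colcon build", description = "Build the ROS workspace"}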