pixi-ros 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pixi_ros/__init__.py +3 -0
- pixi_ros/cli.py +77 -0
- pixi_ros/config.py +34 -0
- pixi_ros/data/README.md +56 -0
- pixi_ros/data/README_PIXI.md.template +125 -0
- pixi_ros/data/conda-forge.yaml +1049 -0
- pixi_ros/init.py +548 -0
- pixi_ros/mappings.py +298 -0
- pixi_ros/package_xml.py +183 -0
- pixi_ros/utils.py +80 -0
- pixi_ros/workspace.py +213 -0
- pixi_ros-0.1.0.dist-info/METADATA +212 -0
- pixi_ros-0.1.0.dist-info/RECORD +15 -0
- pixi_ros-0.1.0.dist-info/WHEEL +4 -0
- pixi_ros-0.1.0.dist-info/entry_points.txt +2 -0
pixi_ros/init.py
ADDED
@@ -0,0 +1,548 @@
"""Initialize pixi.toml for ROS workspaces."""

from pathlib import Path

import tomlkit
import typer
from rattler import Channel, Gateway, Platform
from rich.console import Console
from rich.panel import Panel
from rich.table import Table
from rich.text import Text

from pixi_ros.mappings import expand_gl_requirements, map_ros_to_conda, validate_distro
from pixi_ros.utils import detect_cmake_version_requirement
from pixi_ros.workspace import discover_packages, find_workspace_root

console = Console()

def init_workspace(distro: str, workspace_path: Path | None = None) -> bool:
    """
    Initialize or update pixi.toml for a ROS workspace.

    Args:
        distro: ROS distribution (e.g., "humble", "iron", "jazzy")
        workspace_path: Path to workspace root (defaults to current directory)

    Returns:
        True if successful, False otherwise

    Raises:
        typer.Exit: If validation fails or workspace not found
    """
    # Validate distro
    if not validate_distro(distro):
        typer.echo(f"Error: Unsupported ROS distribution '{distro}'", err=True)
        typer.echo("Supported distros: humble, iron, jazzy, rolling", err=True)
        raise typer.Exit(code=1)

    # Find workspace root
    if workspace_path is None:
        workspace_path = find_workspace_root()
        if workspace_path is None:
            typer.echo(
                "Error: Could not find ROS workspace. "
                "Make sure you're in a directory with ROS packages.",
                err=True,
            )
            raise typer.Exit(code=1)
    else:
        workspace_path = workspace_path.resolve()

    typer.echo(f"Initializing ROS {distro} workspace at: {workspace_path}")

    # Discover packages in workspace
    try:
        packages = discover_packages(workspace_path)
        if not packages:
            typer.echo(
                "Warning: No ROS packages found in workspace. "
                "Creating minimal pixi.toml.",
                err=True,
            )
    except ValueError as e:
        typer.echo(f"Error discovering packages: {e}", err=True)
        raise typer.Exit(code=1) from e

    if packages:
        package_names = ", ".join(p.name for p in packages)
        typer.echo(f"Found {len(packages)} package(s): {package_names}")

    # Load or create pixi.toml
    pixi_toml_path = workspace_path / "pixi.toml"
    if pixi_toml_path.exists():
        typer.echo(f"Updating existing {pixi_toml_path}")
        try:
            with open(pixi_toml_path) as f:
                pixi_config = tomlkit.load(f)
        except Exception as e:
            typer.echo(f"Error reading pixi.toml: {e}", err=True)
            raise typer.Exit(code=1) from e
    else:
        typer.echo(f"Creating new {pixi_toml_path}")
        pixi_config = tomlkit.document()

    # Display discovered dependencies
    _display_dependencies(packages, distro)

    # Update configuration
    _ensure_workspace_section(pixi_config, workspace_path)
    _ensure_channels(pixi_config, distro)
    _ensure_dependencies(pixi_config, packages, distro)
    _ensure_tasks(pixi_config)
    _ensure_activation(pixi_config)

    # Write pixi.toml
    try:
        with open(pixi_toml_path, "w") as f:
            tomlkit.dump(pixi_config, f)

        # Create README_PIXI.md to help users
        _create_readme(workspace_path, distro)
        typer.secho(
            f"✓ Successfully initialized {pixi_toml_path}", fg="green", bold=True
        )

        # Inform about README
        readme_path = workspace_path / "README_PIXI.md"
        readme_created = readme_path.exists()

        # Display helpful next steps with Rich
        typer.echo("")

        # Build the content for the panel
        content = Text()

        if readme_created:
            content.append("📖 Created README_PIXI.md\n", style="green bold")
            content.append(
                " Check it out for detailed usage instructions!\n\n", style="dim"
            )

        content.append("Next steps:\n", style="cyan bold")
        content.append(" 1. Install dependencies: ", style="white")
        content.append("pixi install\n", style="yellow bold")
        content.append(" 2. Build workspace: ", style="white")
        content.append("pixi run build\n", style="yellow bold")
        content.append(" 3. Run tests: ", style="white")
        content.append("pixi run test\n\n", style="yellow bold")

        content.append("💡 Tip: ", style="blue bold")
        content.append("Activate the workspace with ", style="white")
        content.append("pixi shell", style="yellow bold")
        content.append("\n and then run ROS commands directly.", style="dim")

        # Display in a nice panel
        panel = Panel(
            content,
            title="[bold green]✓ Successfully Initialized[/bold green]",
            border_style="green",
            padding=(1, 2),
        )
        console.print(panel)

        return True
    except Exception as e:
        typer.echo(f"Error writing pixi.toml: {e}", err=True)
        raise typer.Exit(code=1) from e
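
# Example usage (a sketch, not part of the distributed file): assuming pixi-ros
# is installed and ~/ros2_ws is a colcon workspace containing ROS packages,
# initializing it from Python would look like:
#
#     from pathlib import Path
#     from pixi_ros.init import init_workspace
#
#     init_workspace("humble", Path.home() / "ros2_ws")
#
# On success this writes pixi.toml (and README_PIXI.md, if absent) into the
# workspace root and prints the "next steps" panel built above.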

def _display_dependencies(packages, distro: str):
    """Display discovered dependencies in a rich table."""
    if not packages:
        return

    workspace_pkg_names = {pkg.name for pkg in packages}

    # Collect dependencies organized by ROS package and type
    # Structure: {pkg_name: {dep_type: {ros_dep: [conda_packages]}}}
    pkg_deps: dict[str, dict[str, dict[str, list[str]]]] = {}

    for pkg in packages:
        pkg_deps[pkg.name] = {"Build": {}, "Runtime": {}, "Test": {}}

        # Build dependencies
        for ros_dep in pkg.get_all_build_dependencies():
            if ros_dep in workspace_pkg_names:
                continue
            conda_packages = map_ros_to_conda(ros_dep, distro)
            if conda_packages:
                pkg_deps[pkg.name]["Build"][ros_dep] = conda_packages

        # Runtime dependencies
        for ros_dep in pkg.get_all_runtime_dependencies():
            if ros_dep in workspace_pkg_names:
                continue
            conda_packages = map_ros_to_conda(ros_dep, distro)
            if conda_packages:
                pkg_deps[pkg.name]["Runtime"][ros_dep] = conda_packages

        # Test dependencies
        for ros_dep in pkg.get_all_test_dependencies():
            if ros_dep in workspace_pkg_names:
                continue
            conda_packages = map_ros_to_conda(ros_dep, distro)
            if conda_packages:
                pkg_deps[pkg.name]["Test"][ros_dep] = conda_packages

    # Check if any external dependencies exist
    has_deps = any(
        any(pkg_deps[pkg_name][dep_type] for dep_type in ["Build", "Runtime", "Test"])
        for pkg_name in pkg_deps
    )

    if not has_deps:
        console.print("\n[yellow]No external dependencies found[/yellow]")
        return

    console.print("")

    # Display one table per ROS package with all dependency types
    for pkg_name in sorted(pkg_deps.keys()):
        pkg_info = pkg_deps[pkg_name]

        # Collect all dependencies for this package across all types
        all_deps = []
        for dep_type in ["Build", "Runtime", "Test"]:
            for ros_dep in sorted(pkg_info[dep_type].keys()):
                conda_pkgs = pkg_info[dep_type][ros_dep]
                conda_str = ", ".join(conda_pkgs)
                all_deps.append((ros_dep, dep_type, conda_str))

        # Skip packages with no external dependencies
        if not all_deps:
            continue

        # Create table for this package
        table = Table(
            title=f"Package: {pkg_name}",
            show_header=True,
            header_style="bold cyan",
        )
        table.add_column("ROS Dependency", style="yellow")
        table.add_column("Type", style="blue")
        table.add_column("Conda Packages", style="green")

        # Add all dependencies for this package
        for ros_dep, dep_type, conda_str in all_deps:
            table.add_row(ros_dep, dep_type, conda_str)

        console.print(table)
        console.print("")
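
# Illustration (hypothetical names, not from the package itself): for a
# workspace with one package "demo_driver" that build-depends on rclcpp under
# the "humble" distro, the structure built above would look roughly like:
#
#     {"demo_driver": {
#         "Build": {"rclcpp": ["ros-humble-rclcpp"]},
#         "Runtime": {},
#         "Test": {},
#     }}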

def _ensure_workspace_section(config: dict, workspace_path: Path):
    """Ensure workspace section exists with basic config."""
    if "workspace" not in config:
        config["workspace"] = {}

    workspace = config["workspace"]

    # Set name if not present
    if "name" not in workspace:
        workspace["name"] = workspace_path.name

    # Set channels if not present
    if "channels" not in workspace:
        workspace["channels"] = []

    # Set platforms if not present
    if "platforms" not in workspace:
        # Only add the current platform by default
        current_platform = str(Platform.current())
        workspace["platforms"] = [current_platform]

def _ensure_channels(config: dict, distro: str):
    """Ensure required ROS channels are present."""
    workspace = config.setdefault("workspace", {})
    channels = workspace.setdefault("channels", [])

    # Required channels for ROS
    channel_host = "https://prefix.dev"
    required_channels = [
        f"{channel_host}/robostack-{distro}",
        f"{channel_host}/conda-forge",
    ]

    for channel in required_channels:
        if channel not in channels:
            channels.append(channel)

    workspace["channels"] = channels
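
# For a workspace directory named "ros2_ws" initialized for "humble" on
# 64-bit Linux, the two helpers above produce roughly this TOML (a sketch;
# the platform string follows whatever Platform.current() reports):
#
#     [workspace]
#     name = "ros2_ws"
#     channels = [
#         "https://prefix.dev/robostack-humble",
#         "https://prefix.dev/conda-forge",
#     ]
#     platforms = ["linux-64"]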

def _check_package_availability(
    packages: list[str], channels: list[str], platform: Platform
) -> dict[str, bool]:
    """
    Check if packages are available in the given channels.

    Args:
        packages: List of conda package names to check
        channels: List of channel URLs
        platform: Platform to check for

    Returns:
        Dictionary mapping package names to availability (True/False)
    """
    import asyncio

    availability = dict.fromkeys(packages, False)

    try:
        # Create gateway for fetching repo data
        gateway = Gateway()

        # Convert channel URLs to Channel objects
        channel_objects = [Channel(url) for url in channels]

        # Query all channels at once (gateway.query is async)
        repo_data_by_channel = asyncio.run(
            gateway.query(
                channel_objects,
                [platform],
                specs=packages,
                recursive=False,  # Don't fetch dependencies
            )
        )

        # repo_data_by_channel is a list of lists (one per channel)
        # Check all channels for each package
        for channel_records in repo_data_by_channel:
            for record in channel_records:
                # Check if any of our packages match this record
                for package_name in packages:
                    if record.name.normalized == package_name.lower():
                        availability[package_name] = True

    except Exception as e:
        # If the query fails, log the error but continue
        # (all packages stay marked as unavailable)
        console.print(
            f"[yellow]Warning: Could not check package availability: {e}[/yellow]"
        )

    return availability
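
# Example (hypothetical values): with the RoboStack and conda-forge channels
# configured, a call like
#
#     _check_package_availability(
#         ["ros-humble-rclcpp", "no-such-package"], channels, Platform.current()
#     )
#
# would be expected to return
# {"ros-humble-rclcpp": True, "no-such-package": False}, since any returned
# record whose normalized name matches marks that package as available.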

def _ensure_dependencies(config: dict, packages, distro: str):
    """Ensure all ROS dependencies are present with comments showing source."""
    # Track which packages depend on which conda packages
    # conda_dep -> set of package names
    dep_sources: dict[str, set[str]] = {}
    # Track version constraints for special dependencies
    dep_versions: dict[str, str] = {}
    workspace_pkg_names = {pkg.name for pkg in packages}

    # Detect special version requirements from package files
    for pkg in packages:
        # Check for cmake version requirements
        pkg_path = pkg.path.parent
        cmake_version = detect_cmake_version_requirement(pkg_path)
        if cmake_version:
            dep_versions["cmake"] = cmake_version

    # Collect dependencies from each package
    for pkg in packages:
        for ros_dep in pkg.get_all_dependencies():
            # Skip workspace packages (they're built locally)
            if ros_dep in workspace_pkg_names:
                continue

            # Map to conda packages
            conda_packages = map_ros_to_conda(ros_dep, distro)

            # Skip if no conda packages were returned
            if not conda_packages:
                continue

            for conda_dep in conda_packages:
                if conda_dep:
                    if conda_dep not in dep_sources:
                        dep_sources[conda_dep] = set()
                    dep_sources[conda_dep].add(pkg.name)

    # Expand GL requirements (REQUIRE_GL, REQUIRE_OPENGL) to platform-specific
    # packages. This replaces placeholder strings with actual conda packages.
    expanded_dep_sources: dict[str, set[str]] = {}
    all_conda_packages = list(dep_sources.keys())
    expanded_packages = expand_gl_requirements(all_conda_packages)

    # Rebuild dep_sources with expanded packages
    for expanded_pkg in expanded_packages:
        # For expanded packages, merge the sources from the placeholder packages
        sources = set()
        for original_pkg, pkg_sources in dep_sources.items():
            if original_pkg == expanded_pkg:
                # Direct match
                sources.update(pkg_sources)
            elif original_pkg in ("REQUIRE_GL", "REQUIRE_OPENGL"):
                # This was a placeholder; include its sources for all expanded packages
                sources.update(pkg_sources)

        if sources:
            expanded_dep_sources[expanded_pkg] = sources

    dep_sources = expanded_dep_sources

    # Create or get dependencies table
    if "dependencies" not in config:
        config["dependencies"] = tomlkit.table()
    dependencies = config["dependencies"]

    # Add base ROS dependencies with comment
    base_deps = {
        f"ros-{distro}-ros-base": "*",
        "pkg-config": "*",
        "compilers": "*",
        "make": "*",
        "ninja": "*",
    }

    # Add newline and comment before base deps if dependencies table is empty
    if len(dependencies) == 0:
        dependencies.add(tomlkit.comment("Base ROS dependencies"))

    for dep, version in base_deps.items():
        if dep not in dependencies:
            dependencies[dep] = version

    # Add ros2cli packages
    ros2cli_deps = map_ros_to_conda("ros2cli", distro)
    if ros2cli_deps:
        for conda_pkg in ros2cli_deps:
            if conda_pkg and conda_pkg not in dependencies:
                dependencies[conda_pkg] = "*"

    # Add build tools
    if "colcon-common-extensions" not in dependencies:
        dependencies.add(tomlkit.nl())
        dependencies.add(tomlkit.comment("Build tools"))
        dependencies["colcon-common-extensions"] = "*"

    # Add cmake with version constraint if detected
    if "cmake" in dep_versions and "cmake" not in dependencies:
        dependencies["cmake"] = dep_versions["cmake"]

    # Add package dependencies
    if dep_sources:
        dependencies.add(tomlkit.nl())
        dependencies.add(tomlkit.comment("Workspace dependencies"))

        # Get channels and platform for availability checking
        channels = config.get("workspace", {}).get("channels", [])
        current_platform = Platform.current()

        # Get list of packages to check
        packages_to_check = [
            conda_dep
            for conda_dep in dep_sources.keys()
            if conda_dep not in dependencies
        ]

        # Check package availability if channels are configured
        availability = {}
        if channels and packages_to_check:
            typer.echo("Checking package availability in channels...")
            availability = _check_package_availability(
                packages_to_check, channels, current_platform
            )

        # Add all dependencies in alphabetical order
        available_packages = []
        unavailable_packages = []

        for conda_dep in sorted(dep_sources.keys()):
            if conda_dep not in dependencies:
                # Check if we have availability info
                is_available = availability.get(
                    conda_dep, True
                )  # Default to True if not checked

                if is_available:
                    available_packages.append(conda_dep)
                else:
                    unavailable_packages.append(conda_dep)

        # Add available packages
        for conda_dep in available_packages:
            version = dep_versions.get(conda_dep, "*")
            dependencies[conda_dep] = version

        # Add unavailable packages as comments
        if unavailable_packages:
            dependencies.add(tomlkit.nl())
            dependencies.add(
                tomlkit.comment(
                    "The following packages were not found in the configured channels:"
                )
            )
            for conda_dep in unavailable_packages:
                version = dep_versions.get(conda_dep, "*")
                dependencies.add(
                    tomlkit.comment(f'{conda_dep} = "{version}" # NOT FOUND')
                )

    config["dependencies"] = dependencies
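
# Sketch of the resulting [dependencies] table for a "humble" workspace whose
# packages depend on rclcpp (hypothetical example; actual entries depend on
# the workspace's package.xml files and on what the channels provide):
#
#     [dependencies]
#     # Base ROS dependencies
#     ros-humble-ros-base = "*"
#     pkg-config = "*"
#     compilers = "*"
#     make = "*"
#     ninja = "*"
#     ros-humble-ros2cli = "*"
#
#     # Build tools
#     colcon-common-extensions = "*"
#
#     # Workspace dependencies
#     ros-humble-rclcpp = "*"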

def _ensure_tasks(config: dict):
    """Ensure common ROS tasks are defined."""
    tasks = config.setdefault("tasks", {})

    # Define common ROS tasks if not present
    default_tasks = {
        "build": "colcon build",
        "test": "colcon test",
        "clean": "rm -rf build install log",
    }

    for task_name, task_cmd in default_tasks.items():
        if task_name not in tasks:
            tasks[task_name] = task_cmd

    config["tasks"] = tasks
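
# The default tasks render in pixi.toml as shown below and are invoked with,
# e.g., "pixi run build":
#
#     [tasks]
#     build = "colcon build"
#     test = "colcon test"
#     clean = "rm -rf build install log"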

def _ensure_activation(config: dict):
    """Ensure activation section exists to source ROS setup."""
    if "activation" not in config:
        # Add comment before the activation section
        config.add(tomlkit.nl())
        config.add(
            tomlkit.comment(
                "Scripts to source on environment activation, "
                "found after first colcon build."
            )
        )
        config["activation"] = tomlkit.table()
        config["activation"]["scripts"] = ["install/setup.bash"]
    elif "scripts" not in config["activation"]:
        # Just add scripts if activation exists but scripts don't
        config["activation"]["scripts"] = ["install/setup.bash"]
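
# Rendered in pixi.toml this becomes roughly:
#
#     # Scripts to source on environment activation, found after first colcon build.
#     [activation]
#     scripts = ["install/setup.bash"]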

def _create_readme(workspace_path: Path, distro: str):
    """Create README_PIXI.md to help users understand the pixi-ros workflow."""
    readme_path = workspace_path / "README_PIXI.md"

    # Don't overwrite existing README_PIXI.md
    if readme_path.exists():
        return

    try:
        # Load template from data directory
        template_path = Path(__file__).parent / "data" / "README_PIXI.md.template"
        with open(template_path) as f:
            template_content = f.read()

        # Format template with distro
        readme_content = template_content.format(distro=distro)

        # Write to workspace
        with open(readme_path, "w") as f:
            f.write(readme_content)
    except Exception as e:
        # Don't fail if we can't create the README
        typer.echo(f"Warning: Could not create README_PIXI.md: {e}", err=True)