foodforthought-cli 0.2.8__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116)
  1. ate/__init__.py +6 -0
  2. ate/__main__.py +16 -0
  3. ate/auth/__init__.py +1 -0
  4. ate/auth/device_flow.py +141 -0
  5. ate/auth/token_store.py +96 -0
  6. ate/behaviors/__init__.py +12 -0
  7. ate/behaviors/approach.py +399 -0
  8. ate/cli.py +855 -4551
  9. ate/client.py +90 -0
  10. ate/commands/__init__.py +168 -0
  11. ate/commands/auth.py +389 -0
  12. ate/commands/bridge.py +448 -0
  13. ate/commands/data.py +185 -0
  14. ate/commands/deps.py +111 -0
  15. ate/commands/generate.py +384 -0
  16. ate/commands/memory.py +907 -0
  17. ate/commands/parts.py +166 -0
  18. ate/commands/primitive.py +399 -0
  19. ate/commands/protocol.py +288 -0
  20. ate/commands/recording.py +524 -0
  21. ate/commands/repo.py +154 -0
  22. ate/commands/simulation.py +291 -0
  23. ate/commands/skill.py +303 -0
  24. ate/commands/skills.py +487 -0
  25. ate/commands/team.py +147 -0
  26. ate/commands/workflow.py +271 -0
  27. ate/detection/__init__.py +38 -0
  28. ate/detection/base.py +142 -0
  29. ate/detection/color_detector.py +399 -0
  30. ate/detection/trash_detector.py +322 -0
  31. ate/drivers/__init__.py +18 -6
  32. ate/drivers/ble_transport.py +405 -0
  33. ate/drivers/mechdog.py +360 -24
  34. ate/drivers/wifi_camera.py +477 -0
  35. ate/interfaces/__init__.py +16 -0
  36. ate/interfaces/base.py +2 -0
  37. ate/interfaces/sensors.py +247 -0
  38. ate/llm_proxy.py +239 -0
  39. ate/memory/__init__.py +35 -0
  40. ate/memory/cloud.py +244 -0
  41. ate/memory/context.py +269 -0
  42. ate/memory/embeddings.py +184 -0
  43. ate/memory/export.py +26 -0
  44. ate/memory/merge.py +146 -0
  45. ate/memory/migrate/__init__.py +34 -0
  46. ate/memory/migrate/base.py +89 -0
  47. ate/memory/migrate/pipeline.py +189 -0
  48. ate/memory/migrate/sources/__init__.py +13 -0
  49. ate/memory/migrate/sources/chroma.py +170 -0
  50. ate/memory/migrate/sources/pinecone.py +120 -0
  51. ate/memory/migrate/sources/qdrant.py +110 -0
  52. ate/memory/migrate/sources/weaviate.py +160 -0
  53. ate/memory/reranker.py +353 -0
  54. ate/memory/search.py +26 -0
  55. ate/memory/store.py +548 -0
  56. ate/recording/__init__.py +42 -3
  57. ate/recording/session.py +12 -2
  58. ate/recording/visual.py +416 -0
  59. ate/robot/__init__.py +142 -0
  60. ate/robot/agentic_servo.py +856 -0
  61. ate/robot/behaviors.py +493 -0
  62. ate/robot/ble_capture.py +1000 -0
  63. ate/robot/ble_enumerate.py +506 -0
  64. ate/robot/calibration.py +88 -3
  65. ate/robot/calibration_state.py +388 -0
  66. ate/robot/commands.py +143 -11
  67. ate/robot/direction_calibration.py +554 -0
  68. ate/robot/discovery.py +104 -2
  69. ate/robot/llm_system_id.py +654 -0
  70. ate/robot/locomotion_calibration.py +508 -0
  71. ate/robot/marker_generator.py +611 -0
  72. ate/robot/perception.py +502 -0
  73. ate/robot/primitives.py +614 -0
  74. ate/robot/profiles.py +6 -0
  75. ate/robot/registry.py +5 -2
  76. ate/robot/servo_mapper.py +1153 -0
  77. ate/robot/skill_upload.py +285 -3
  78. ate/robot/target_calibration.py +500 -0
  79. ate/robot/teach.py +515 -0
  80. ate/robot/types.py +242 -0
  81. ate/robot/visual_labeler.py +9 -0
  82. ate/robot/visual_servo_loop.py +494 -0
  83. ate/robot/visual_servoing.py +570 -0
  84. ate/robot/visual_system_id.py +906 -0
  85. ate/transports/__init__.py +121 -0
  86. ate/transports/base.py +394 -0
  87. ate/transports/ble.py +405 -0
  88. ate/transports/hybrid.py +444 -0
  89. ate/transports/serial.py +345 -0
  90. ate/urdf/__init__.py +30 -0
  91. ate/urdf/capture.py +582 -0
  92. ate/urdf/cloud.py +491 -0
  93. ate/urdf/collision.py +271 -0
  94. ate/urdf/commands.py +708 -0
  95. ate/urdf/depth.py +360 -0
  96. ate/urdf/inertial.py +312 -0
  97. ate/urdf/kinematics.py +330 -0
  98. ate/urdf/lifting.py +415 -0
  99. ate/urdf/meshing.py +300 -0
  100. ate/urdf/models/__init__.py +110 -0
  101. ate/urdf/models/depth_anything.py +253 -0
  102. ate/urdf/models/sam2.py +324 -0
  103. ate/urdf/motion_analysis.py +396 -0
  104. ate/urdf/pipeline.py +468 -0
  105. ate/urdf/scale.py +256 -0
  106. ate/urdf/scan_session.py +411 -0
  107. ate/urdf/segmentation.py +299 -0
  108. ate/urdf/synthesis.py +319 -0
  109. ate/urdf/topology.py +336 -0
  110. ate/urdf/validation.py +371 -0
  111. {foodforthought_cli-0.2.8.dist-info → foodforthought_cli-0.3.0.dist-info}/METADATA +1 -1
  112. foodforthought_cli-0.3.0.dist-info/RECORD +166 -0
  113. {foodforthought_cli-0.2.8.dist-info → foodforthought_cli-0.3.0.dist-info}/WHEEL +1 -1
  114. foodforthought_cli-0.2.8.dist-info/RECORD +0 -73
  115. {foodforthought_cli-0.2.8.dist-info → foodforthought_cli-0.3.0.dist-info}/entry_points.txt +0 -0
  116. {foodforthought_cli-0.2.8.dist-info → foodforthought_cli-0.3.0.dist-info}/top_level.txt +0 -0
ate/robot/servo_mapper.py (new file)
@@ -0,0 +1,1153 @@
+ """
+ Agentic Servo Mapper - Automatically discover and map servo capabilities.
+
+ Uses an LLM agent to systematically explore servos and generate primitive skills.
+ Optionally uses camera feedback to verify movements and understand what each
+ servo controls.
+
+ Architecture:
+ - ServoProbe: Low-level servo testing and measurement
+ - ServoMapperAgent: LLM-guided exploration and analysis
+ - PrimitiveGenerator: Creates skill definitions from discovered mappings
+
+ Usage:
+     from ate.robot.servo_mapper import run_servo_mapping
+
+     # Map all servos and generate primitives
+     primitives = run_servo_mapping(
+         serial_port="/dev/cu.usbserial-10",
+         num_servos=13,
+         use_camera=True,
+     )
+ """
+
+ import time
+ import json
+ from dataclasses import dataclass, field, asdict
+ from typing import Optional, List, Dict, Any, Callable, Tuple
+ from enum import Enum
+ from pathlib import Path
+
+ try:
+     import cv2
+     import numpy as np
+     HAS_CV = True
+ except ImportError:
+     HAS_CV = False
+
+ try:
+     from ..llm_proxy import LLMProxy, LLMProxyError
+     HAS_LLM_PROXY = True
+ except ImportError:
+     HAS_LLM_PROXY = False
+
+
+ # ============================================================================
+ # Data Classes
+ # ============================================================================
+
+ class ServoType(Enum):
+     """Classification of servo function."""
+     UNKNOWN = "unknown"
+     LEG_HIP = "leg_hip"  # Hip rotation (forward/back)
+     LEG_SHOULDER = "leg_shoulder"  # Upper leg (lift)
+     LEG_KNEE = "leg_knee"  # Lower leg (extend/retract)
+     ARM_SHOULDER = "arm_shoulder"  # Arm base rotation
+     ARM_ELBOW = "arm_elbow"  # Arm extension
+     GRIPPER = "gripper"  # End effector
+     HEAD_PAN = "head_pan"  # Head left/right
+     HEAD_TILT = "head_tilt"  # Head up/down
+     TAIL = "tail"  # Tail wagging
+
+
+ @dataclass
+ class ServoRange:
+     """Discovered range for a servo."""
+     min_pwm: int = 500  # Minimum safe PWM value
+     max_pwm: int = 2500  # Maximum safe PWM value
+     center_pwm: int = 1500  # Center/neutral position
+     safe_min: int = 500  # Safe minimum (may be narrower than hardware)
+     safe_max: int = 2500  # Safe maximum
+
+     def to_dict(self) -> Dict:
+         return asdict(self)
+
+
+ @dataclass
+ class NamedPosition:
+     """A named position for a servo."""
+     name: str  # e.g., "GRIPPER_OPEN"
+     pwm_value: int  # PWM value for this position
+     description: str = ""  # Human description
+
+
+ @dataclass
+ class ServoMapping:
+     """Complete mapping for a single servo."""
+     servo_id: int
+     servo_type: ServoType = ServoType.UNKNOWN
+     body_part: str = ""  # e.g., "front_left_leg", "gripper"
+     joint_name: str = ""  # e.g., "hip", "elbow"
+     range: ServoRange = field(default_factory=ServoRange)
+     named_positions: List[NamedPosition] = field(default_factory=list)
+     observations: List[str] = field(default_factory=list)
+     confidence: float = 0.0  # Confidence in mapping (0-1)
+
+     def to_dict(self) -> Dict:
+         return {
+             "servo_id": self.servo_id,
+             "servo_type": self.servo_type.value,
+             "body_part": self.body_part,
+             "joint_name": self.joint_name,
+             "range": self.range.to_dict(),
+             "named_positions": [
+                 {"name": p.name, "pwm": p.pwm_value, "description": p.description}
+                 for p in self.named_positions
+             ],
+             "observations": self.observations,
+             "confidence": self.confidence,
+         }
+
+
+ @dataclass
+ class ProbeResult:
+     """Result of probing a servo at a position."""
+     servo_id: int
+     pwm_value: int
+     response_time_ms: float
+     moved: bool  # Whether robot responded
+     image_before: Optional[np.ndarray] = None
+     image_after: Optional[np.ndarray] = None
+     pixel_change: float = 0.0  # Amount of visual change (0-1)
+     error: Optional[str] = None
+
+
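The dataclasses above are plain containers, so a mapping can be built and serialized without touching hardware. An illustrative sketch (the PWM values are examples, taken from the gripper case discussed later in the file):

    from ate.robot.servo_mapper import NamedPosition, ServoMapping, ServoRange, ServoType

    # Describe servo 11 as a gripper with two named positions
    gripper = ServoMapping(
        servo_id=11,
        servo_type=ServoType.GRIPPER,
        body_part="arm",
        joint_name="gripper",
        range=ServoRange(safe_min=500, safe_max=2400),
        named_positions=[
            NamedPosition("OPEN", 500, "Gripper fully open"),
            NamedPosition("CLOSED", 2400, "Gripper fully closed"),
        ],
        confidence=0.9,
    )
    print(gripper.to_dict()["named_positions"])  # JSON-friendly dict, as used for saving/upload
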
+ # ============================================================================
+ # Servo Probe - Low-level testing
+ # ============================================================================
+
+ class ServoProbe:
+     """
+     Low-level servo probing for discovering capabilities.
+
+     Safely tests servos through their range and measures responses.
+     """
+
+     def __init__(
+         self,
+         send_command: Callable[[str, float], str],
+         capture_fn: Optional[Callable[[], np.ndarray]] = None,
+         safe_delay: float = 0.5,
+     ):
+         """
+         Initialize probe.
+
+         Args:
+             send_command: Function to send commands to robot (cmd, wait) -> response
+             capture_fn: Optional function to capture camera images
+             safe_delay: Delay between movements for safety
+         """
+         self.send = send_command
+         self.capture = capture_fn
+         self.safe_delay = safe_delay
+
+     def probe_servo(
+         self,
+         servo_id: int,
+         pwm_value: int,
+         duration_ms: int = 500,
+     ) -> ProbeResult:
+         """
+         Move a servo to a position and measure the result.
+
+         Args:
+             servo_id: Servo ID to test
+             pwm_value: PWM value to send (500-2500)
+             duration_ms: Movement duration in milliseconds
+
+         Returns:
+             ProbeResult with timing and visual change data
+         """
+         # Capture before image
+         image_before = None
+         if self.capture:
+             try:
+                 image_before = self.capture()
+             except Exception:
+                 pass
+
+         # Send command and measure response time
+         start_time = time.time()
+         try:
+             response = self.send(
+                 f"dog.set_servo({servo_id}, {pwm_value}, {duration_ms})",
+                 duration_ms / 1000 + 0.2
+             )
+             response_time = (time.time() - start_time) * 1000
+             moved = True
+             error = None
+         except Exception as e:
+             response_time = 0
+             moved = False
+             error = str(e)
+
+         # Wait for movement to complete
+         time.sleep(self.safe_delay)
+
+         # Capture after image
+         image_after = None
+         pixel_change = 0.0
+         if self.capture and image_before is not None:
+             try:
+                 image_after = self.capture()
+                 # Calculate visual difference
+                 if image_after is not None and image_after.size > 0:
+                     diff = cv2.absdiff(image_before, image_after)
+                     pixel_change = np.mean(diff) / 255.0
+             except Exception:
+                 pass
+
+         return ProbeResult(
+             servo_id=servo_id,
+             pwm_value=pwm_value,
+             response_time_ms=response_time,
+             moved=moved,
+             image_before=image_before,
+             image_after=image_after,
+             pixel_change=pixel_change,
+             error=error,
+         )
+
+     def find_range(
+         self,
+         servo_id: int,
+         start_pwm: int = 1500,
+         step: int = 200,
+         min_bound: int = 500,
+         max_bound: int = 2500,
+     ) -> ServoRange:
+         """
+         Find the safe range of motion for a servo.
+
+         Starts from center and expands outward, looking for movement limits.
+
+         Args:
+             servo_id: Servo to test
+             start_pwm: Starting position (center)
+             step: Step size for probing
+             min_bound: Minimum PWM to test
+             max_bound: Maximum PWM to test
+
+         Returns:
+             ServoRange with discovered limits
+         """
+         print(f" Finding range for servo {servo_id}...")
+
+         # Move to center first
+         self.probe_servo(servo_id, start_pwm)
+
+         # Without camera, we can't detect limits - just probe key positions
+         has_camera = self.capture is not None
+
+         if has_camera:
+             # Find minimum with visual feedback
+             safe_min = start_pwm
+             current = start_pwm - step
+             while current >= min_bound:
+                 result = self.probe_servo(servo_id, current)
+                 if not result.moved or result.pixel_change < 0.001:
+                     break
+                 safe_min = current
+                 current -= step
+
+             # Move back to center
+             self.probe_servo(servo_id, start_pwm)
+
+             # Find maximum with visual feedback
+             safe_max = start_pwm
+             current = start_pwm + step
+             while current <= max_bound:
+                 result = self.probe_servo(servo_id, current)
+                 if not result.moved or result.pixel_change < 0.001:
+                     break
+                 safe_max = current
+                 current += step
+         else:
+             # Without camera, probe the full range with key positions
+             # Use known safe ranges for MechDog servos
+             safe_min = min_bound
+             safe_max = max_bound
+
+             # Probe min, center, max to ensure servo responds
+             self.probe_servo(servo_id, min_bound, duration_ms=800)
+             self.probe_servo(servo_id, start_pwm, duration_ms=500)
+             self.probe_servo(servo_id, max_bound, duration_ms=800)
+
+         # Return to center
+         self.probe_servo(servo_id, start_pwm)
+
+         return ServoRange(
+             min_pwm=min_bound,
+             max_pwm=max_bound,
+             center_pwm=start_pwm,
+             safe_min=safe_min,
+             safe_max=safe_max,
+         )
+
+     def sweep_servo(
+         self,
+         servo_id: int,
+         pwm_values: List[int],
+         save_images: bool = False,
+         output_dir: str = "/tmp/servo_sweep",
+     ) -> List[ProbeResult]:
+         """
+         Sweep a servo through multiple positions.
+
+         Args:
+             servo_id: Servo to sweep
+             pwm_values: List of PWM values to test
+             save_images: Whether to save images
+             output_dir: Directory for saved images
+
+         Returns:
+             List of ProbeResults
+         """
+         results = []
+
+         if save_images:
+             Path(output_dir).mkdir(parents=True, exist_ok=True)
+
+         for i, pwm in enumerate(pwm_values):
+             result = self.probe_servo(servo_id, pwm)
+             results.append(result)
+
+             if save_images and result.image_after is not None:
+                 path = f"{output_dir}/servo{servo_id}_pwm{pwm}.jpg"
+                 cv2.imwrite(path, result.image_after)
+
+             print(f" PWM {pwm}: change={result.pixel_change:.3f}")
+
+         return results
+
+
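Because ServoProbe only depends on an injected send_command callable (plus an optional capture_fn), it can be dry-run without a robot. A minimal sketch with a stub transport (the stub below is illustrative, not part of the package):

    from ate.robot.servo_mapper import ServoProbe

    def fake_send(command: str, wait: float) -> str:
        # Stand-in for the serial cmd() helper that run_servo_mapping() builds
        print(f"would send: {command} (wait {wait:.2f}s)")
        return "OK"

    probe = ServoProbe(fake_send, capture_fn=None, safe_delay=0.0)
    result = probe.probe_servo(servo_id=11, pwm_value=1500, duration_ms=300)
    print(result.moved, result.response_time_ms)

Without a capture_fn, pixel_change stays 0.0 and find_range() skips the visual limit search, probing only min/center/max and returning the full bounds as the safe range.
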
+ # ============================================================================
+ # Servo Mapper Agent - LLM-guided exploration
+ # ============================================================================
+
+ class ServoMapperAgent:
+     """
+     LLM-powered agent for discovering servo mappings.
+
+     Uses the LLM to:
+     - Decide which servos to test
+     - Analyze camera images to understand servo function
+     - Generate named positions and primitive skills
+     """
+
+     SYSTEM_PROMPT = """You are a robotics engineer analyzing servo motors on a quadruped robot with an arm attachment.
+
+ The robot has 13 servos:
+ - Servos 1-10: Likely leg servos (4 legs x ~2-3 joints each)
+ - Servos 11-13: Likely arm servos (gripper, shoulder, elbow)
+
+ Your task is to analyze servo test data and determine:
+ 1. What body part each servo controls
+ 2. What type of joint it is (hip, knee, shoulder, gripper, etc.)
+ 3. Named positions (e.g., GRIPPER_OPEN=500, GRIPPER_CLOSED=2400)
+ 4. Safe operating range
+
+ For each servo, you'll receive:
+ - Servo ID
+ - PWM range tested (500-2500 typically)
+ - Visual change measurements at each position
+ - Optional camera images showing before/after
+
+ Respond with JSON:
+ {
+   "servo_id": 11,
+   "body_part": "arm",
+   "joint_name": "gripper",
+   "servo_type": "gripper",
+   "named_positions": [
+     {"name": "OPEN", "pwm": 500, "description": "Gripper fully open"},
+     {"name": "CLOSED", "pwm": 2400, "description": "Gripper fully closed"}
+   ],
+   "safe_range": {"min": 500, "max": 2500},
+   "confidence": 0.9,
+   "reasoning": "Large visual change between extremes, typical gripper behavior"
+ }
+
+ Servo types: leg_hip, leg_shoulder, leg_knee, arm_shoulder, arm_elbow, gripper, head_pan, head_tilt, tail, unknown
+
+ Common patterns:
+ - Gripper: Large visual change, positions at extremes
+ - Leg joints: Moderate change, affects robot height/stance
+ - Arm joints: Affects arm position relative to body"""
+
+     def __init__(self, use_proxy: bool = True):
+         """
+         Initialize agent.
+
+         Args:
+             use_proxy: Use FoodforThought proxy for metering
+         """
+         self.proxy = None
+
+         if use_proxy and HAS_LLM_PROXY:
+             try:
+                 self.proxy = LLMProxy()
+                 if not self.proxy.is_authenticated():
+                     print(" [Agent] Not logged in - run 'ate login'")
+                     self.proxy = None
+             except Exception as e:
+                 print(f" [Agent] Proxy error: {e}")
+
+         if not self.proxy:
+             print(" [Agent] No LLM available - using heuristic analysis")
+
+     def analyze_servo(
+         self,
+         servo_id: int,
+         sweep_results: List[ProbeResult],
+         servo_range: ServoRange,
+     ) -> ServoMapping:
+         """
+         Analyze sweep data to determine servo function.
+
+         Args:
+             servo_id: Servo being analyzed
+             sweep_results: Results from sweep test
+             servo_range: Discovered range
+
+         Returns:
+             ServoMapping with inferred function
+         """
+         if self.proxy:
+             return self._llm_analyze(servo_id, sweep_results, servo_range)
+         else:
+             return self._heuristic_analyze(servo_id, sweep_results, servo_range)
+
+     def _llm_analyze(
+         self,
+         servo_id: int,
+         sweep_results: List[ProbeResult],
+         servo_range: ServoRange,
+     ) -> ServoMapping:
+         """Use LLM to analyze servo data."""
+
+         # Build context
+         sweep_data = [
+             {"pwm": r.pwm_value, "visual_change": round(r.pixel_change, 4)}
+             for r in sweep_results
+         ]
+
+         context = {
+             "servo_id": servo_id,
+             "range": {
+                 "tested_min": servo_range.safe_min,
+                 "tested_max": servo_range.safe_max,
+                 "center": servo_range.center_pwm,
+             },
+             "sweep_data": sweep_data,
+             "total_visual_change": sum(r.pixel_change for r in sweep_results),
+         }
+
+         prompt = f"""Analyze servo {servo_id}:
+
+ {json.dumps(context, indent=2)}
+
+ Based on this data, what does this servo control? Provide your analysis as JSON."""
+
+         try:
+             response = self.proxy.chat(
+                 messages=[{"role": "user", "content": prompt}],
+                 model="claude-3-5-haiku-20241022",
+                 max_tokens=500,
+                 system=self.SYSTEM_PROMPT,
+             )
+
+             # Parse response
+             text = response.content.strip()
+
+             # Extract JSON - handle various formats
+             data = None
+
+             # Try direct parse first
+             try:
+                 data = json.loads(text)
+             except json.JSONDecodeError:
+                 pass
+
+             # Try extracting from markdown code block
+             if data is None and "```" in text:
+                 parts = text.split("```")
+                 for part in parts[1:]:  # Skip text before first ```
+                     part = part.strip()
+                     if part.startswith("json"):
+                         part = part[4:].strip()
+                     # Find the end of the JSON (before next ```)
+                     if part.endswith("```"):
+                         part = part[:-3]
+                     try:
+                         data = json.loads(part.strip())
+                         break
+                     except json.JSONDecodeError:
+                         continue
+
+             # Try finding JSON object in text
+             if data is None:
+                 import re
+                 json_match = re.search(r'\{[^{}]*(?:\{[^{}]*\}[^{}]*)*\}', text, re.DOTALL)
+                 if json_match:
+                     try:
+                         data = json.loads(json_match.group())
+                     except json.JSONDecodeError:
+                         pass
+
+             if data is None:
+                 raise ValueError("Could not extract JSON from response")
+
+             # Build mapping
+             mapping = ServoMapping(
+                 servo_id=servo_id,
+                 servo_type=ServoType(data.get("servo_type", "unknown")),
+                 body_part=data.get("body_part", ""),
+                 joint_name=data.get("joint_name", ""),
+                 range=servo_range,
+                 confidence=data.get("confidence", 0.5),
+                 observations=[data.get("reasoning", "")],
+             )
+
+             # Add named positions
+             for pos in data.get("named_positions", []):
+                 mapping.named_positions.append(NamedPosition(
+                     name=pos["name"],
+                     pwm_value=pos["pwm"],
+                     description=pos.get("description", ""),
+                 ))
+
+             return mapping
+
+         except Exception as e:
+             print(f" [Agent] LLM error: {e}")
+             return self._heuristic_analyze(servo_id, sweep_results, servo_range)
+
+     def _heuristic_analyze(
+         self,
+         servo_id: int,
+         sweep_results: List[ProbeResult],
+         servo_range: ServoRange,
+     ) -> ServoMapping:
+         """Fallback heuristic analysis."""
+
+         # Classify based on servo ID patterns
+         # Common MechDog layout:
+         #   1-3: Front left leg, 4-6: Front right leg
+         #   7-9: Back left leg, 10: Back right (or partial)
+         #   11: Gripper, 12: Shoulder, 13: Elbow
+
+         mapping = ServoMapping(servo_id=servo_id, range=servo_range)
+
+         if servo_id == 11:
+             mapping.servo_type = ServoType.GRIPPER
+             mapping.body_part = "arm"
+             mapping.joint_name = "gripper"
+             mapping.named_positions = [
+                 NamedPosition("OPEN", servo_range.safe_min, "Gripper open"),
+                 NamedPosition("CLOSED", servo_range.safe_max, "Gripper closed"),
+             ]
+             mapping.confidence = 0.9
+
+         elif servo_id == 12:
+             mapping.servo_type = ServoType.ARM_SHOULDER
+             mapping.body_part = "arm"
+             mapping.joint_name = "shoulder"
+             mapping.named_positions = [
+                 NamedPosition("UP", servo_range.safe_min, "Arm raised"),
+                 NamedPosition("DOWN", servo_range.safe_max, "Arm lowered"),
+             ]
+             mapping.confidence = 0.8
+
+         elif servo_id == 13:
+             mapping.servo_type = ServoType.ARM_ELBOW
+             mapping.body_part = "arm"
+             mapping.joint_name = "elbow"
+             mapping.named_positions = [
+                 NamedPosition("RETRACTED", servo_range.center_pwm + 100, "Elbow bent"),
+                 NamedPosition("EXTENDED", servo_range.safe_min, "Elbow straight"),
+             ]
+             mapping.confidence = 0.8
+
+         elif servo_id <= 10:
+             # Leg servo - determine which leg and joint
+             leg_num = (servo_id - 1) // 3
+             joint_num = (servo_id - 1) % 3
+
+             leg_names = ["front_left", "front_right", "back_left", "back_right"]
+             joint_types = [ServoType.LEG_HIP, ServoType.LEG_SHOULDER, ServoType.LEG_KNEE]
+             joint_names = ["hip", "shoulder", "knee"]
+
+             if leg_num < 4:
+                 mapping.body_part = leg_names[leg_num] + "_leg"
+                 mapping.servo_type = joint_types[joint_num] if joint_num < 3 else ServoType.UNKNOWN
+                 mapping.joint_name = joint_names[joint_num] if joint_num < 3 else "unknown"
+                 mapping.confidence = 0.6
+
+         # Add generic positions if none set
+         if not mapping.named_positions:
+             mapping.named_positions = [
+                 NamedPosition("MIN", servo_range.safe_min, "Minimum position"),
+                 NamedPosition("CENTER", servo_range.center_pwm, "Center position"),
+                 NamedPosition("MAX", servo_range.safe_max, "Maximum position"),
+             ]
+
+         return mapping
+
+
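When no LLM proxy is available, analyze_servo() falls back to the ID-based heuristics above, so the classification path can be exercised offline. An illustrative sketch (sweep_results is left empty because the heuristics only look at the servo ID and range):

    from ate.robot.servo_mapper import ServoMapperAgent, ServoRange

    agent = ServoMapperAgent(use_proxy=False)  # prints the "heuristic analysis" notice
    mapping = agent.analyze_servo(
        servo_id=11,
        sweep_results=[],
        servo_range=ServoRange(safe_min=500, safe_max=2400),
    )
    print(mapping.servo_type.value, [p.name for p in mapping.named_positions])
    # With these heuristics, servo 11 is classified as: gripper ['OPEN', 'CLOSED']
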
+ # ============================================================================
+ # Primitive Generator
+ # ============================================================================
+
+ class PrimitiveGenerator:
+     """
+     Generate primitive skill definitions from servo mappings.
+     """
+
+     def __init__(self, robot_name: str = "mechdog"):
+         self.robot_name = robot_name
+
+     def generate_primitives(
+         self,
+         mappings: List[ServoMapping],
+     ) -> Dict[str, Any]:
+         """
+         Generate primitive skill definitions from mappings.
+
+         Args:
+             mappings: List of servo mappings
+
+         Returns:
+             Dictionary of primitive definitions
+         """
+         primitives = {
+             "robot": self.robot_name,
+             "generated_at": time.strftime("%Y-%m-%d %H:%M:%S"),
+             "servo_count": len(mappings),
+             "constants": {},
+             "primitives": [],
+         }
+
+         # Generate constants for named positions
+         for mapping in mappings:
+             prefix = f"SERVO_{mapping.servo_id}"
+             if mapping.body_part and mapping.joint_name:
+                 prefix = f"{mapping.body_part.upper()}_{mapping.joint_name.upper()}"
+
+             for pos in mapping.named_positions:
+                 const_name = f"{prefix}_{pos.name}"
+                 primitives["constants"][const_name] = pos.pwm_value
+
+         # Generate primitive functions
+         for mapping in mappings:
+             self._add_primitives_for_servo(primitives, mapping)
+
+         # Add compound primitives
+         self._add_compound_primitives(primitives, mappings)
+
+         return primitives
+
+     def _add_primitives_for_servo(
+         self,
+         primitives: Dict,
+         mapping: ServoMapping,
+     ):
+         """Add primitive functions for a single servo."""
+
+         for pos in mapping.named_positions:
+             name = f"{mapping.body_part}_{mapping.joint_name}_{pos.name}".lower()
+             name = name.replace("__", "_").strip("_")
+
+             if not name:
+                 name = f"servo_{mapping.servo_id}_{pos.name}".lower()
+
+             primitive = {
+                 "name": name,
+                 "description": pos.description or f"Move {mapping.joint_name} to {pos.name}",
+                 "servo_id": mapping.servo_id,
+                 "pwm_value": pos.pwm_value,
+                 "body_part": mapping.body_part,
+                 "joint": mapping.joint_name,
+                 "code": f"dog.set_servo({mapping.servo_id}, {pos.pwm_value}, {{duration}})",
+             }
+             primitives["primitives"].append(primitive)
+
+     def _add_compound_primitives(
+         self,
+         primitives: Dict,
+         mappings: List[ServoMapping],
+     ):
+         """Add compound primitives that combine multiple servos."""
+
+         # Find arm servos
+         gripper = next((m for m in mappings if m.servo_type == ServoType.GRIPPER), None)
+         shoulder = next((m for m in mappings if m.servo_type == ServoType.ARM_SHOULDER), None)
+         elbow = next((m for m in mappings if m.servo_type == ServoType.ARM_ELBOW), None)
+
+         if gripper and shoulder and elbow:
+             # Arm up compound
+             primitives["primitives"].append({
+                 "name": "arm_up",
+                 "description": "Raise arm to upright position",
+                 "compound": True,
+                 "steps": [
+                     {"servo_id": elbow.servo_id, "position": "RETRACTED"},
+                     {"servo_id": shoulder.servo_id, "position": "UP"},
+                 ],
+             })
+
+             # Arm down compound
+             primitives["primitives"].append({
+                 "name": "arm_down",
+                 "description": "Lower arm for pickup",
+                 "compound": True,
+                 "steps": [
+                     {"servo_id": shoulder.servo_id, "position": "DOWN"},
+                     {"servo_id": elbow.servo_id, "position": "EXTENDED"},
+                 ],
+             })
+
+             # Grab sequence
+             primitives["primitives"].append({
+                 "name": "grab",
+                 "description": "Complete grab sequence",
+                 "compound": True,
+                 "steps": [
+                     {"primitive": "arm_gripper_open"},
+                     {"primitive": "arm_down"},
+                     {"wait": 0.5},
+                     {"primitive": "arm_gripper_closed"},
+                     {"primitive": "arm_up"},
+                 ],
+             })
+
+     def generate_python_code(
+         self,
+         mappings: List[ServoMapping],
+     ) -> str:
+         """
+         Generate Python code for primitives.
+
+         Args:
+             mappings: Servo mappings
+
+         Returns:
+             Python code string
+         """
+         lines = [
+             '"""',
+             f'Auto-generated primitives for {self.robot_name}.',
+             f'Generated: {time.strftime("%Y-%m-%d %H:%M:%S")}',
+             '"""',
+             '',
+             '# Servo Constants',
+         ]
+
+         for mapping in mappings:
+             prefix = f"SERVO_{mapping.servo_id}"
+             if mapping.body_part and mapping.joint_name:
+                 prefix = f"{mapping.body_part.upper()}_{mapping.joint_name.upper()}"
+
+             lines.append(f"# {mapping.body_part} {mapping.joint_name} (servo {mapping.servo_id})")
+             for pos in mapping.named_positions:
+                 const_name = f"{prefix}_{pos.name}".replace("__", "_")
+                 lines.append(f"{const_name} = {pos.pwm_value}")
+             lines.append("")
+
+         lines.extend([
+             '',
+             '# Primitive Functions',
+             '',
+         ])
+
+         for mapping in mappings:
+             for pos in mapping.named_positions:
+                 func_name = f"{mapping.body_part}_{mapping.joint_name}_{pos.name}".lower()
+                 func_name = func_name.replace("__", "_").strip("_")
+                 if not func_name:
+                     func_name = f"servo_{mapping.servo_id}_{pos.name}".lower()
+
+                 lines.extend([
+                     f"def {func_name}(dog, duration_ms=500):",
+                     f'    """{pos.description or f"Move {mapping.joint_name} to {pos.name}"}"""',
+                     f"    dog.set_servo({mapping.servo_id}, {pos.pwm_value}, duration_ms)",
+                     "",
+                 ])
+
+         # Add compound functions
+         gripper = next((m for m in mappings if m.servo_type == ServoType.GRIPPER), None)
+         shoulder = next((m for m in mappings if m.servo_type == ServoType.ARM_SHOULDER), None)
+         elbow = next((m for m in mappings if m.servo_type == ServoType.ARM_ELBOW), None)
+
+         if gripper and shoulder and elbow:
+             lines.extend([
+                 "",
+                 "# Compound Primitives",
+                 "",
+                 "def arm_up(dog, duration_ms=600):",
+                 '    """Raise arm to upright position."""',
+             ])
+
+             # Find positions
+             elbow_ret = next((p for p in elbow.named_positions if "RETRACT" in p.name.upper()), None)
+             shoulder_up = next((p for p in shoulder.named_positions if "UP" in p.name.upper()), None)
+
+             if elbow_ret and shoulder_up:
+                 lines.extend([
+                     f"    dog.set_servo({elbow.servo_id}, {elbow_ret.pwm_value}, duration_ms)",
+                     f"    dog.set_servo({shoulder.servo_id}, {shoulder_up.pwm_value}, duration_ms)",
+                 ])
+
+             lines.extend([
+                 "",
+                 "def arm_down(dog, duration_ms=600):",
+                 '    """Lower arm for pickup."""',
+             ])
+
+             shoulder_down = next((p for p in shoulder.named_positions if "DOWN" in p.name.upper()), None)
+             elbow_ext = next((p for p in elbow.named_positions if "EXTEND" in p.name.upper()), None)
+
+             if shoulder_down and elbow_ext:
+                 lines.extend([
+                     f"    dog.set_servo({shoulder.servo_id}, {shoulder_down.pwm_value}, duration_ms)",
+                     f"    dog.set_servo({elbow.servo_id}, {elbow_ext.pwm_value}, duration_ms)",
+                 ])
+
+             gripper_open = next((p for p in gripper.named_positions if "OPEN" in p.name.upper()), None)
+             gripper_close = next((p for p in gripper.named_positions if "CLOSE" in p.name.upper()), None)
+
+             if gripper_open and gripper_close:
+                 lines.extend([
+                     "",
+                     "def grab_sequence(dog):",
+                     '    """Complete grab sequence: open, lower, close, raise."""',
+                     "    import time",
+                     f"    dog.set_servo({gripper.servo_id}, {gripper_open.pwm_value}, 400) # Open gripper",
+                     "    time.sleep(0.4)",
+                     "    arm_down(dog, 600)",
+                     "    time.sleep(0.6)",
+                     f"    dog.set_servo({gripper.servo_id}, {gripper_close.pwm_value}, 400) # Close gripper",
+                     "    time.sleep(0.4)",
+                     "    arm_up(dog, 600)",
+                 ])
+
+         return "\n".join(lines)
+
+
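PrimitiveGenerator is also usable on its own, for example to regenerate code from previously saved mappings. A sketch reusing the single gripper mapping from the earlier example (the output path is arbitrary):

    from ate.robot.servo_mapper import (
        NamedPosition, PrimitiveGenerator, ServoMapping, ServoRange, ServoType,
    )

    mappings = [
        ServoMapping(
            servo_id=11,
            servo_type=ServoType.GRIPPER,
            body_part="arm",
            joint_name="gripper",
            range=ServoRange(safe_min=500, safe_max=2400),
            named_positions=[
                NamedPosition("OPEN", 500, "Gripper open"),
                NamedPosition("CLOSED", 2400, "Gripper closed"),
            ],
        ),
    ]

    generator = PrimitiveGenerator("mechdog")
    spec = generator.generate_primitives(mappings)    # JSON-style dict of constants and primitives
    code = generator.generate_python_code(mappings)   # importable module text
    print(sorted(spec["constants"]))                  # e.g. ['ARM_GRIPPER_CLOSED', 'ARM_GRIPPER_OPEN']
    with open("/tmp/mechdog_primitives.py", "w") as f:
        f.write(code)
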
+ # ============================================================================
+ # Main Entry Point
+ # ============================================================================
+
+ def run_servo_mapping(
+     serial_port: str = "/dev/cu.usbserial-10",
+     num_servos: int = 13,
+     use_camera: bool = False,
+     camera_index: int = 0,
+     wifi_camera_ip: Optional[str] = None,
+     use_proxy: bool = True,
+     output_file: Optional[str] = None,
+     save_images: bool = False,
+ ) -> Dict[str, Any]:
+     """
+     Run servo mapping to discover and document all servos.
+
+     Args:
+         serial_port: Robot serial port
+         num_servos: Number of servos to probe
+         use_camera: Use camera for visual verification
+         camera_index: Webcam index (if use_camera=True)
+         wifi_camera_ip: WiFi camera IP (alternative to webcam)
+         use_proxy: Route LLM through FoodforThought for metering
+         output_file: Path to save results
+         save_images: Save images during sweep
+
+     Returns:
+         Dictionary with mappings and generated primitives
+     """
+     print("\n" + "=" * 60)
+     print("SERVO MAPPER - Automated Primitive Discovery")
+     print("=" * 60)
+
+     # Setup serial connection
+     try:
+         import serial
+     except ImportError:
+         print("Error: pyserial required. Install with: pip install pyserial")
+         return {}
+
+     print(f"\nConnecting to robot at {serial_port}...")
+     try:
+         ser = serial.Serial(serial_port, 115200, timeout=1)
+         time.sleep(0.5)
+         ser.read(ser.in_waiting)
+
+         def cmd(command: str, wait: float = 0.3) -> str:
+             ser.write(f"{command}\r\n".encode())
+             time.sleep(wait)
+             return ser.read(ser.in_waiting).decode('utf-8', errors='ignore')
+
+         # Initialize MechDog
+         cmd("from HW_MechDog import MechDog", 1.5)
+         cmd("dog = MechDog()", 1.5)
+         print("Connected!")
+
+     except Exception as e:
+         print(f"Failed to connect: {e}")
+         return {}
+
+     # Setup camera if requested
+     capture_fn = None
+     cap = None
+
+     if use_camera:
+         if wifi_camera_ip:
+             print(f"Using WiFi camera at {wifi_camera_ip}...")
+             import requests
+             from PIL import Image as PILImage
+             import io
+
+             def wifi_capture():
+                 try:
+                     response = requests.get(
+                         f"http://{wifi_camera_ip}/capture",
+                         timeout=2
+                     )
+                     pil_img = PILImage.open(io.BytesIO(response.content))
+                     frame = np.array(pil_img.convert("RGB"))
+                     return cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)
+                 except Exception:
+                     return np.array([])
+
+             capture_fn = wifi_capture
+
+         elif HAS_CV:
+             print(f"Opening webcam {camera_index}...")
+             cap = cv2.VideoCapture(camera_index)
+             if cap.isOpened():
+                 cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
+                 cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
+
+                 def webcam_capture():
+                     for _ in range(2):
+                         cap.read()
+                     ret, frame = cap.read()
+                     return frame if ret else np.array([])
+
+                 capture_fn = webcam_capture
+                 print("Camera ready!")
+             else:
+                 print("Warning: Could not open camera")
+
+     # Create probe and agent
+     probe = ServoProbe(cmd, capture_fn)
+     agent = ServoMapperAgent(use_proxy=use_proxy)
+
+     # Map each servo
+     mappings = []
+     print(f"\nMapping {num_servos} servos...")
+
+     for servo_id in range(1, num_servos + 1):
+         print(f"\n--- Servo {servo_id}/{num_servos} ---")
+
+         # Find range
+         servo_range = probe.find_range(servo_id)
+         print(f" Range: {servo_range.safe_min} - {servo_range.safe_max}")
+
+         # Sweep through range
+         sweep_values = list(range(
+             servo_range.safe_min,
+             servo_range.safe_max + 1,
+             (servo_range.safe_max - servo_range.safe_min) // 5 or 100
+         ))
+
+         sweep_results = probe.sweep_servo(
+             servo_id,
+             sweep_values,
+             save_images=save_images,
+         )
+
+         # Analyze with agent
+         mapping = agent.analyze_servo(servo_id, sweep_results, servo_range)
+         mappings.append(mapping)
+
+         print(f" Identified: {mapping.body_part} {mapping.joint_name} ({mapping.servo_type.value})")
+         print(f" Confidence: {mapping.confidence:.0%}")
+
+         # Return to center
+         probe.probe_servo(servo_id, servo_range.center_pwm)
+
+     # Generate primitives
+     print("\n" + "=" * 60)
+     print("GENERATING PRIMITIVES")
+     print("=" * 60)
+
+     generator = PrimitiveGenerator("mechdog")
+     primitives = generator.generate_primitives(mappings)
+     python_code = generator.generate_python_code(mappings)
+
+     # Print summary
+     print(f"\nDiscovered {len(primitives['primitives'])} primitives:")
+     for p in primitives["primitives"]:
+         print(f" - {p['name']}")
+
+     print(f"\nConstants defined: {len(primitives['constants'])}")
+
+     # Save results
+     results = {
+         "mappings": [m.to_dict() for m in mappings],
+         "primitives": primitives,
+         "python_code": python_code,
+     }
+
+     if output_file:
+         output_path = Path(output_file)
+
+         # Save JSON
+         with open(output_path.with_suffix(".json"), "w") as f:
+             json.dump(results, f, indent=2)
+         print(f"\nSaved JSON to {output_path.with_suffix('.json')}")
+
+         # Save Python code
+         with open(output_path.with_suffix(".py"), "w") as f:
+             f.write(python_code)
+         print(f"Saved Python to {output_path.with_suffix('.py')}")
+
+     # Cleanup
+     if cap:
+         cap.release()
+     ser.close()
+
+     print("\n" + "=" * 60)
+     print("MAPPING COMPLETE")
+     print("=" * 60)
+
+     return results
+
+
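run_servo_mapping() is the end-to-end entry point: it opens the serial REPL, builds the cmd() helper, probes each servo, asks the agent to classify it, and writes both a JSON report and a generated Python module when output_file is given. A typical no-camera invocation might look like this sketch (the port name will differ per machine):

    from ate.robot.servo_mapper import run_servo_mapping

    results = run_servo_mapping(
        serial_port="/dev/cu.usbserial-10",
        num_servos=13,
        use_camera=False,
        output_file="/tmp/mechdog_primitives",  # writes .json and .py siblings
    )
    if results:  # an empty dict means the serial connection failed
        print(len(results["mappings"]), "servos mapped")
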
+ def upload_servo_mapping(
+     results: Dict[str, Any],
+     output_file: str,
+     project_id: Optional[str] = None,
+ ) -> Dict[str, Any]:
+     """
+     Upload servo mapping results to FoodforThought.
+
+     Creates artifacts:
+     - Calibration (raw stage, calibration type): servo mappings and ranges
+     - Generated Skills (skill stage, code type): Python primitives
+
+     Args:
+         results: Results from run_servo_mapping()
+         output_file: Output file path (used for naming)
+         project_id: Optional project ID (creates new if not provided)
+
+     Returns:
+         Dict with project_id and artifact list
+     """
+     from .skill_upload import SkillLibraryUploader
+
+     uploader = SkillLibraryUploader()
+
+     # Extract robot name from output file
+     robot_name = Path(output_file).stem.replace("servo_mappings_", "")
+     if not robot_name:
+         robot_name = "mechdog"
+
+     # Get or create project
+     if not project_id:
+         project_id = uploader.get_or_create_project(
+             f"{robot_name.replace('_', ' ').title()} Servo Mapping",
+             f"Automated servo discovery and primitive generation for {robot_name}",
+         )
+
+     result = {
+         "project_id": project_id,
+         "artifacts": [],
+     }
+
+     # 1. Upload calibration/mapping data as raw + calibration type
+     mappings = results.get("mappings", [])
+     primitives = results.get("primitives", {})
+
+     calibration_response = uploader._request("POST", "/artifacts", json={
+         "projectId": project_id,
+         "name": f"Servo Mapping ({len(mappings)} servos)",
+         "stage": "raw",
+         "type": "calibration",
+         "metadata": {
+             "robot_name": robot_name,
+             "servo_count": len(mappings),
+             "mappings": mappings,
+             "constants": primitives.get("constants", {}),
+             "generator": "ate_servo_mapper_v1",
+             "generated_at": primitives.get("generated_at", time.strftime("%Y-%m-%d %H:%M:%S")),
+             "source": "agentic_servo_mapper",
+         },
+     })
+     calibration_artifact_id = calibration_response.get("artifact", {}).get("id")
+     result["artifacts"].append({
+         "id": calibration_artifact_id,
+         "stage": "raw",
+         "name": f"Servo Mapping ({len(mappings)} servos)",
+     })
+
+     # 2. Upload generated Python code as skill artifact
+     python_code = results.get("python_code", "")
+     if python_code:
+         primitives_list = primitives.get("primitives", [])
+         skill_response = uploader._request("POST", "/artifacts", json={
+             "projectId": project_id,
+             "name": f"Generated Primitives ({len(primitives_list)} functions)",
+             "stage": "skill",
+             "type": "code",
+             "parentArtifactId": calibration_artifact_id,
+             "transformationType": "generation",
+             "transformationNotes": "Python primitives auto-generated from servo mapping via LLM analysis",
+             "metadata": {
+                 "robot_name": robot_name,
+                 "skill_type": "primitive",
+                 "primitive_count": len(primitives_list),
+                 "constant_count": len(primitives.get("constants", {})),
+                 "code": python_code,
+                 "primitives": primitives_list,
+                 "hardware_requirements": ["servos"],
+                 "source": "agentic_servo_mapper",
+                 "generator": "ate_primitive_generator_v1",
+             },
+         })
+         result["artifacts"].append({
+             "id": skill_response.get("artifact", {}).get("id"),
+             "stage": "skill",
+             "name": f"Generated Primitives ({len(primitives_list)} functions)",
+         })
+
+     return result
+
+
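upload_servo_mapping() takes the dict returned by run_servo_mapping() and pushes two linked artifacts (a raw calibration artifact, then a skill artifact generated from it) through SkillLibraryUploader. A hedged sketch of the chained flow, assuming an authenticated FoodforThought session (see the 'ate login' flow referenced above):

    from ate.robot.servo_mapper import run_servo_mapping, upload_servo_mapping

    results = run_servo_mapping(output_file="/tmp/mechdog_primitives")
    if results:
        upload = upload_servo_mapping(results, "/tmp/mechdog_primitives")
        print("project:", upload["project_id"])
        for artifact in upload["artifacts"]:
            print(" ", artifact["stage"], artifact["name"])
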
+ if __name__ == "__main__":
+     import sys
+
+     output = sys.argv[1] if len(sys.argv) > 1 else "/tmp/mechdog_primitives"
+     use_cam = "--camera" in sys.argv
+
+     results = run_servo_mapping(
+         use_camera=use_cam,
+         output_file=output,
+         save_images=use_cam,
+     )
+
+     if results.get("python_code"):
+         print("\n" + "=" * 60)
+         print("GENERATED PYTHON CODE:")
+         print("=" * 60)
+         print(results["python_code"])
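The __main__ block gives the module a minimal command-line form: the first positional argument is the output path stem and --camera enables visual verification. For example, in an environment where the package is installed, something like:

    python -m ate.robot.servo_mapper /tmp/mechdog_primitives --camera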