livepilot 1.9.15 → 1.9.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/.claude-plugin/marketplace.json +1 -1
  2. package/AGENTS.md +1 -1
  3. package/CHANGELOG.md +40 -0
  4. package/README.md +1 -1
  5. package/livepilot/.Codex-plugin/plugin.json +1 -1
  6. package/livepilot/.claude-plugin/plugin.json +1 -1
  7. package/livepilot/skills/livepilot-core/references/overview.md +1 -1
  8. package/m4l_device/livepilot_bridge.js +27 -13
  9. package/mcp_server/__init__.py +1 -1
  10. package/mcp_server/connection.py +24 -2
  11. package/mcp_server/curves.py +3 -3
  12. package/mcp_server/evaluation/fabric.py +1 -1
  13. package/mcp_server/m4l_bridge.py +9 -1
  14. package/mcp_server/memory/technique_store.py +25 -17
  15. package/mcp_server/mix_engine/critics.py +1 -1
  16. package/mcp_server/mix_engine/tools.py +14 -8
  17. package/mcp_server/performance_engine/safety.py +6 -3
  18. package/mcp_server/project_brain/refresh.py +8 -2
  19. package/mcp_server/project_brain/tools.py +12 -12
  20. package/mcp_server/reference_engine/tools.py +16 -15
  21. package/mcp_server/runtime/action_ledger_models.py +10 -3
  22. package/mcp_server/runtime/capability_state.py +3 -2
  23. package/mcp_server/runtime/tools.py +6 -3
  24. package/mcp_server/tools/agent_os.py +47 -39
  25. package/mcp_server/tools/composition.py +114 -32
  26. package/mcp_server/tools/devices.py +15 -1
  27. package/mcp_server/tools/midi_io.py +3 -1
  28. package/mcp_server/tools/research.py +31 -31
  29. package/mcp_server/tools/tracks.py +3 -3
  30. package/mcp_server/translation_engine/tools.py +50 -16
  31. package/package.json +1 -1
  32. package/remote_script/LivePilot/__init__.py +1 -1
  33. package/remote_script/LivePilot/arrangement.py +9 -1
  34. package/remote_script/LivePilot/clips.py +22 -6
  35. package/remote_script/LivePilot/notes.py +9 -1
  36. package/remote_script/LivePilot/server.py +6 -6
@@ -10,7 +10,7 @@
10
10
  {
11
11
  "name": "livepilot",
12
12
  "description": "Agentic production system for Ableton Live 12 — 236 tools, 32 domains, device atlas, spectral perception, technique memory, neo-Riemannian harmony, Euclidean rhythm, species counterpoint, MIDI I/O",
13
- "version": "1.9.15",
13
+ "version": "1.9.16",
14
14
  "author": {
15
15
  "name": "Pilot Studio"
16
16
  },
package/AGENTS.md CHANGED
@@ -1,4 +1,4 @@
1
- # LivePilot v1.9.14 — Ableton Live 12
1
+ # LivePilot v1.9.16 — Ableton Live 12
2
2
 
3
3
  ## Project
4
4
  - **Repo:** This directory (LivePilot)
package/CHANGELOG.md CHANGED
@@ -1,5 +1,45 @@
1
1
  # Changelog
2
2
 
3
+ ## 1.9.16 — Comprehensive Bug Fix Audit (April 2026)
4
+
5
+ ### Critical Fixes
6
+ - **connection.py** — Don't retry TCP commands after timeout (prevents duplicate mutations in Ableton)
7
+ - **connection.py** — Add `send_command_async()` to avoid blocking the asyncio event loop
8
+ - **technique_store.py** — Thread-safe initialization with double-checked locking; add missing `_ensure_initialized()` in `increment_replay`
9
+ - **capability_state.py** — Fix inverted mode logic: offline analyzer is now correctly more restrictive than stale analyzer
10
+ - **server.py** — Fix thread safety: assign `_client_thread` inside lock
11
+ - **action_ledger_models.py** — Thread-safe unique IDs with UUID session suffix
12
+
13
+ ### High-Priority Fixes
14
+ - **notes.py / arrangement.py** — `modify_notes` now applies `mute`, `velocity_deviation`, `release_velocity` (previously silently dropped)
15
+ - **clips.py** — `create_clip` checks `has_clip` first; `set_clip_loop` uses conditional ordering for shrink vs expand
16
+ - **notes.py / arrangement.py** — Fix `transpose_notes` default `time_span` when `from_time > 0`
17
+ - **m4l_bridge.py** — Clear stale response future after timeout
18
+ - **composition.py** — Fix `get_phrase_grid` using section_index as clip_index
19
+ - **devices.py** — Fix `_postflight_loaded_device` always reporting plugins as failed
20
+ - **tracks.py** — Correct input monitoring enum (0=Off, 1=In, 2=Auto); fix `set_group_fold` allowing return tracks
21
+ - **research.py** — Fix browser path casing (`"Instruments"` → `"instruments"`)
22
+ - **midi_io.py** — Fix path traversal check prefix collision
23
+ - **fabric.py** — Distinguish `measured` vs `measured_reject` decision modes
24
+ - **critics.py** — Fix dynamics critic double-counting `over_compressed` + `flat_dynamics`
25
+ - **refresh.py** — Deep-copy freshness objects to prevent mutation leak
26
+ - **mix_engine/tools.py** — Fix `track_count` key (always 0) → use `len(tracks)`
27
+ - **safety.py** — Distinguish `unknown` from `caution` for unrecognized move types
28
+ - **translation_engine** — Fix pan values always 0 (check nested `mixer.panning`)
29
+ - **livepilot_bridge.js** — Track selection by LiveAPI ID (not name); 4-byte UTF-8 support (emoji)
30
+
31
+ ### Medium Fixes
32
+ - Version strings bumped across all files
33
+ - `hashlib.md5` calls use `usedforsecurity=False` (FIPS compat)
34
+ - `.mcp.json` uses portable `node` command
35
+ - README "32 additional tools" → "29"
36
+ - Lazy `asyncio.Lock` creation in M4L bridge
37
+ - `_friendly_error` now includes `command_type` in output
38
+
39
+ ### Test Improvements
40
+ - Tests updated to match corrected capability_state, dynamics critic, and safety logic
41
+ - `test_default_name_detection` now imports production function instead of local copy
42
+
3
43
  ## 1.9.15 — V2 Engine Architecture (April 2026)
4
44
 
5
45
  ### New Engine Packages (12)
package/README.md CHANGED
@@ -448,7 +448,7 @@ Windsurf — `~/.codeium/windsurf/mcp_config.json`:
448
448
 
449
449
  Drag `LivePilot_Analyzer.amxd` onto the master track.
450
450
 
451
- Unlocks 32 additional tools: spectral analysis, key detection,
451
+ Unlocks 29 additional tools: spectral analysis, key detection,
452
452
  sample manipulation, deep device introspection, plugin parameter mapping.
453
453
 
454
454
  > [!IMPORTANT]
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "livepilot",
3
- "version": "1.9.15",
3
+ "version": "1.9.16",
4
4
  "description": "Agentic production system for Ableton Live 12 — 236 tools, 32 domains, device atlas, spectral perception, technique memory, neo-Riemannian harmony, Euclidean rhythm, species counterpoint, MIDI I/O",
5
5
  "author": {
6
6
  "name": "Pilot Studio"
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "livepilot",
3
- "version": "1.9.15",
3
+ "version": "1.9.16",
4
4
  "description": "Agentic production system for Ableton Live 12 — 236 tools, 32 domains, device atlas, spectral perception, technique memory, neo-Riemannian harmony, Euclidean rhythm, species counterpoint, MIDI I/O",
5
5
  "author": {
6
6
  "name": "Pilot Studio"
@@ -1,4 +1,4 @@
1
- # LivePilot v1.9.14 — Architecture & Tool Reference
1
+ # LivePilot v1.9.16 — Architecture & Tool Reference
2
2
 
3
3
  Agentic production system for Ableton Live 12. 236 tools across 32 domains. Device atlas (280+ devices), spectral perception (M4L analyzer), technique memory, automation intelligence (16 curve types, 15 recipes), music theory (Krumhansl-Schmuckler, species counterpoint), generative algorithms (Euclidean rhythm, tintinnabuli, phase shift, additive process), neo-Riemannian harmony (PRL transforms, Tonnetz), MIDI file I/O.
4
4
 
@@ -84,7 +84,7 @@ function anything() {
84
84
  function dispatch(cmd, args) {
85
85
  switch(cmd) {
86
86
  case "ping":
87
- send_response({"ok": true, "version": "1.9.14"});
87
+ send_response({"ok": true, "version": "1.9.16"});
88
88
  break;
89
89
  case "get_params":
90
90
  cmd_get_params(args);
@@ -513,16 +513,17 @@ function cmd_get_selected() {
513
513
  appointed_device: null
514
514
  };
515
515
 
516
- // Selected track
516
+ // Selected track — match by object ID (not name, which can be duplicated)
517
517
  try {
518
518
  cursor_b.goto("live_set view selected_track");
519
519
  result.selected_track_name = cursor_b.get("name").toString();
520
- // Get track index by walking tracks
520
+ var selected_id = cursor_b.id;
521
+ // Get track index by walking tracks and comparing IDs
521
522
  cursor_a.goto("live_set");
522
523
  var tc = cursor_a.getcount("tracks");
523
524
  for (var i = 0; i < tc; i++) {
524
525
  cursor_a.goto("live_set tracks " + i);
525
- if (cursor_a.get("name").toString() === result.selected_track_name) {
526
+ if (cursor_a.id === selected_id) {
526
527
  result.selected_track = i;
527
528
  break;
528
529
  }
@@ -533,7 +534,7 @@ function cmd_get_selected() {
533
534
  var rtc = cursor_a.getcount("return_tracks");
534
535
  for (var j = 0; j < rtc; j++) {
535
536
  cursor_a.goto("live_set return_tracks " + j);
536
- if (cursor_a.get("name").toString() === result.selected_track_name) {
537
+ if (cursor_a.id === selected_id) {
537
538
  result.selected_track = -(j + 1); // -1, -2, ... convention
538
539
  break;
539
540
  }
@@ -542,7 +543,7 @@ function cmd_get_selected() {
542
543
  // Check master track if still not found
543
544
  if (result.selected_track === -1) {
544
545
  cursor_a.goto("live_set master_track");
545
- if (cursor_a.get("name").toString() === result.selected_track_name) {
546
+ if (cursor_a.id === selected_id) {
546
547
  result.selected_track = -1000; // master convention
547
548
  }
548
549
  }
@@ -743,7 +744,7 @@ function base64_decode(str) {
743
744
 
744
745
  function _utf8_decode(bytes) {
745
746
  // Convert a UTF-8 byte array back to a JavaScript string.
746
- // Handles BMP codepoints which covers the text LivePilot exchanges.
747
+ // Handles BMP codepoints and 4-byte sequences (emoji/supplementary planes).
747
748
  var result = "";
748
749
  for (var i = 0; i < bytes.length;) {
749
750
  var b0 = bytes[i];
@@ -754,17 +755,30 @@ function _utf8_decode(bytes) {
754
755
  var b1 = bytes[i + 1];
755
756
  result += String.fromCharCode(((b0 & 0x1F) << 6) | (b1 & 0x3F));
756
757
  i += 2;
757
- } else if (i + 2 < bytes.length) {
758
- var b2 = bytes[i + 1];
759
- var b3 = bytes[i + 2];
758
+ } else if ((b0 & 0xF0) === 0xE0 && i + 2 < bytes.length) {
759
+ // 3-byte sequence (U+0800..U+FFFF)
760
+ var b1_3 = bytes[i + 1];
761
+ var b2_3 = bytes[i + 2];
760
762
  result += String.fromCharCode(
761
763
  ((b0 & 0x0F) << 12) |
762
- ((b2 & 0x3F) << 6) |
763
- (b3 & 0x3F)
764
+ ((b1_3 & 0x3F) << 6) |
765
+ (b2_3 & 0x3F)
764
766
  );
765
767
  i += 3;
768
+ } else if ((b0 & 0xF8) === 0xF0 && i + 3 < bytes.length) {
769
+ // 4-byte sequence (U+10000..U+10FFFF) — emoji and supplementary planes
770
+ var cp = ((b0 & 0x07) << 18) |
771
+ ((bytes[i + 1] & 0x3F) << 12) |
772
+ ((bytes[i + 2] & 0x3F) << 6) |
773
+ (bytes[i + 3] & 0x3F);
774
+ // Encode as UTF-16 surrogate pair
775
+ cp -= 0x10000;
776
+ result += String.fromCharCode(0xD800 + (cp >> 10));
777
+ result += String.fromCharCode(0xDC00 + (cp & 0x3FF));
778
+ i += 4;
766
779
  } else {
767
- break;
780
+ // Skip invalid byte
781
+ i += 1;
768
782
  }
769
783
  }
770
784
  return result;
@@ -1,2 +1,2 @@
1
1
  """LivePilot MCP Server — bridges MCP protocol to Ableton Live."""
2
- __version__ = "1.9.14"
2
+ __version__ = "1.9.16"
@@ -2,6 +2,7 @@
2
2
 
3
3
  from __future__ import annotations
4
4
 
5
+ import asyncio
5
6
  import json
6
7
  import os
7
8
  import socket
@@ -40,6 +41,8 @@ def _friendly_error(code: str, message: str, command_type: str) -> str:
40
41
  """Format an error from the Remote Script into a user-friendly message."""
41
42
  hint = _ERROR_HINTS.get(code, "")
42
43
  parts = [f"[{code}] {message}"]
44
+ if command_type:
45
+ parts.append(f"(while running '{command_type}')")
43
46
  if hint:
44
47
  parts.append(hint)
45
48
  return " ".join(parts)
@@ -143,7 +146,9 @@ class AbletonConnection:
143
146
 
144
147
  Thread-safe: a lock serializes all TCP send/receive cycles to
145
148
  prevent socket corruption when multiple MCP tools fire concurrently.
146
- Retries once on socket errors with a fresh connection.
149
+ Retries once on connection errors (command never reached Ableton).
150
+ Does NOT retry on timeouts — Ableton may have already processed the
151
+ command, and retrying would cause duplicate mutations.
147
152
  """
148
153
  with self._lock:
149
154
  # Ensure we have a connection
@@ -156,7 +161,15 @@ class AbletonConnection:
156
161
 
157
162
  try:
158
163
  response = self._send_raw(command)
159
- except (OSError, AbletonConnectionError):
164
+ except AbletonConnectionError as exc:
165
+ # Don't retry timeouts — Ableton may have processed the command
166
+ if "Timeout" in str(exc):
167
+ raise
168
+ # Retry once with a fresh connection for non-timeout errors
169
+ self.disconnect()
170
+ self.connect()
171
+ response = self._send_raw(command)
172
+ except OSError:
160
173
  # Retry once with a fresh connection
161
174
  self.disconnect()
162
175
  self.connect()
@@ -183,6 +196,15 @@ class AbletonConnection:
183
196
  self._command_log.append(log_entry)
184
197
  return response.get("result", {})
185
198
 
199
+ async def send_command_async(self, command_type: str, params: Optional[dict] = None) -> dict:
200
+ """Async wrapper around send_command that avoids blocking the event loop.
201
+
202
+ Runs the blocking TCP send/receive in a thread pool executor so the
203
+ asyncio event loop remains responsive to other concurrent MCP tools.
204
+ """
205
+ loop = asyncio.get_running_loop()
206
+ return await loop.run_in_executor(None, self.send_command, command_type, params)
207
+
186
208
  # ------------------------------------------------------------------
187
209
  # Command log
188
210
  # ------------------------------------------------------------------
@@ -314,7 +314,7 @@ def _perlin(duration: float, density: int, center: float = 0.5,
314
314
 
315
315
  def _hash_float(x: float, s: float) -> float:
316
316
  """Deterministic pseudo-random float from position + seed."""
317
- h = hashlib.md5(f"{x:.6f}:{s:.6f}".encode()).hexdigest()
317
+ h = hashlib.md5(f"{x:.6f}:{s:.6f}".encode(), usedforsecurity=False).hexdigest()
318
318
  return (int(h[:8], 16) / 0xFFFFFFFF) * 2.0 - 1.0
319
319
 
320
320
  def _smoothstep(t: float) -> float:
@@ -363,7 +363,7 @@ def _brownian(duration: float, density: int, start: float = 0.5,
363
363
  import hashlib
364
364
 
365
365
  def _det_random(i: int, s: float) -> float:
366
- h = hashlib.md5(f"{i}:{s:.6f}".encode()).hexdigest()
366
+ h = hashlib.md5(f"{i}:{s:.6f}".encode(), usedforsecurity=False).hexdigest()
367
367
  return (int(h[:8], 16) / 0xFFFFFFFF) * 2.0 - 1.0
368
368
 
369
369
  points = []
@@ -589,7 +589,7 @@ def _stochastic(duration: float, density: int, center: float = 0.5,
589
589
  import hashlib
590
590
 
591
591
  def _det_random(i: int, s: float) -> float:
592
- h = hashlib.md5(f"{i}:{s:.6f}".encode()).hexdigest()
592
+ h = hashlib.md5(f"{i}:{s:.6f}".encode(), usedforsecurity=False).hexdigest()
593
593
  return (int(h[:8], 16) / 0xFFFFFFFF) * 2.0 - 1.0
594
594
 
595
595
  points = []
@@ -136,7 +136,7 @@ def evaluate_sonic_move(
136
136
  elif keep_change:
137
137
  decision_mode = "measured"
138
138
  else:
139
- decision_mode = "measured"
139
+ decision_mode = "measured_reject"
140
140
 
141
141
  return EvaluationResult(
142
142
  engine=request.engine or "sonic",
@@ -338,13 +338,15 @@ class M4LBridge:
338
338
  self.receiver = receiver
339
339
  self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
340
340
  self._m4l_addr = ("127.0.0.1", 9881)
341
- self._cmd_lock = asyncio.Lock()
341
+ self._cmd_lock: Optional[asyncio.Lock] = None
342
342
 
343
343
  async def send_command(self, command: str, *args: Any, timeout: float = 5.0) -> dict:
344
344
  """Send an OSC command to the M4L device and wait for the response."""
345
345
  if not self.cache.is_connected:
346
346
  return {"error": "LivePilot Analyzer not connected. Drop it on the master track."}
347
347
 
348
+ if self._cmd_lock is None:
349
+ self._cmd_lock = asyncio.Lock()
348
350
  async with self._cmd_lock:
349
351
  # Create a future for the response
350
352
  loop = asyncio.get_running_loop()
@@ -362,6 +364,10 @@ class M4LBridge:
362
364
  result = await asyncio.wait_for(future, timeout=timeout)
363
365
  return result
364
366
  except asyncio.TimeoutError:
367
+ # Clear the stale future so a delayed response doesn't resolve
368
+ # a future that no caller is waiting on
369
+ if self.receiver:
370
+ self.receiver.set_response_future(None)
365
371
  return {"error": "M4L bridge timeout — device may be busy or removed"}
366
372
 
367
373
  async def send_capture(self, command: str, *args: Any, timeout: float = 35.0) -> dict:
@@ -369,6 +375,8 @@ class M4LBridge:
369
375
  if not self.cache.is_connected:
370
376
  return {"error": "LivePilot Analyzer not connected. Drop it on the master track."}
371
377
 
378
+ if self._cmd_lock is None:
379
+ self._cmd_lock = asyncio.Lock()
372
380
  async with self._cmd_lock:
373
381
  # Cancel any stale capture future before creating a new one
374
382
  if self.receiver and self.receiver._capture_future and not self.receiver._capture_future.done():
@@ -37,28 +37,35 @@ class TechniqueStore:
37
37
 
38
38
  Deferred so that a read-only HOME doesn't crash the entire MCP
39
39
  server at import time — memory tools just return errors instead.
40
+ Thread-safe: uses double-checked locking to prevent concurrent
41
+ callers from racing on initialization.
40
42
  """
41
43
  if self._initialized:
42
44
  return
43
- try:
44
- self._base_dir.mkdir(parents=True, exist_ok=True)
45
- except OSError as exc:
46
- raise RuntimeError(
47
- f"Cannot create memory directory {self._base_dir}: {exc}. "
48
- "Memory tools are unavailable."
49
- ) from exc
50
- if self._file.exists():
45
+ with self._lock:
46
+ # Double-check after acquiring lock — another thread may have
47
+ # initialized while we were waiting.
48
+ if self._initialized:
49
+ return
51
50
  try:
52
- with open(self._file, "r") as f:
53
- self._data = json.load(f)
54
- except (json.JSONDecodeError, ValueError):
55
- corrupt = self._file.with_suffix(".json.corrupt")
56
- self._file.rename(corrupt)
51
+ self._base_dir.mkdir(parents=True, exist_ok=True)
52
+ except OSError as exc:
53
+ raise RuntimeError(
54
+ f"Cannot create memory directory {self._base_dir}: {exc}. "
55
+ "Memory tools are unavailable."
56
+ ) from exc
57
+ if self._file.exists():
58
+ try:
59
+ with open(self._file, "r") as f:
60
+ self._data = json.load(f)
61
+ except (json.JSONDecodeError, ValueError):
62
+ corrupt = self._file.with_suffix(".json.corrupt")
63
+ self._file.rename(corrupt)
64
+ self._data = {"version": 1, "techniques": []}
65
+ else:
57
66
  self._data = {"version": 1, "techniques": []}
58
- else:
59
- self._data = {"version": 1, "techniques": []}
60
- self._flush()
61
- self._initialized = True
67
+ self._flush()
68
+ self._initialized = True
62
69
 
63
70
  # ── persistence ──────────────────────────────────────────────
64
71
 
@@ -263,6 +270,7 @@ class TechniqueStore:
263
270
 
264
271
  def increment_replay(self, technique_id: str) -> None:
265
272
  """Increment replay_count and set last_replayed_at."""
273
+ self._ensure_initialized()
266
274
  with self._lock:
267
275
  t = self._find(technique_id)
268
276
  t["replay_count"] = t.get("replay_count", 0) + 1
@@ -134,7 +134,7 @@ def run_dynamics_critic(dynamics: DynamicsState) -> list[MixIssue]:
134
134
  recommended_moves=["bus_compression", "transient_shaping"],
135
135
  ))
136
136
 
137
- if dynamics.crest_factor_db < 3.0 and dynamics.crest_factor_db > 0:
137
+ elif dynamics.crest_factor_db < 3.0 and dynamics.crest_factor_db > 0:
138
138
  issues.append(MixIssue(
139
139
  issue_type="flat_dynamics",
140
140
  critic="dynamics",
@@ -25,7 +25,7 @@ def _fetch_mix_data(ctx: Context) -> dict:
25
25
  ableton = ctx.lifespan_context["ableton"]
26
26
 
27
27
  session_info = ableton.send_command("get_session_info", {})
28
- track_count = session_info.get("track_count", 0)
28
+ track_count = len(session_info.get("tracks", []))
29
29
 
30
30
  track_infos: list[dict] = []
31
31
  for i in range(track_count):
@@ -35,16 +35,22 @@ def _fetch_mix_data(ctx: Context) -> dict:
35
35
  except Exception:
36
36
  continue
37
37
 
38
- # Try to get spectrum and RMS data
38
+ # Get spectrum and RMS data directly from SpectralCache (not TCP)
39
39
  spectrum = None
40
40
  rms_data = None
41
41
  try:
42
- spectrum = ableton.send_command("get_master_spectrum", {})
43
- except Exception:
44
- pass
45
- try:
46
- rms_result = ableton.send_command("get_master_rms", {})
47
- rms_data = rms_result.get("rms") if isinstance(rms_result, dict) else None
42
+ spectral = ctx.lifespan_context.get("spectral")
43
+ if spectral and spectral.is_connected:
44
+ spec_data = spectral.get("spectrum")
45
+ if spec_data:
46
+ spectrum = {"bands": spec_data["value"]}
47
+ key_data = spectral.get("key")
48
+ if key_data:
49
+ spectrum["detected_key"] = key_data["value"]
50
+
51
+ rms_snap = spectral.get("rms")
52
+ if rms_snap:
53
+ rms_data = rms_snap["value"] if isinstance(rms_snap["value"], dict) else rms_snap["value"]
48
54
  except Exception:
49
55
  pass
50
56
 
@@ -41,18 +41,21 @@ BLOCKED_MOVE_TYPES: frozenset = frozenset({
41
41
 
42
42
 
43
43
  def classify_move_safety(move_type: str) -> str:
44
- """Classify a move type as 'safe', 'caution', or 'blocked'.
44
+ """Classify a move type as 'safe', 'caution', 'blocked', or 'unknown'.
45
45
 
46
46
  Returns:
47
47
  'safe' if the move is in SAFE_MOVE_TYPES,
48
48
  'blocked' if in BLOCKED_MOVE_TYPES,
49
- 'caution' otherwise (unknown or caution-class moves).
49
+ 'caution' if in CAUTION_MOVE_TYPES,
50
+ 'unknown' for unrecognized move types.
50
51
  """
51
52
  if move_type in SAFE_MOVE_TYPES:
52
53
  return "safe"
53
54
  if move_type in BLOCKED_MOVE_TYPES:
54
55
  return "blocked"
55
- return "caution"
56
+ if move_type in CAUTION_MOVE_TYPES:
57
+ return "caution"
58
+ return "unknown"
56
59
 
57
60
 
58
61
  # ── Safe move suggestions ─────────────────────────────────────────────
@@ -6,6 +6,7 @@ Pure computation, zero I/O.
6
6
  from __future__ import annotations
7
7
 
8
8
  import copy
9
+ import dataclasses
9
10
  from typing import Optional
10
11
 
11
12
  from .arrangement_graph import build_arrangement_graph
@@ -35,12 +36,15 @@ def refresh_tracks(
35
36
  new_state.session_graph = build_session_graph(session_info)
36
37
  new_state.session_graph.freshness.mark_fresh(new_state.revision)
37
38
 
38
- # Mark role and automation graphs stale since tracks changed
39
+ # Mark role and automation graphs stale since tracks changed.
40
+ # Deep-copy freshness so mutations don't leak to the original state.
39
41
  new_state.role_graph = copy.copy(state.role_graph)
42
+ new_state.role_graph.freshness = copy.deepcopy(state.role_graph.freshness)
40
43
  new_state.role_graph.freshness.mark_stale(
41
44
  f"tracks refreshed: {track_indices}"
42
45
  )
43
46
  new_state.automation_graph = copy.copy(state.automation_graph)
47
+ new_state.automation_graph.freshness = copy.deepcopy(state.automation_graph.freshness)
44
48
  new_state.automation_graph.freshness.mark_stale(
45
49
  f"tracks refreshed: {track_indices}"
46
50
  )
@@ -73,8 +77,10 @@ def refresh_arrangement(
73
77
  )
74
78
  new_state.arrangement_graph.freshness.mark_fresh(new_state.revision)
75
79
 
76
- # Mark role graph stale since arrangement changed
80
+ # Mark role graph stale since arrangement changed.
81
+ # Deep-copy freshness so mutations don't leak to the original state.
77
82
  new_state.role_graph = copy.copy(state.role_graph)
83
+ new_state.role_graph.freshness = copy.deepcopy(state.role_graph.freshness)
78
84
  new_state.role_graph.freshness.mark_stale("arrangement refreshed")
79
85
 
80
86
  return new_state
@@ -78,22 +78,22 @@ def build_project_brain(ctx: Context) -> dict:
78
78
  except Exception:
79
79
  pass
80
80
 
81
- # 6. Probe capabilities
81
+ # 6. Probe capabilities (direct SpectralCache access, not TCP)
82
82
  analyzer_ok = False
83
83
  analyzer_fresh = False
84
84
  flucoma_ok = False
85
85
  try:
86
- # Check if M4L bridge is responding via spectral cache
87
- bridge = ctx.lifespan_context.get("spectral_cache")
88
- if bridge:
89
- analyzer_ok = True
90
- analyzer_fresh = not bridge.is_stale() if hasattr(bridge, "is_stale") else False
91
- except Exception:
92
- pass
93
-
94
- try:
95
- flucoma_resp = ableton.send_command("check_flucoma")
96
- flucoma_ok = flucoma_resp.get("available", False)
86
+ spectral = ctx.lifespan_context.get("spectral")
87
+ if spectral:
88
+ analyzer_ok = spectral.is_connected
89
+ if analyzer_ok:
90
+ snap = spectral.get("spectrum")
91
+ analyzer_fresh = snap is not None
92
+ # Check FluCoMa by looking for any FluCoMa stream data
93
+ for key in ("spectral_shape", "mel_bands", "chroma", "onset", "novelty", "loudness"):
94
+ if spectral.get(key) is not None:
95
+ flucoma_ok = True
96
+ break
97
97
  except Exception:
98
98
  pass
99
99
 
@@ -41,22 +41,23 @@ def _fetch_project_snapshot(ctx: Context) -> dict:
41
41
  "harmonic_character": "",
42
42
  }
43
43
 
44
- # Try to get master RMS / loudness
44
+ # Get master RMS / loudness and spectrum from SpectralCache directly
45
45
  try:
46
- rms_result = ableton.send_command("get_master_rms", {})
47
- rms = rms_result.get("rms", 0.0) if isinstance(rms_result, dict) else 0.0
48
- # Approximate LUFS from RMS (rough heuristic)
49
- if rms > 0:
50
- import math
51
- snapshot["loudness"] = round(20 * math.log10(max(rms, 1e-10)), 2)
52
- except Exception:
53
- pass
54
-
55
- # Try to get spectrum data
56
- try:
57
- spectrum = ableton.send_command("get_master_spectrum", {})
58
- if isinstance(spectrum, dict):
59
- snapshot["spectral"] = spectrum
46
+ spectral = ctx.lifespan_context.get("spectral")
47
+ if spectral and spectral.is_connected:
48
+ rms_snap = spectral.get("rms")
49
+ if rms_snap:
50
+ rms = rms_snap["value"] if isinstance(rms_snap["value"], (int, float)) else 0.0
51
+ if rms > 0:
52
+ import math
53
+ snapshot["loudness"] = round(20 * math.log10(max(rms, 1e-10)), 2)
54
+
55
+ spec_data = spectral.get("spectrum")
56
+ if spec_data:
57
+ snapshot["spectral"] = {"bands": spec_data["value"]}
58
+ key_data = spectral.get("key")
59
+ if key_data:
60
+ snapshot["spectral"]["detected_key"] = key_data["value"]
60
61
  except Exception:
61
62
  pass
62
63
 
@@ -7,19 +7,26 @@ Classes:
7
7
 
8
8
  from __future__ import annotations
9
9
 
10
+ import threading
10
11
  import time
12
+ import uuid
11
13
  from dataclasses import dataclass, field
12
14
  from typing import Optional
13
15
 
14
16
 
15
- # Module-level counter for auto-generating IDs.
17
+ # Thread-safe counter with UUID suffix to prevent collisions across
18
+ # server restarts and concurrent processes.
16
19
  _counter: int = 0
20
+ _counter_lock = threading.Lock()
21
+ _session_id = uuid.uuid4().hex[:6]
17
22
 
18
23
 
19
24
  def _next_id() -> str:
20
25
  global _counter
21
- _counter += 1
22
- return f"move_{_counter:04d}"
26
+ with _counter_lock:
27
+ _counter += 1
28
+ seq = _counter
29
+ return f"move_{_session_id}_{seq:04d}"
23
30
 
24
31
 
25
32
  @dataclass
@@ -203,10 +203,11 @@ def build_capability_state(
203
203
  # ── Overall mode ────────────────────────────────────────────────
204
204
  if session_ok and analyzer_ok and analyzer_fresh:
205
205
  overall_mode = "normal"
206
- elif session_ok and not analyzer_ok:
206
+ elif session_ok and analyzer_ok:
207
+ # Analyzer online but data is stale — degraded measurement
207
208
  overall_mode = "measured_degraded"
208
209
  elif session_ok:
209
- # session_ok, analyzer_ok but not fresh
210
+ # Analyzer offline entirely — must rely on judgment alone
210
211
  overall_mode = "judgment_only"
211
212
  else:
212
213
  overall_mode = "read_only"