livepilot 1.5.0 → 1.6.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -55,6 +55,7 @@ from .tools import browser # noqa: F401, E402
55
55
  from .tools import arrangement # noqa: F401, E402
56
56
  from .tools import memory # noqa: F401, E402
57
57
  from .tools import analyzer # noqa: F401, E402
58
+ from .tools import automation # noqa: F401, E402
58
59
 
59
60
 
60
61
  # ---------------------------------------------------------------------------
@@ -0,0 +1,488 @@
1
+ """Automation MCP tools — clip envelope CRUD + intelligent curve generation.
2
+
3
+ 8 tools for writing, reading, and generating automation curves on session clips.
4
+ Combines the clip automation handlers (Remote Script) with the curve generation
5
+ engine (curves.py) for musically intelligent automation.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ from typing import Any, Optional
11
+
12
+ from fastmcp import Context
13
+
14
+ from ..curves import generate_curve, generate_from_recipe, list_recipes
15
+ from ..server import mcp
16
+
17
+
18
+ def _get_ableton(ctx: Context):
19
+ return ctx.lifespan_context["ableton"]
20
+
21
+
22
+ def _ensure_list(v: Any) -> list:
23
+ if isinstance(v, str):
24
+ import json
25
+ return json.loads(v)
26
+ return list(v)
27
+
28
+
29
@mcp.tool()
def get_clip_automation(
    ctx: Context,
    track_index: int,
    clip_index: int,
) -> dict:
    """List all automation envelopes on a session clip.

    Returns which parameters have automation, including device name,
    parameter name, and type (mixer/send/device). Use this to see
    what's already automated before writing new curves.
    """
    payload = {
        "track_index": track_index,
        "clip_index": clip_index,
    }
    ableton = _get_ableton(ctx)
    return ableton.send_command("get_clip_automation", payload)
45
+
46
+
47
@mcp.tool()
def set_clip_automation(
    ctx: Context,
    track_index: int,
    clip_index: int,
    parameter_type: str,
    points: Any,
    device_index: Optional[int] = None,
    parameter_index: Optional[int] = None,
    send_index: Optional[int] = None,
) -> dict:
    """Write automation points to a session clip envelope.

    parameter_type: "device", "volume", "panning", or "send"
    points: [{time, value, duration?}] — time relative to clip start (beats)
    values: 0.0-1.0 normalized (or parameter's actual min/max range)

    For device params: provide device_index + parameter_index.
    For sends: provide send_index (0=A, 1=B, etc).

    Tip: Use apply_automation_shape to generate points from curves/recipes
    instead of calculating points manually.
    """
    params: dict = {
        "track_index": track_index,
        "clip_index": clip_index,
        "parameter_type": parameter_type,
        "points": _ensure_list(points),
    }
    # Only forward the addressing fields the caller actually supplied.
    optional = {
        "device_index": device_index,
        "parameter_index": parameter_index,
        "send_index": send_index,
    }
    params.update({key: val for key, val in optional.items() if val is not None})
    return _get_ableton(ctx).send_command("set_clip_automation", params)
83
+
84
+
85
@mcp.tool()
def clear_clip_automation(
    ctx: Context,
    track_index: int,
    clip_index: int,
    parameter_type: Optional[str] = None,
    device_index: Optional[int] = None,
    parameter_index: Optional[int] = None,
    send_index: Optional[int] = None,
) -> dict:
    """Clear automation envelopes from a session clip.

    If parameter_type is omitted, clears ALL envelopes.
    If provided, clears only that parameter's envelope.
    """
    params: dict = {
        "track_index": track_index,
        "clip_index": clip_index,
    }
    # Omitted selectors stay absent so the Remote Script clears everything.
    optional = {
        "parameter_type": parameter_type,
        "device_index": device_index,
        "parameter_index": parameter_index,
        "send_index": send_index,
    }
    params.update({key: val for key, val in optional.items() if val is not None})
    return _get_ableton(ctx).send_command("clear_clip_automation", params)
113
+
114
+
115
@mcp.tool()
def apply_automation_shape(
    ctx: Context,
    track_index: int,
    clip_index: int,
    parameter_type: str,
    curve_type: str,
    duration: float = 4.0,
    density: int = 16,
    device_index: Optional[int] = None,
    parameter_index: Optional[int] = None,
    send_index: Optional[int] = None,
    start: float = 0.0,
    end: float = 1.0,
    center: float = 0.5,
    amplitude: float = 0.5,
    frequency: float = 1.0,
    phase: float = 0.0,
    peak: float = 1.0,
    decay: float = 4.0,
    low: float = 0.0,
    high: float = 1.0,
    factor: float = 3.0,
    invert: bool = False,
    time_offset: float = 0.0,
    # Steps params
    values: Optional[list[float]] = None,
    # Euclidean params
    hits: int = 5,
    steps: int = 16,
    # Organic params
    seed: float = 0.0,
    drift: float = 0.0,
    volatility: float = 0.1,
    damping: float = 0.15,
    stiffness: float = 8.0,
    # Bezier params
    control1: float = 0.0,
    control2: float = 1.0,
    control1_time: float = 0.33,
    control2_time: float = 0.66,
    # Easing params
    easing_type: str = "ease_out",
    # Stochastic params
    narrowing: float = 0.5,
) -> dict:
    """Generate and apply an automation curve to a session clip.

    Combines curve generation with clip automation writing in one call.

    curve_type: linear, exponential, logarithmic, s_curve, sine,
                sawtooth, spike, square, steps, perlin, brownian,
                spring, bezier, easing, euclidean, stochastic
    duration: curve length in beats
    density: number of automation points
    time_offset: shift the entire curve forward by N beats

    Curve-specific params:
    - linear/exp/log: start, end, factor (steepness 2-6)
    - sine: center, amplitude, frequency, phase
    - sawtooth: start, end, frequency (resets per duration)
    - spike: peak, decay (higher = faster)
    - square: low, high, frequency
    - s_curve: start, end

    Musical guidance:
    - Filter sweeps: use exponential (perceptually even)
    - Volume fades: use logarithmic (matches ear's response)
    - Crossfades: use s_curve (natural acceleration/deceleration)
    - Pumping: use sawtooth with frequency matching beat divisions
    - Throws: use spike with short duration (1-2 beats)
    - Tremolo/pan: use sine with frequency in musical divisions
    """
    # Collect every generator knob in one place, then hand off to the engine.
    curve_kwargs = dict(
        curve_type=curve_type,
        duration=duration,
        density=density,
        start=start,
        end=end,
        center=center,
        amplitude=amplitude,
        frequency=frequency,
        phase=phase,
        peak=peak,
        decay=decay,
        low=low,
        high=high,
        factor=factor,
        invert=invert,
        values=values or [],
        hits=hits,
        steps=steps,
        seed=seed,
        drift=drift,
        volatility=volatility,
        damping=damping,
        stiffness=stiffness,
        control1=control1,
        control2=control2,
        control1_time=control1_time,
        control2_time=control2_time,
        easing_type=easing_type,
        narrowing=narrowing,
    )
    points = generate_curve(**curve_kwargs)

    # Shift the whole curve later in the clip when an offset was requested.
    if time_offset > 0:
        for point in points:
            point["time"] = point["time"] + time_offset

    # Assemble the write command for the Remote Script.
    params: dict = {
        "track_index": track_index,
        "clip_index": clip_index,
        "parameter_type": parameter_type,
        "points": points,
    }
    for key, value in (
        ("device_index", device_index),
        ("parameter_index", parameter_index),
        ("send_index", send_index),
    ):
        if value is not None:
            params[key] = value

    result = _get_ableton(ctx).send_command("set_clip_automation", params)
    result["curve_type"] = curve_type
    result["curve_points"] = len(points)
    return result
233
+
234
+
235
@mcp.tool()
def apply_automation_recipe(
    ctx: Context,
    track_index: int,
    clip_index: int,
    parameter_type: str,
    recipe: str,
    duration: float = 4.0,
    density: int = 16,
    device_index: Optional[int] = None,
    parameter_index: Optional[int] = None,
    send_index: Optional[int] = None,
    time_offset: float = 0.0,
) -> dict:
    """Apply a named automation recipe to a session clip.

    Recipes are predefined curve shapes for common production techniques.
    Use get_automation_recipes to list all available recipes.

    Available recipes:
    - filter_sweep_up: LP filter opening (exponential, 8-32 bars)
    - filter_sweep_down: LP filter closing (logarithmic, 4-16 bars)
    - dub_throw: send spike for reverb/delay throw (1-2 beats)
    - tape_stop: pitch dropping to zero (0.5-2 beats)
    - build_rise: tension build on HP filter + volume (8-32 bars)
    - sidechain_pump: volume ducking per beat (sawtooth, 1 beat loop)
    - fade_in / fade_out: perceptually smooth volume fades
    - tremolo: periodic volume oscillation
    - auto_pan: stereo movement via pan sine
    - stutter: rapid on/off gating
    - breathing: subtle filter movement (acoustic instrument feel)
    - washout: reverb/delay feedback increasing
    - vinyl_crackle: slow bit reduction movement
    - stereo_narrow: collapse to mono before drop
    """
    points = generate_from_recipe(recipe, duration=duration, density=density)

    # Push the recipe later in the clip when an offset was requested.
    if time_offset > 0:
        for point in points:
            point["time"] = point["time"] + time_offset

    params: dict = {
        "track_index": track_index,
        "clip_index": clip_index,
        "parameter_type": parameter_type,
        "points": points,
    }
    for key, value in (
        ("device_index", device_index),
        ("parameter_index", parameter_index),
        ("send_index", send_index),
    ):
        if value is not None:
            params[key] = value

    result = _get_ableton(ctx).send_command("set_clip_automation", params)
    result["recipe"] = recipe
    result["curve_points"] = len(points)
    return result
293
+
294
+
295
@mcp.tool()
def get_automation_recipes(ctx: Context) -> dict:
    """List all available automation recipes with descriptions.

    Each recipe includes: curve type, description, typical duration,
    and recommended target parameter. Use apply_automation_recipe
    to apply any recipe to a clip.
    """
    recipes = list_recipes()
    return {"recipes": recipes}
304
+
305
+
306
@mcp.tool()
def generate_automation_curve(
    ctx: Context,
    curve_type: str,
    duration: float = 4.0,
    density: int = 16,
    start: float = 0.0,
    end: float = 1.0,
    center: float = 0.5,
    amplitude: float = 0.5,
    frequency: float = 1.0,
    phase: float = 0.0,
    peak: float = 1.0,
    decay: float = 4.0,
    low: float = 0.0,
    high: float = 1.0,
    factor: float = 3.0,
    invert: bool = False,
    # Steps params
    values: Optional[list[float]] = None,
    # Euclidean params
    hits: int = 5,
    steps: int = 16,
    # Organic params
    seed: float = 0.0,
    drift: float = 0.0,
    volatility: float = 0.1,
    damping: float = 0.15,
    stiffness: float = 8.0,
    # Bezier params
    control1: float = 0.0,
    control2: float = 1.0,
    control1_time: float = 0.33,
    control2_time: float = 0.66,
    # Easing params
    easing_type: str = "ease_out",
    # Stochastic params
    narrowing: float = 0.5,
) -> dict:
    """Generate automation curve points WITHOUT writing them.

    Returns the points array for preview/inspection. Use this to see
    what a curve looks like before committing it to a clip.
    Pass the returned points to set_clip_automation or
    set_arrangement_automation to write them.
    """
    # Single hand-off to the curve engine with every knob named explicitly.
    curve_kwargs = dict(
        curve_type=curve_type,
        duration=duration,
        density=density,
        start=start,
        end=end,
        center=center,
        amplitude=amplitude,
        frequency=frequency,
        phase=phase,
        peak=peak,
        decay=decay,
        low=low,
        high=high,
        factor=factor,
        invert=invert,
        values=values or [],
        hits=hits,
        steps=steps,
        seed=seed,
        drift=drift,
        volatility=volatility,
        damping=damping,
        stiffness=stiffness,
        control1=control1,
        control2=control2,
        control1_time=control1_time,
        control2_time=control2_time,
        easing_type=easing_type,
        narrowing=narrowing,
    )
    points = generate_curve(**curve_kwargs)

    # Summarize the value span so callers can sanity-check the preview.
    if points:
        observed = [p["value"] for p in points]
        value_range = {"min": min(observed), "max": max(observed)}
    else:
        value_range = {"min": 0, "max": 0}

    return {
        "curve_type": curve_type,
        "duration": duration,
        "point_count": len(points),
        "points": points,
        "value_range": value_range,
    }
382
+
383
+
384
@mcp.tool()
def analyze_for_automation(
    ctx: Context,
    track_index: int,
) -> dict:
    """Analyze a track's spectrum and suggest automation targets.

    Reads the track's current spectral data and device chain,
    then suggests which parameters would benefit from automation
    based on the frequency content and device types present.

    Requires LivePilot Analyzer on master track and audio playing.
    """
    ableton = _get_ableton(ctx)

    # Get track devices
    track_info = ableton.send_command("get_track_info", {
        "track_index": track_index,
    })

    # Get current spectrum (best-effort: empty when the analyzer is absent)
    spectral = ctx.lifespan_context.get("spectral")
    spectrum = {}
    if spectral and spectral.is_connected:
        data = spectral.get("spectrum")
        # Tolerate a payload without "value" instead of raising KeyError.
        spectrum = data.get("value", {}) if data else {}

    # Get meter level
    meters = ableton.send_command("get_track_meters", {
        "track_index": track_index,
    })

    devices = track_info.get("devices", [])
    suggestions = []

    # Analyze based on device types and spectrum
    for i, dev in enumerate(devices):
        dev_class = dev.get("class_name", "").lower()

        # Filter devices — suggest sweep automation
        if any(kw in dev_class for kw in ["autofilter", "eq8", "filter"]):
            suggestions.append({
                "device_index": i,
                "device_name": dev.get("name"),
                "suggestion": "filter_sweep",
                "reason": "Filter detected — automate cutoff for movement",
                "recipe": "filter_sweep_up",
            })

        # Reverb/delay — suggest send throws or washout
        if any(kw in dev_class for kw in ["reverb", "delay", "hybrid", "echo"]):
            suggestions.append({
                "device_index": i,
                "device_name": dev.get("name"),
                "suggestion": "spatial_automation",
                "reason": "Space effect — automate mix/decay for depth changes",
                "recipe": "washout",
            })

        # Distortion — suggest drive automation
        if any(kw in dev_class for kw in ["saturator", "overdrive", "pedal", "amp"]):
            suggestions.append({
                "device_index": i,
                "device_name": dev.get("name"),
                "suggestion": "drive_automation",
                "reason": "Distortion — automate drive for dynamic saturation",
                "recipe": "breathing",
            })

        # Synths — suggest wavetable/macro automation
        if any(kw in dev_class for kw in ["wavetable", "drift", "analog", "operator"]):
            suggestions.append({
                "device_index": i,
                "device_name": dev.get("name"),
                "suggestion": "timbre_evolution",
                "reason": "Synth — automate timbre params for evolving sound",
                "recipe": "breathing",
            })

    # Mixer suggestions based on spectrum
    if spectrum:
        sub = spectrum.get("sub", 0)
        if sub > 0.15:
            suggestions.append({
                "suggestion": "high_pass_automation",
                "reason": "Heavy sub content (%.2f) — HP filter sweep for builds" % sub,
                "recipe": "build_rise",
            })

    # Always suggest send automation for spatial depth
    suggestions.append({
        "suggestion": "send_throws",
        "reason": "Reverb/delay sends — automate for dub throws and spatial variation",
        "recipe": "dub_throw",
    })

    # Guard against "tracks": [] — .get's default only covers a missing key,
    # so indexing [0] on an empty list would raise IndexError.
    track_meters = meters.get("tracks") or [{}]

    return {
        "track_index": track_index,
        "track_name": track_info.get("name", ""),
        "device_count": len(devices),
        "current_level": track_meters[0].get("level", 0),
        "spectrum": spectrum,
        "suggestions": suggestions,
    }
package/package.json CHANGED
@@ -1,8 +1,8 @@
1
1
  {
2
2
  "name": "livepilot",
3
- "version": "1.5.0",
3
+ "version": "1.6.2",
4
4
  "mcpName": "io.github.dreamrec/livepilot",
5
- "description": "AI copilot for Ableton Live 12 — 127 tools, device atlas (280+ devices), real-time audio analysis, and technique memory",
5
+ "description": "AI copilot for Ableton Live 12 — 135 tools, device atlas (280+ devices), real-time audio analysis, automation intelligence, and technique memory",
6
6
  "author": "Pilot Studio",
7
7
  "license": "MIT",
8
8
  "type": "commonjs",
@@ -24,8 +24,9 @@ Given a high-level description, you:
24
24
  6. **Program patterns** — write MIDI notes that fit the genre and style
25
25
  7. **Add effects** — load and configure effect chains for the desired sound
26
26
  8. **HEALTH CHECK** — verify effects aren't pass-throughs (Dry/Wet > 0, Drive set, etc.)
27
- 9. **Mix** — balance volumes, set panning, configure sends
28
- 10. **Final verify** — `get_session_info`, fire scenes, confirm audio output
27
+ 9. **Automate** — add movement and evolution to the mix (see Automation Phase below)
28
+ 10. **Mix** — balance volumes, set panning, configure sends
29
+ 11. **Final verify** — `get_session_info`, fire scenes, confirm audio output
29
30
 
30
31
  ## Mandatory Track Health Checks
31
32
 
@@ -49,6 +50,35 @@ After loading any instrument, run this checklist:
49
50
  - **For synths, use `search_browser` → `load_browser_item`** with exact URI. `find_and_load_device` can match sample files before the actual instrument (e.g., "Drift" matches a .wav sample first)
50
51
  - **After loading any effect**, set its key parameters to non-default values. A Saturator with Drive=0, a Reverb with Dry/Wet=0, or a Compressor with Threshold at max are all pass-throughs.
51
52
 
53
+ ## Automation Phase (after writing notes, before mixing)
54
+
55
+ ### Step 1: Spectral Diagnosis
56
+ - Solo each track -> `get_master_spectrum` -> build spectral map
57
+ - Identify frequency overlaps between tracks (masking)
58
+ - Note problem areas: resonances, mud, harshness
59
+
60
+ ### Step 2: Per-Track Analysis
61
+ - `analyze_for_automation` on each track -> get device-specific suggestions
62
+ - Cross-reference with spectral map: which suggestions address the problems found?
63
+
64
+ ### Step 3: Write Automation (perception-action loop)
65
+ For each automation decision:
66
+ 1. Read spectrum BEFORE
67
+ 2. Apply recipe or custom curve
68
+ 3. Read spectrum AFTER
69
+ 4. Compare: did it improve? If not, clear and adjust
70
+ 5. Store the final working automation parameters in memory
71
+
72
+ ### Step 4: Spatial Design
73
+ - Add send automation for depth (dub throws, reverb washes)
74
+ - Consider complementary automation: as one track's filter opens, another's narrows
75
+ - Use cross-track spectral awareness to avoid new masking from automation
76
+
77
+ ### Step 5: Generative/Evolving Textures
78
+ - Consider polyrhythmic automation for non-repeating evolution
79
+ - Unlinked envelopes with prime-number beat lengths (3, 5, 7 beats)
80
+ - Spectral-driven automation: use analyzer data to guide parameter modulation in real time
81
+
52
82
  ## Rules
53
83
 
54
84
  - Always use the livepilot-core skill for guidance on tool usage
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "livepilot",
3
- "version": "1.5.0",
4
- "description": "AI copilot for Ableton Live 12 — 127 tools, device atlas (280+ devices), real-time audio analysis, and technique memory",
3
+ "version": "1.6.2",
4
+ "description": "AI copilot for Ableton Live 12 — 135 tools, device atlas (280+ devices), real-time audio analysis, automation intelligence, and technique memory",
5
5
  "author": "Pilot Studio",
6
6
  "skills": [
7
7
  "skills/livepilot-core"
@@ -1,17 +1,17 @@
1
1
  ---
2
2
  name: livepilot-core
3
- description: Core discipline for controlling Ableton Live 12 through LivePilot's 127 MCP tools, device atlas (280+ devices), M4L analyzer (spectrum/RMS/key detection), and technique memory. Use whenever working with Ableton Live through MCP tools.
3
+ description: Core discipline for controlling Ableton Live 12 through LivePilot's 135 MCP tools, device atlas (280+ devices), M4L analyzer (spectrum/RMS/key detection), automation intelligence (16 curve types, 15 recipes), and technique memory. Use whenever working with Ableton Live through MCP tools.
4
4
  ---
5
5
 
6
6
  # LivePilot Core — Ableton Live 12 AI Copilot
7
7
 
8
- LivePilot is an agentic production system for Ableton Live 12. It combines 127 MCP tools with three layers of intelligence:
8
+ LivePilot is an agentic production system for Ableton Live 12. It combines 135 MCP tools with three layers of intelligence:
9
9
 
10
10
  - **Device Atlas** — A structured knowledge corpus of 280+ instruments, 139 drum kits, and 350+ impulse responses. Consult the atlas before loading any device. It contains real browser URIs, preset names, and sonic descriptions. Never guess a device name — look it up.
11
11
  - **M4L Analyzer** — Real-time audio analysis on the master bus (8-band spectrum, RMS/peak, key detection). Use it to verify mixing decisions, detect frequency problems, and find the key before writing harmonic content.
12
12
  - **Technique Memory** — Persistent storage for production decisions. Consult `memory_recall` before creative tasks to understand the user's taste. Save techniques when the user likes something. The memory shapes future decisions without constraining them.
13
13
 
14
- These layers sit on top of 127 deterministic tools across 11 domains: transport, tracks, clips, MIDI notes, devices, scenes, mixing, browser, arrangement, technique memory, and real-time DSP analysis.
14
+ These layers sit on top of 135 deterministic tools across 12 domains: transport, tracks, clips, MIDI notes, devices, scenes, mixing, browser, arrangement, technique memory, real-time DSP analysis, and automation.
15
15
 
16
16
  ## Golden Rules
17
17
 
@@ -64,7 +64,7 @@ These layers sit on top of 127 deterministic tools across 11 domains: transport,
64
64
  - MIDI track with no instrument loaded
65
65
  - Notes programmed but clip not fired
66
66
 
67
- ## Tool Domains (127 total)
67
+ ## Tool Domains (135 total)
68
68
 
69
69
  ### Transport (12)
70
70
  `get_session_info` · `set_tempo` · `set_time_signature` · `start_playback` · `stop_playback` · `continue_playback` · `toggle_metronome` · `set_session_loop` · `undo` · `redo` · `get_recent_actions` · `get_session_diagnostics`
@@ -99,6 +99,29 @@ These layers sit on top of 127 deterministic tools across 11 domains: transport,
99
99
  ### Analyzer (20) — requires LivePilot Analyzer M4L device on master track
100
100
  `get_master_spectrum` · `get_master_rms` · `get_detected_key` · `get_hidden_parameters` · `get_automation_state` · `walk_device_tree` · `get_clip_file_path` · `replace_simpler_sample` · `load_sample_to_simpler` · `get_simpler_slices` · `crop_simpler` · `reverse_simpler` · `warp_simpler` · `get_warp_markers` · `add_warp_marker` · `move_warp_marker` · `remove_warp_marker` · `scrub_clip` · `stop_scrub` · `get_display_values`
101
101
 
102
+ ### Automation (8)
103
+ Clip automation CRUD + intelligent curve generation with 15 built-in recipes.
104
+
105
+ **Tools:** `get_clip_automation` · `set_clip_automation` · `clear_clip_automation` · `apply_automation_shape` · `apply_automation_recipe` · `get_automation_recipes` · `generate_automation_curve` · `analyze_for_automation`
106
+
107
+ **Key discipline:**
108
+
109
+ **The Feedback Loop (MANDATORY for all automation work):**
110
+ 1. PERCEIVE: `get_master_spectrum` + `get_track_meters` -> understand current state
111
+ 2. DIAGNOSE: What needs to change? Use diagnostic filter technique if unsure
112
+ 3. DECIDE: Which parameter, which curve, which recipe?
113
+ 4. ACT: `apply_automation_shape` or `apply_automation_recipe`
114
+ 5. VERIFY: `get_master_spectrum` again -> did it work?
115
+ 6. ADJUST: If not right, `clear_clip_automation` -> try different curve/params
116
+ 7. NEVER write automation without reading spectrum first and after
117
+
118
+ **Rules:**
119
+ - Use `analyze_for_automation` before writing — let spectral data guide decisions
120
+ - Use recipes for common patterns (filter_sweep_up, dub_throw, sidechain_pump)
121
+ - Use `apply_automation_shape` for custom curves with specific math
122
+ - Clear existing automation before rewriting: `clear_clip_automation` first
123
+ - Load `references/automation-atlas.md` for curve theory, genre recipes, diagnostic technique, and cross-track spectral mapping
124
+
102
125
  ## Workflow: Building a Beat
103
126
 
104
127
  1. `get_session_info` — check current state
@@ -240,10 +263,11 @@ Deep production knowledge lives in `references/`. Consult these when making crea
240
263
 
241
264
  | File | What's inside | When to consult |
242
265
  |------|--------------|-----------------|
243
- | `references/overview.md` | All 127 tools mapped with params, units, ranges | Quick lookup for any tool |
266
+ | `references/overview.md` | All 135 tools mapped with params, units, ranges | Quick lookup for any tool |
244
267
  | `references/midi-recipes.md` | Drum patterns by genre, chord voicings, scales, hi-hat techniques, humanization, polymetrics | Programming MIDI notes, building beats |
245
268
  | `references/sound-design.md` | Stock instruments/effects, parameter recipes for bass/pad/lead/pluck, device chain patterns | Loading and configuring devices |
246
269
  | `references/mixing-patterns.md` | Gain staging, parallel compression, sidechain, EQ by instrument, bus processing, stereo width | Setting volumes, panning, adding effects |
247
270
  | `references/ableton-workflow-patterns.md` | Session/Arrangement workflow, song structures by genre, follow actions, clip launch modes, export | Organizing sessions, structuring songs |
248
271
  | `references/m4l-devices.md` | Browser organization, MIDI effects, rack systems, device loading patterns | Finding and loading devices, using racks |
249
272
  | `references/memory-guide.md` | Qualities template, good/bad examples for each technique type | Saving techniques, writing qualities |
273
+ | `references/automation-atlas.md` | Curve theory, perception-action loop, genre recipes, diagnostic filter technique, spectral mapping | Writing automation, choosing curves, mixing with spectral feedback |