livepilot 1.5.0 → 1.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -55,6 +55,7 @@ from .tools import browser # noqa: F401, E402
55
55
  from .tools import arrangement # noqa: F401, E402
56
56
  from .tools import memory # noqa: F401, E402
57
57
  from .tools import analyzer # noqa: F401, E402
58
+ from .tools import automation # noqa: F401, E402
58
59
 
59
60
 
60
61
  # ---------------------------------------------------------------------------
@@ -0,0 +1,431 @@
1
+ """Automation MCP tools — clip envelope CRUD + intelligent curve generation.
2
+
3
+ 8 tools for writing, reading, and generating automation curves on session clips.
4
+ Combines the clip automation handlers (Remote Script) with the curve generation
5
+ engine (curves.py) for musically intelligent automation.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ from typing import Any, Optional
11
+
12
+ from fastmcp import Context
13
+
14
+ from ..curves import generate_curve, generate_from_recipe, list_recipes
15
+ from ..server import mcp
16
+
17
+
18
def _get_ableton(ctx: Context):
    """Return the shared Ableton connection held in the server lifespan context."""
    lifespan = ctx.lifespan_context
    return lifespan["ableton"]
20
+
21
+
22
+ def _ensure_list(v: Any) -> list:
23
+ if isinstance(v, str):
24
+ import json
25
+ return json.loads(v)
26
+ return list(v)
27
+
28
+
29
@mcp.tool()
def get_clip_automation(
    ctx: Context,
    track_index: int,
    clip_index: int,
) -> dict:
    """List all automation envelopes on a session clip.

    Returns which parameters have automation, including device name,
    parameter name, and type (mixer/send/device). Use this to see
    what's already automated before writing new curves.
    """
    payload = {"track_index": track_index, "clip_index": clip_index}
    ableton = _get_ableton(ctx)
    return ableton.send_command("get_clip_automation", payload)
45
+
46
+
47
@mcp.tool()
def set_clip_automation(
    ctx: Context,
    track_index: int,
    clip_index: int,
    parameter_type: str,
    points: Any,
    device_index: Optional[int] = None,
    parameter_index: Optional[int] = None,
    send_index: Optional[int] = None,
) -> dict:
    """Write automation points to a session clip envelope.

    parameter_type: "device", "volume", "panning", or "send"
    points: [{time, value, duration?}] — time relative to clip start (beats)
    values: 0.0-1.0 normalized (or parameter's actual min/max range)

    For device params: provide device_index + parameter_index.
    For sends: provide send_index (0=A, 1=B, etc).

    Tip: Use apply_automation_shape to generate points from curves/recipes
    instead of calculating points manually.
    """
    # Required fields first, then only the routing indices the caller supplied.
    payload: dict = {
        "track_index": track_index,
        "clip_index": clip_index,
        "parameter_type": parameter_type,
        "points": _ensure_list(points),
    }
    routing = {
        "device_index": device_index,
        "parameter_index": parameter_index,
        "send_index": send_index,
    }
    payload.update({key: val for key, val in routing.items() if val is not None})
    return _get_ableton(ctx).send_command("set_clip_automation", payload)
83
+
84
+
85
@mcp.tool()
def clear_clip_automation(
    ctx: Context,
    track_index: int,
    clip_index: int,
    parameter_type: Optional[str] = None,
    device_index: Optional[int] = None,
    parameter_index: Optional[int] = None,
    send_index: Optional[int] = None,
) -> dict:
    """Clear automation envelopes from a session clip.

    If parameter_type is omitted, clears ALL envelopes.
    If provided, clears only that parameter's envelope.
    """
    # Clip address is always sent; every other field is optional and only
    # forwarded when the caller provided it.
    payload: dict = {"track_index": track_index, "clip_index": clip_index}
    optional = {
        "parameter_type": parameter_type,
        "device_index": device_index,
        "parameter_index": parameter_index,
        "send_index": send_index,
    }
    payload.update({key: val for key, val in optional.items() if val is not None})
    return _get_ableton(ctx).send_command("clear_clip_automation", payload)
113
+
114
+
115
@mcp.tool()
def apply_automation_shape(
    ctx: Context,
    track_index: int,
    clip_index: int,
    parameter_type: str,
    curve_type: str,
    duration: float = 4.0,
    density: int = 16,
    device_index: Optional[int] = None,
    parameter_index: Optional[int] = None,
    send_index: Optional[int] = None,
    start: float = 0.0,
    end: float = 1.0,
    center: float = 0.5,
    amplitude: float = 0.5,
    frequency: float = 1.0,
    phase: float = 0.0,
    peak: float = 1.0,
    decay: float = 4.0,
    low: float = 0.0,
    high: float = 1.0,
    factor: float = 3.0,
    invert: bool = False,
    time_offset: float = 0.0,
) -> dict:
    """Generate and apply an automation curve to a session clip.

    Combines curve generation with clip automation writing in one call.

    curve_type: linear, exponential, logarithmic, s_curve, sine,
                sawtooth, spike, square, steps, perlin, brownian,
                spring, bezier, easing, euclidean, stochastic
    duration: curve length in beats
    density: number of automation points
    time_offset: shift the entire curve forward by N beats

    Curve-specific params:
    - linear/exp/log: start, end, factor (steepness 2-6)
    - sine: center, amplitude, frequency, phase
    - sawtooth: start, end, frequency (resets per duration)
    - spike: peak, decay (higher = faster)
    - square: low, high, frequency
    - s_curve: start, end

    Musical guidance:
    - Filter sweeps: use exponential (perceptually even)
    - Volume fades: use logarithmic (matches ear's response)
    - Crossfades: use s_curve (natural acceleration/deceleration)
    - Pumping: use sawtooth with frequency matching beat divisions
    - Throws: use spike with short duration (1-2 beats)
    - Tremolo/pan: use sine with frequency in musical divisions
    """
    # Build the point list from the curve-generation engine.
    shape_points = generate_curve(
        curve_type=curve_type,
        duration=duration,
        density=density,
        start=start,
        end=end,
        center=center,
        amplitude=amplitude,
        frequency=frequency,
        phase=phase,
        peak=peak,
        decay=decay,
        low=low,
        high=high,
        factor=factor,
        invert=invert,
    )

    # Slide the whole curve later into the clip when requested.
    if time_offset > 0:
        for point in shape_points:
            point["time"] = point["time"] + time_offset

    # Assemble the write command, attaching only the routing indices
    # the caller actually supplied.
    payload: dict = {
        "track_index": track_index,
        "clip_index": clip_index,
        "parameter_type": parameter_type,
        "points": shape_points,
    }
    routing = {
        "device_index": device_index,
        "parameter_index": parameter_index,
        "send_index": send_index,
    }
    payload.update({key: val for key, val in routing.items() if val is not None})

    response = _get_ableton(ctx).send_command("set_clip_automation", payload)
    response["curve_type"] = curve_type
    response["curve_points"] = len(shape_points)
    return response
205
+
206
+
207
@mcp.tool()
def apply_automation_recipe(
    ctx: Context,
    track_index: int,
    clip_index: int,
    parameter_type: str,
    recipe: str,
    duration: float = 4.0,
    density: int = 16,
    device_index: Optional[int] = None,
    parameter_index: Optional[int] = None,
    send_index: Optional[int] = None,
    time_offset: float = 0.0,
) -> dict:
    """Apply a named automation recipe to a session clip.

    Recipes are predefined curve shapes for common production techniques.
    Use get_automation_recipes to list all available recipes.

    Available recipes:
    - filter_sweep_up: LP filter opening (exponential, 8-32 bars)
    - filter_sweep_down: LP filter closing (logarithmic, 4-16 bars)
    - dub_throw: send spike for reverb/delay throw (1-2 beats)
    - tape_stop: pitch dropping to zero (0.5-2 beats)
    - build_rise: tension build on HP filter + volume (8-32 bars)
    - sidechain_pump: volume ducking per beat (sawtooth, 1 beat loop)
    - fade_in / fade_out: perceptually smooth volume fades
    - tremolo: periodic volume oscillation
    - auto_pan: stereo movement via pan sine
    - stutter: rapid on/off gating
    - breathing: subtle filter movement (acoustic instrument feel)
    - washout: reverb/delay feedback increasing
    - vinyl_crackle: slow bit reduction movement
    - stereo_narrow: collapse to mono before drop
    """
    recipe_points = generate_from_recipe(recipe, duration=duration, density=density)

    # Slide the whole curve later into the clip when requested.
    if time_offset > 0:
        for point in recipe_points:
            point["time"] = point["time"] + time_offset

    # Assemble the write command, attaching only the routing indices
    # the caller actually supplied.
    payload: dict = {
        "track_index": track_index,
        "clip_index": clip_index,
        "parameter_type": parameter_type,
        "points": recipe_points,
    }
    routing = {
        "device_index": device_index,
        "parameter_index": parameter_index,
        "send_index": send_index,
    }
    payload.update({key: val for key, val in routing.items() if val is not None})

    response = _get_ableton(ctx).send_command("set_clip_automation", payload)
    response["recipe"] = recipe
    response["curve_points"] = len(recipe_points)
    return response
265
+
266
+
267
@mcp.tool()
def get_automation_recipes(ctx: Context) -> dict:
    """List all available automation recipes with descriptions.

    Each recipe includes: curve type, description, typical duration,
    and recommended target parameter. Use apply_automation_recipe
    to apply any recipe to a clip.
    """
    # Pure lookup — no Remote Script round-trip needed.
    recipes = list_recipes()
    return {"recipes": recipes}
276
+
277
+
278
@mcp.tool()
def generate_automation_curve(
    ctx: Context,
    curve_type: str,
    duration: float = 4.0,
    density: int = 16,
    start: float = 0.0,
    end: float = 1.0,
    center: float = 0.5,
    amplitude: float = 0.5,
    frequency: float = 1.0,
    phase: float = 0.0,
    peak: float = 1.0,
    decay: float = 4.0,
    low: float = 0.0,
    high: float = 1.0,
    factor: float = 3.0,
    invert: bool = False,
) -> dict:
    """Generate automation curve points WITHOUT writing them.

    Returns the points array for preview/inspection. Use this to see
    what a curve looks like before committing it to a clip.
    Pass the returned points to set_clip_automation or
    set_arrangement_automation to write them.
    """
    points = generate_curve(
        curve_type=curve_type,
        duration=duration,
        density=density,
        start=start,
        end=end,
        center=center,
        amplitude=amplitude,
        frequency=frequency,
        phase=phase,
        peak=peak,
        decay=decay,
        low=low,
        high=high,
        factor=factor,
        invert=invert,
    )
    # Collect the values once so the min/max summary doesn't scan twice.
    values = [point["value"] for point in points]
    return {
        "curve_type": curve_type,
        "duration": duration,
        "point_count": len(points),
        "points": points,
        "value_range": {
            "min": min(values) if values else 0,
            "max": max(values) if values else 0,
        },
    }
326
+
327
+
328
@mcp.tool()
def analyze_for_automation(
    ctx: Context,
    track_index: int,
) -> dict:
    """Analyze a track's spectrum and suggest automation targets.

    Reads the track's current spectral data and device chain,
    then suggests which parameters would benefit from automation
    based on the frequency content and device types present.

    Requires LivePilot Analyzer on master track and audio playing.
    """
    ableton = _get_ableton(ctx)

    # Get track devices
    track_info = ableton.send_command("get_track_info", {
        "track_index": track_index,
    })

    # Get current spectrum (stays {} when the analyzer bridge isn't connected)
    spectral = ctx.lifespan_context.get("spectral")
    spectrum = {}
    if spectral and spectral.is_connected:
        spectrum = spectral.get_spectrum()

    # Get meter level
    meters = ableton.send_command("get_track_meters", {
        "track_index": track_index,
    })

    devices = track_info.get("devices", [])
    suggestions = []

    # Analyze based on device types and spectrum
    for i, dev in enumerate(devices):
        dev_class = dev.get("class_name", "").lower()

        # Filter devices — suggest sweep automation
        if any(kw in dev_class for kw in ["autofilter", "eq8", "filter"]):
            suggestions.append({
                "device_index": i,
                "device_name": dev.get("name"),
                "suggestion": "filter_sweep",
                "reason": "Filter detected — automate cutoff for movement",
                "recipe": "filter_sweep_up",
            })

        # Reverb/delay — suggest send throws or washout
        if any(kw in dev_class for kw in ["reverb", "delay", "hybrid", "echo"]):
            suggestions.append({
                "device_index": i,
                "device_name": dev.get("name"),
                "suggestion": "spatial_automation",
                "reason": "Space effect — automate mix/decay for depth changes",
                "recipe": "washout",
            })

        # Distortion — suggest drive automation
        if any(kw in dev_class for kw in ["saturator", "overdrive", "pedal", "amp"]):
            suggestions.append({
                "device_index": i,
                "device_name": dev.get("name"),
                "suggestion": "drive_automation",
                "reason": "Distortion — automate drive for dynamic saturation",
                "recipe": "breathing",
            })

        # Synths — suggest wavetable/macro automation
        if any(kw in dev_class for kw in ["wavetable", "drift", "analog", "operator"]):
            suggestions.append({
                "device_index": i,
                "device_name": dev.get("name"),
                "suggestion": "timbre_evolution",
                "reason": "Synth — automate timbre params for evolving sound",
                "recipe": "breathing",
            })

    # Mixer suggestions based on spectrum
    if spectrum:
        sub = spectrum.get("sub", 0)
        if sub > 0.15:
            suggestions.append({
                "suggestion": "high_pass_automation",
                "reason": f"Heavy sub content ({sub:.2f}) — HP filter sweep for builds",
                "recipe": "build_rise",
            })

    # Always suggest send automation for spatial depth
    suggestions.append({
        "suggestion": "send_throws",
        "reason": "Reverb/delay sends — automate for dub throws and spatial variation",
        "recipe": "dub_throw",
    })

    # Guard against a present-but-empty "tracks" list: dict.get's default
    # only covers a missing key, so indexing [0] could raise IndexError.
    track_meters = meters.get("tracks") or [{}]
    return {
        "track_index": track_index,
        "track_name": track_info.get("name", ""),
        "device_count": len(devices),
        "current_level": track_meters[0].get("level", 0),
        "spectrum": spectrum,
        "suggestions": suggestions,
    }
package/package.json CHANGED
@@ -1,8 +1,8 @@
1
1
  {
2
2
  "name": "livepilot",
3
- "version": "1.5.0",
3
+ "version": "1.6.1",
4
4
  "mcpName": "io.github.dreamrec/livepilot",
5
- "description": "AI copilot for Ableton Live 12 — 127 tools, device atlas (280+ devices), real-time audio analysis, and technique memory",
5
+ "description": "AI copilot for Ableton Live 12 — 135 tools, device atlas (280+ devices), real-time audio analysis, automation intelligence, and technique memory",
6
6
  "author": "Pilot Studio",
7
7
  "license": "MIT",
8
8
  "type": "commonjs",
@@ -24,8 +24,9 @@ Given a high-level description, you:
24
24
  6. **Program patterns** — write MIDI notes that fit the genre and style
25
25
  7. **Add effects** — load and configure effect chains for the desired sound
26
26
  8. **HEALTH CHECK** — verify effects aren't pass-throughs (Dry/Wet > 0, Drive set, etc.)
27
- 9. **Mix** — balance volumes, set panning, configure sends
28
- 10. **Final verify** — `get_session_info`, fire scenes, confirm audio output
27
+ 9. **Automate** — add movement and evolution to the mix (see Automation Phase below)
28
+ 10. **Mix** — balance volumes, set panning, configure sends
29
+ 11. **Final verify** — `get_session_info`, fire scenes, confirm audio output
29
30
 
30
31
  ## Mandatory Track Health Checks
31
32
 
@@ -49,6 +50,35 @@ After loading any instrument, run this checklist:
49
50
  - **For synths, use `search_browser` → `load_browser_item`** with exact URI. `find_and_load_device` can match sample files before the actual instrument (e.g., "Drift" matches a .wav sample first)
50
51
  - **After loading any effect**, set its key parameters to non-default values. A Saturator with Drive=0, a Reverb with Dry/Wet=0, or a Compressor with Threshold at max are all pass-throughs.
51
52
 
53
+ ## Automation Phase (after writing notes, before mixing)
54
+
55
+ ### Step 1: Spectral Diagnosis
56
+ - Solo each track -> `get_master_spectrum` -> build spectral map
57
+ - Identify frequency overlaps between tracks (masking)
58
+ - Note problem areas: resonances, mud, harshness
59
+
60
+ ### Step 2: Per-Track Analysis
61
+ - `analyze_for_automation` on each track -> get device-specific suggestions
62
+ - Cross-reference with spectral map: which suggestions address the problems found?
63
+
64
+ ### Step 3: Write Automation (perception-action loop)
65
+ For each automation decision:
66
+ 1. Read spectrum BEFORE
67
+ 2. Apply recipe or custom curve
68
+ 3. Read spectrum AFTER
69
+ 4. Compare: did it improve? If not, clear and adjust
70
+ 5. Store the final working automation parameters in memory
71
+
72
+ ### Step 4: Spatial Design
73
+ - Add send automation for depth (dub throws, reverb washes)
74
+ - Consider complementary automation: as one track's filter opens, another's narrows
75
+ - Use cross-track spectral awareness to avoid new masking from automation
76
+
77
+ ### Step 5: Generative/Evolving Textures
78
+ - Consider polyrhythmic automation for non-repeating evolution
79
+ - Unlinked envelopes with prime-number beat lengths (3, 5, 7 beats)
80
+ - Spectral-driven automation: use analyzer data to modulate parameters in real time
81
+
52
82
  ## Rules
53
83
 
54
84
  - Always use the livepilot-core skill for guidance on tool usage
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "livepilot",
3
- "version": "1.5.0",
4
- "description": "AI copilot for Ableton Live 12 — 127 tools, device atlas (280+ devices), real-time audio analysis, and technique memory",
3
+ "version": "1.6.1",
4
+ "description": "AI copilot for Ableton Live 12 — 135 tools, device atlas (280+ devices), real-time audio analysis, automation intelligence, and technique memory",
5
5
  "author": "Pilot Studio",
6
6
  "skills": [
7
7
  "skills/livepilot-core"
@@ -1,17 +1,17 @@
1
1
  ---
2
2
  name: livepilot-core
3
- description: Core discipline for controlling Ableton Live 12 through LivePilot's 127 MCP tools, device atlas (280+ devices), M4L analyzer (spectrum/RMS/key detection), and technique memory. Use whenever working with Ableton Live through MCP tools.
3
+ description: Core discipline for controlling Ableton Live 12 through LivePilot's 135 MCP tools, device atlas (280+ devices), M4L analyzer (spectrum/RMS/key detection), automation intelligence (16 curve types, 15 recipes), and technique memory. Use whenever working with Ableton Live through MCP tools.
4
4
  ---
5
5
 
6
6
  # LivePilot Core — Ableton Live 12 AI Copilot
7
7
 
8
- LivePilot is an agentic production system for Ableton Live 12. It combines 127 MCP tools with three layers of intelligence:
8
+ LivePilot is an agentic production system for Ableton Live 12. It combines 135 MCP tools with three layers of intelligence:
9
9
 
10
10
  - **Device Atlas** — A structured knowledge corpus of 280+ instruments, 139 drum kits, and 350+ impulse responses. Consult the atlas before loading any device. It contains real browser URIs, preset names, and sonic descriptions. Never guess a device name — look it up.
11
11
  - **M4L Analyzer** — Real-time audio analysis on the master bus (8-band spectrum, RMS/peak, key detection). Use it to verify mixing decisions, detect frequency problems, and find the key before writing harmonic content.
12
12
  - **Technique Memory** — Persistent storage for production decisions. Consult `memory_recall` before creative tasks to understand the user's taste. Save techniques when the user likes something. The memory shapes future decisions without constraining them.
13
13
 
14
- These layers sit on top of 127 deterministic tools across 11 domains: transport, tracks, clips, MIDI notes, devices, scenes, mixing, browser, arrangement, technique memory, and real-time DSP analysis.
14
+ These layers sit on top of 135 deterministic tools across 12 domains: transport, tracks, clips, MIDI notes, devices, scenes, mixing, browser, arrangement, technique memory, real-time DSP analysis, and automation.
15
15
 
16
16
  ## Golden Rules
17
17
 
@@ -64,7 +64,7 @@ These layers sit on top of 127 deterministic tools across 11 domains: transport,
64
64
  - MIDI track with no instrument loaded
65
65
  - Notes programmed but clip not fired
66
66
 
67
- ## Tool Domains (127 total)
67
+ ## Tool Domains (135 total)
68
68
 
69
69
  ### Transport (12)
70
70
  `get_session_info` · `set_tempo` · `set_time_signature` · `start_playback` · `stop_playback` · `continue_playback` · `toggle_metronome` · `set_session_loop` · `undo` · `redo` · `get_recent_actions` · `get_session_diagnostics`
@@ -99,6 +99,29 @@ These layers sit on top of 127 deterministic tools across 11 domains: transport,
99
99
  ### Analyzer (20) — requires LivePilot Analyzer M4L device on master track
100
100
  `get_master_spectrum` · `get_master_rms` · `get_detected_key` · `get_hidden_parameters` · `get_automation_state` · `walk_device_tree` · `get_clip_file_path` · `replace_simpler_sample` · `load_sample_to_simpler` · `get_simpler_slices` · `crop_simpler` · `reverse_simpler` · `warp_simpler` · `get_warp_markers` · `add_warp_marker` · `move_warp_marker` · `remove_warp_marker` · `scrub_clip` · `stop_scrub` · `get_display_values`
101
101
 
102
+ ### Automation (8)
103
+ Clip automation CRUD + intelligent curve generation with 15 built-in recipes.
104
+
105
+ **Tools:** `get_clip_automation` · `set_clip_automation` · `clear_clip_automation` · `apply_automation_shape` · `apply_automation_recipe` · `get_automation_recipes` · `generate_automation_curve` · `analyze_for_automation`
106
+
107
+ **Key discipline:**
108
+
109
+ **The Feedback Loop (MANDATORY for all automation work):**
110
+ 1. PERCEIVE: `get_master_spectrum` + `get_track_meters` -> understand current state
111
+ 2. DIAGNOSE: What needs to change? Use diagnostic filter technique if unsure
112
+ 3. DECIDE: Which parameter, which curve, which recipe?
113
+ 4. ACT: `apply_automation_shape` or `apply_automation_recipe`
114
+ 5. VERIFY: `get_master_spectrum` again -> did it work?
115
+ 6. ADJUST: If not right, `clear_clip_automation` -> try different curve/params
116
+ 7. NEVER write automation without reading spectrum first and after
117
+
118
+ **Rules:**
119
+ - Use `analyze_for_automation` before writing — let spectral data guide decisions
120
+ - Use recipes for common patterns (filter_sweep_up, dub_throw, sidechain_pump)
121
+ - Use `apply_automation_shape` for custom curves with specific math
122
+ - Clear existing automation before rewriting: `clear_clip_automation` first
123
+ - Load `references/automation-atlas.md` for curve theory, genre recipes, diagnostic technique, and cross-track spectral mapping
124
+
102
125
  ## Workflow: Building a Beat
103
126
 
104
127
  1. `get_session_info` — check current state
@@ -240,10 +263,11 @@ Deep production knowledge lives in `references/`. Consult these when making crea
240
263
 
241
264
  | File | What's inside | When to consult |
242
265
  |------|--------------|-----------------|
243
- | `references/overview.md` | All 127 tools mapped with params, units, ranges | Quick lookup for any tool |
266
+ | `references/overview.md` | All 135 tools mapped with params, units, ranges | Quick lookup for any tool |
244
267
  | `references/midi-recipes.md` | Drum patterns by genre, chord voicings, scales, hi-hat techniques, humanization, polymetrics | Programming MIDI notes, building beats |
245
268
  | `references/sound-design.md` | Stock instruments/effects, parameter recipes for bass/pad/lead/pluck, device chain patterns | Loading and configuring devices |
246
269
  | `references/mixing-patterns.md` | Gain staging, parallel compression, sidechain, EQ by instrument, bus processing, stereo width | Setting volumes, panning, adding effects |
247
270
  | `references/ableton-workflow-patterns.md` | Session/Arrangement workflow, song structures by genre, follow actions, clip launch modes, export | Organizing sessions, structuring songs |
248
271
  | `references/m4l-devices.md` | Browser organization, MIDI effects, rack systems, device loading patterns | Finding and loading devices, using racks |
249
272
  | `references/memory-guide.md` | Qualities template, good/bad examples for each technique type | Saving techniques, writing qualities |
273
+ | `references/automation-atlas.md` | Curve theory, perception-action loop, genre recipes, diagnostic filter technique, spectral mapping | Writing automation, choosing curves, mixing with spectral feedback |