laive-mcp 0.1.4 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,13 @@
2
2
 
3
3
  ## Unreleased
4
4
 
5
+ ## v0.2.0 - 2026-03-22
6
+
7
+ - Updated the README to document the MCP capabilities currently validated against a live Ableton session separately from lower-level bridge capabilities that are not yet surfaced as first-class MCP tools.
8
+ - Expanded the MCP server to expose the remaining control-surface bridge tools for transport control, scene creation, and MIDI note insertion.
9
+ - Added optional sidecar and UI-helper MCP workflow tools plus `get_component_status`, with structured setup instructions when those optional components are unavailable.
10
+ - Fixed the Remote Script packaging helper to retry staged-tree cleanup so `laive-mcp package` no longer fails intermittently on existing `__pycache__` directories.
11
+
5
12
  ## v0.1.4 - 2026-03-22
6
13
 
7
14
  - Fixed MCP tool schema advertising so argument-bearing tools like `set_tempo`, `get_track_details`, `get_device_tree`, `create_clip`, and `set_parameter` now publish explicit JSON Schemas through `tools/list` instead of empty input objects, allowing Codex clients to send required parameters.
package/README.md CHANGED
@@ -11,6 +11,36 @@ Today, the repo ships:
11
11
  - a staged `laive-ui-helper.app` bundle for UI fallback permissions
12
12
  - `.als` parsing scaffolds
13
13
 
14
+ ## Proven MCP Capabilities
15
+
16
+ With Ableton Live running and the `laive` Control Surface enabled, the published MCP server can currently drive these workflows end-to-end:
17
+
18
+ - read the current project summary
19
+ - read the selected track, scene, clip, and device context
20
+ - list tracks
21
+ - read detailed track state, including session clips
22
+ - read a track's device tree and parameter state
23
+ - refresh the mirrored project state
24
+ - set song tempo
25
+ - start and stop transport
26
+ - create MIDI or audio tracks
27
+ - create scenes
28
+ - create MIDI clips in session slots
29
+ - insert MIDI notes into clips
30
+ - set device parameter values
31
+ - report optional sidecar and UI-helper availability with setup guidance
32
+ - list and invoke optional sidecar workflows
33
+ - list and invoke optional UI-helper workflows
34
+
35
+ These capabilities have been validated against a live Ableton session through the published `laive-mcp` package, not just fixture mode.
36
+
37
+ The optional components are intentionally soft-failable:
38
+
39
+ - if the Max for Live sidecar is not installed or not connected, the MCP tools return structured setup instructions for the agent to relay
40
+ - if the macOS UI helper is not installed or Accessibility is not granted, the MCP tools return setup instructions instead of silently failing
41
+
42
+ The bridge also reports lower-level support for subscriptions / event streaming, but that is not yet surfaced as a first-class MCP notification channel in the current release.
43
+
14
44
  If you are using this as an end user, the published npm entrypoint is `laive-mcp`. The Ableton-side control surface name remains `laive`.
15
45
 
16
46
  ## Published Package
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "laive-mcp",
3
- "version": "0.1.4",
3
+ "version": "0.2.0",
4
4
  "description": "Local MCP, install tooling, and helper assets for controlling Ableton Live.",
5
5
  "license": "GPL-3.0-only",
6
6
  "type": "module",
@@ -144,13 +144,14 @@ class LiveSetAdapter(object):
144
144
 
145
145
  def insert_notes(self, clip_id, notes, dry_run=False):
146
146
  clip, track_id, slot_index = self._find_clip(clip_id)
147
+ normalized_notes = [self._tuple_note(note) for note in notes]
147
148
  if not dry_run:
148
149
  if hasattr(clip, "add_new_notes"):
149
- clip.add_new_notes(notes)
150
+ clip.add_new_notes(tuple(normalized_notes))
150
151
  elif hasattr(clip, "set_notes"):
151
- clip.set_notes(tuple(self._tuple_note(note) for note in notes))
152
+ clip.set_notes(tuple(normalized_notes))
152
153
  else:
153
- clip.notes.extend(notes)
154
+ clip.notes.extend(normalized_notes)
154
155
  note_count = len(notes)
155
156
  clip_state = self._serialize_clip(clip, track_id, slot_index)
156
157
  clip_state["note_count"] = len(getattr(clip, "notes", [])) if not dry_run else note_count
@@ -265,8 +266,8 @@ class LiveSetAdapter(object):
265
266
  def _tuple_note(self, note):
266
267
  return (
267
268
  note.get("pitch", 60),
268
- note.get("start_beats", 0.0),
269
- note.get("duration_beats", 0.25),
269
+ note.get("start_beats", note.get("startBeats", 0.0)),
270
+ note.get("duration_beats", note.get("durationBeats", 0.25)),
270
271
  note.get("velocity", 100),
271
272
  bool(note.get("mute", False)),
272
273
  )
@@ -3,6 +3,11 @@ import readline from "node:readline";
3
3
  import process from "node:process";
4
4
 
5
5
  import { LaiveMcpServer } from "./server.js";
6
+ import {
7
+ createIntegrationStatusAdapter,
8
+ createSidecarAdapter,
9
+ createUiAutomationAdapter
10
+ } from "./optional-adapters.js";
6
11
  import {
7
12
  LaiveBridgeSession,
8
13
  LaiveFixtureSession,
@@ -53,10 +58,23 @@ async function createSession(options) {
53
58
  async function main() {
54
59
  const options = parseArgs();
55
60
  const session = await createSession(options);
61
+ const stateAdapter = createStateAdapter(session);
62
+ const bridgeAdapter = createBridgeAdapter(session);
63
+ const sidecarAdapter = createSidecarAdapter({
64
+ stateAdapter,
65
+ bridgeAdapter
66
+ });
67
+ const uiAutomationAdapter = createUiAutomationAdapter();
56
68
  const server = new LaiveMcpServer({
57
- stateAdapter: createStateAdapter(session),
58
- bridgeAdapter: createBridgeAdapter(session),
59
- policyAdapter: createAllowAllPolicyAdapter()
69
+ stateAdapter,
70
+ bridgeAdapter,
71
+ sidecarAdapter,
72
+ uiAutomationAdapter,
73
+ integrationStatusAdapter: createIntegrationStatusAdapter({
74
+ sidecarAdapter,
75
+ uiAutomationAdapter
76
+ }),
77
+ policyAdapter: createAllowAllPolicyAdapter()
60
78
  });
61
79
  let lineReader = null;
62
80
 
@@ -24,6 +24,15 @@ function requireString(value, fieldName) {
24
24
  }
25
25
  }
26
26
 
27
+ function requireNotes(notes) {
28
+ if (!Array.isArray(notes) || notes.length === 0) {
29
+ throw new McpServerError(
30
+ "invalid_request",
31
+ "notes must be a non-empty array of MIDI note objects"
32
+ );
33
+ }
34
+ }
35
+
27
36
  function buildMutationResult(summary, affectedObjects, beforeVersion, afterVersion, warnings = []) {
28
37
  return {
29
38
  summary,
@@ -35,7 +44,78 @@ function buildMutationResult(summary, affectedObjects, beforeVersion, afterVersi
35
44
  };
36
45
  }
37
46
 
38
- export function buildDefaultTools({ stateAdapter, bridgeAdapter, policyAdapter }) {
47
+ function buildInformationalResult(summary, payload = {}, nextActions = []) {
48
+ return {
49
+ summary,
50
+ affected_objects: payload.affected_objects ?? [],
51
+ state_version_before: payload.state_version_before ?? null,
52
+ state_version_after: payload.state_version_after ?? null,
53
+ warnings: payload.warnings ?? [],
54
+ next_suggested_actions: nextActions,
55
+ ...payload
56
+ };
57
+ }
58
+
59
+ function buildWorkflowSchema(description) {
60
+ return {
61
+ type: "object",
62
+ properties: {
63
+ name: {
64
+ type: "string",
65
+ description
66
+ },
67
+ parameters: {
68
+ type: "object",
69
+ description: "Workflow-specific parameters.",
70
+ additionalProperties: true
71
+ }
72
+ },
73
+ required: ["name"],
74
+ additionalProperties: false
75
+ };
76
+ }
77
+
78
+ const noteItemSchema = {
79
+ type: "object",
80
+ properties: {
81
+ pitch: {
82
+ type: "integer",
83
+ minimum: 0,
84
+ maximum: 127
85
+ },
86
+ startBeats: {
87
+ type: "number",
88
+ minimum: 0
89
+ },
90
+ durationBeats: {
91
+ type: "number",
92
+ exclusiveMinimum: 0
93
+ },
94
+ velocity: {
95
+ type: "integer",
96
+ minimum: 1,
97
+ maximum: 127
98
+ },
99
+ mute: {
100
+ type: "boolean"
101
+ }
102
+ },
103
+ required: ["pitch", "startBeats", "durationBeats", "velocity"],
104
+ additionalProperties: false
105
+ };
106
+
107
+ const dryRunProperty = {
108
+ type: "boolean",
109
+ description: "If true, preview the action without mutating Live."
110
+ };
111
+
112
+ export function buildDefaultTools({
113
+ stateAdapter,
114
+ bridgeAdapter,
115
+ policyAdapter,
116
+ sidecarAdapter,
117
+ uiAutomationAdapter
118
+ }) {
39
119
  return [
40
120
  {
41
121
  name: "get_project_summary",
@@ -158,6 +238,35 @@ export function buildDefaultTools({ stateAdapter, bridgeAdapter, policyAdapter }
158
238
  };
159
239
  }
160
240
  },
241
+ {
242
+ name: "get_component_status",
243
+ description:
244
+ "Report control-surface, Max sidecar, and UI-helper availability, including setup guidance for optional components.",
245
+ inputSchema: EMPTY_OBJECT_SCHEMA,
246
+ async execute() {
247
+ const [bridgeCapabilities, sidecarStatus, uiHelperStatus] = await Promise.all([
248
+ bridgeAdapter.getCapabilities(),
249
+ sidecarAdapter.getStatus(),
250
+ uiAutomationAdapter.getStatus()
251
+ ]);
252
+
253
+ return buildInformationalResult(
254
+ "Component status loaded.",
255
+ {
256
+ affected_objects: ["bridge", "sidecar", "ui_helper"],
257
+ components: {
258
+ bridge: {
259
+ available: true,
260
+ capabilities: bridgeCapabilities
261
+ },
262
+ sidecar: sidecarStatus,
263
+ ui_helper: uiHelperStatus
264
+ }
265
+ },
266
+ ["get_capabilities", "list_sidecar_workflows", "list_ui_workflows"]
267
+ );
268
+ }
269
+ },
161
270
  {
162
271
  name: "set_tempo",
163
272
  description: "Update the current song tempo.",
@@ -194,6 +303,56 @@ export function buildDefaultTools({ stateAdapter, bridgeAdapter, policyAdapter }
194
303
  );
195
304
  }
196
305
  },
306
+ {
307
+ name: "play_transport",
308
+ description: "Start Ableton Live transport playback.",
309
+ inputSchema: createObjectSchema({
310
+ properties: {
311
+ dryRun: {
312
+ type: "boolean",
313
+ description: "If true, preview the action without mutating Live."
314
+ }
315
+ }
316
+ }),
317
+ async execute(args) {
318
+ await policyAdapter.assertAllowed("play_transport", args);
319
+ const before = await stateAdapter.getProjectSummary();
320
+ await bridgeAdapter.playTransport({ dryRun: Boolean(args.dryRun) });
321
+ const after = await stateAdapter.refreshState("song");
322
+ return buildMutationResult(
323
+ `Transport ${args.dryRun ? "play previewed" : "started"}.`,
324
+ ["song"],
325
+ before.stateVersion,
326
+ after.stateVersion,
327
+ after.warnings ?? []
328
+ );
329
+ }
330
+ },
331
+ {
332
+ name: "stop_transport",
333
+ description: "Stop Ableton Live transport playback.",
334
+ inputSchema: createObjectSchema({
335
+ properties: {
336
+ dryRun: {
337
+ type: "boolean",
338
+ description: "If true, preview the action without mutating Live."
339
+ }
340
+ }
341
+ }),
342
+ async execute(args) {
343
+ await policyAdapter.assertAllowed("stop_transport", args);
344
+ const before = await stateAdapter.getProjectSummary();
345
+ await bridgeAdapter.stopTransport({ dryRun: Boolean(args.dryRun) });
346
+ const after = await stateAdapter.refreshState("song");
347
+ return buildMutationResult(
348
+ `Transport ${args.dryRun ? "stop previewed" : "stopped"}.`,
349
+ ["song"],
350
+ before.stateVersion,
351
+ after.stateVersion,
352
+ after.warnings ?? []
353
+ );
354
+ }
355
+ },
197
356
  {
198
357
  name: "create_track",
199
358
  description: "Create a new track.",
@@ -225,6 +384,37 @@ export function buildDefaultTools({ stateAdapter, bridgeAdapter, policyAdapter }
225
384
  );
226
385
  }
227
386
  },
387
+ {
388
+ name: "create_scene",
389
+ description: "Create a new scene.",
390
+ inputSchema: createObjectSchema({
391
+ properties: {
392
+ name: {
393
+ type: "string",
394
+ description: "Optional scene name."
395
+ },
396
+ dryRun: {
397
+ type: "boolean",
398
+ description: "If true, preview the action without mutating Live."
399
+ }
400
+ }
401
+ }),
402
+ async execute(args) {
403
+ await policyAdapter.assertAllowed("create_scene", args);
404
+ const before = await stateAdapter.getProjectSummary();
405
+ const created = await bridgeAdapter.createScene(args.name ?? null, {
406
+ dryRun: Boolean(args.dryRun)
407
+ });
408
+ const after = await stateAdapter.refreshState("scenes");
409
+ return buildMutationResult(
410
+ `Scene ${args.dryRun ? "previewed" : "created"}.`,
411
+ created.affectedObjects ?? ["scenes"],
412
+ before.stateVersion,
413
+ after.stateVersion,
414
+ after.warnings ?? []
415
+ );
416
+ }
417
+ },
228
418
  {
229
419
  name: "create_clip",
230
420
  description: "Create a MIDI clip on a target track and slot.",
@@ -280,6 +470,50 @@ export function buildDefaultTools({ stateAdapter, bridgeAdapter, policyAdapter }
280
470
  );
281
471
  }
282
472
  },
473
+ {
474
+ name: "insert_notes",
475
+ description: "Insert or replace notes in a target MIDI clip.",
476
+ inputSchema: createObjectSchema({
477
+ properties: {
478
+ clipId: {
479
+ type: "string",
480
+ description: "Canonical clip id such as clip:session:track:8:slot:1."
481
+ },
482
+ notes: {
483
+ type: "array",
484
+ items: noteItemSchema,
485
+ description: "Note payload to apply to the clip."
486
+ },
487
+ dryRun: {
488
+ type: "boolean",
489
+ description: "If true, preview the action without mutating Live."
490
+ }
491
+ },
492
+ required: ["clipId", "notes"]
493
+ }),
494
+ async execute(args) {
495
+ requireString(args.clipId, "clipId");
496
+ requireNotes(args.notes);
497
+
498
+ await policyAdapter.assertAllowed("insert_notes", args);
499
+ const before = await stateAdapter.getProjectSummary();
500
+ const inserted = await bridgeAdapter.insertNotes(
501
+ {
502
+ clipId: args.clipId,
503
+ notes: args.notes
504
+ },
505
+ { dryRun: Boolean(args.dryRun) }
506
+ );
507
+ const after = await stateAdapter.refreshState("project");
508
+ return buildMutationResult(
509
+ `Notes ${args.dryRun ? "previewed" : "inserted"} for ${args.clipId}.`,
510
+ inserted.affectedObjects ?? [args.clipId],
511
+ before.stateVersion,
512
+ after.stateVersion,
513
+ after.warnings ?? []
514
+ );
515
+ }
516
+ },
283
517
  {
284
518
  name: "set_parameter",
285
519
  description: "Set a device parameter by track/device/parameter identifiers.",
@@ -339,6 +573,288 @@ export function buildDefaultTools({ stateAdapter, bridgeAdapter, policyAdapter }
339
573
  );
340
574
  }
341
575
  },
576
+ {
577
+ name: "list_sidecar_workflows",
578
+ description: "List optional Max for Live sidecar workflows and current availability.",
579
+ inputSchema: EMPTY_OBJECT_SCHEMA,
580
+ async execute() {
581
+ const result = await sidecarAdapter.listWorkflows();
582
+ return buildInformationalResult(
583
+ "Sidecar workflow status loaded.",
584
+ {
585
+ affected_objects: ["sidecar"],
586
+ sidecar: result
587
+ },
588
+ ["run_sidecar_workflow", "get_component_status"]
589
+ );
590
+ }
591
+ },
592
+ {
593
+ name: "sidecar_snapshot_selection_context",
594
+ description:
595
+ "Read selected track, clip, and device context through the optional Max for Live sidecar, or return setup instructions if it is unavailable.",
596
+ inputSchema: EMPTY_OBJECT_SCHEMA,
597
+ async execute() {
598
+ const result = await sidecarAdapter.snapshotSelectionContext();
599
+ return buildInformationalResult(
600
+ "Sidecar selection context loaded.",
601
+ {
602
+ affected_objects: Object.values(result.context ?? {})
603
+ .filter(Boolean)
604
+ .map((value) => value.id ?? value),
605
+ sidecar_workflow: result
606
+ },
607
+ ["get_selected_context", "get_component_status"]
608
+ );
609
+ }
610
+ },
611
+ {
612
+ name: "sidecar_replace_clip_notes",
613
+ description:
614
+ "Apply a note payload through the optional Max for Live sidecar, or return setup instructions if it is unavailable.",
615
+ inputSchema: createObjectSchema({
616
+ properties: {
617
+ clipId: {
618
+ type: "string",
619
+ description: "Canonical clip id such as clip:session:track:8:slot:1."
620
+ },
621
+ notes: {
622
+ type: "array",
623
+ items: noteItemSchema,
624
+ description: "Note payload to apply to the clip."
625
+ },
626
+ dryRun: dryRunProperty
627
+ },
628
+ required: ["clipId", "notes"]
629
+ }),
630
+ async execute(args) {
631
+ requireString(args.clipId, "clipId");
632
+ requireNotes(args.notes);
633
+ await policyAdapter.assertAllowed("sidecar_replace_clip_notes", args);
634
+ const before = await stateAdapter.getProjectSummary();
635
+ const replaced = await sidecarAdapter.replaceClipNotes({
636
+ clipId: args.clipId,
637
+ notes: args.notes,
638
+ dryRun: Boolean(args.dryRun)
639
+ });
640
+ const after = await stateAdapter.refreshState("project");
641
+ return buildMutationResult(
642
+ `Sidecar note replacement ${args.dryRun ? "previewed" : "applied"} for ${args.clipId}.`,
643
+ replaced.affectedObjects ?? [args.clipId],
644
+ before.stateVersion,
645
+ after.stateVersion,
646
+ after.warnings ?? []
647
+ );
648
+ }
649
+ },
650
+ {
651
+ name: "sidecar_observe_device_parameters",
652
+ description:
653
+ "Capture a selected-device parameter snapshot through the optional Max for Live sidecar, or return setup instructions if it is unavailable.",
654
+ inputSchema: createObjectSchema({
655
+ properties: {
656
+ trackId: {
657
+ type: "string",
658
+ description: "Optional track identifier when no track is selected in Live."
659
+ }
660
+ }
661
+ }),
662
+ async execute(args) {
663
+ const result = await sidecarAdapter.observeDeviceParameters({
664
+ trackId: args.trackId ?? null
665
+ });
666
+ return buildInformationalResult(
667
+ "Sidecar device parameter snapshot loaded.",
668
+ {
669
+ affected_objects: [
670
+ result.deviceTree?.trackId,
671
+ ...(result.deviceTree?.devices ?? []).map((device) => device.id)
672
+ ].filter(Boolean),
673
+ warnings: result.warnings ?? [],
674
+ sidecar_workflow: result
675
+ },
676
+ ["get_device_tree", "get_component_status"]
677
+ );
678
+ }
679
+ },
680
+ {
681
+ name: "run_sidecar_workflow",
682
+ description:
683
+ "Execute an optional Max for Live sidecar workflow, or return setup instructions if the sidecar is unavailable.",
684
+ inputSchema: buildWorkflowSchema(
685
+ "Sidecar workflow name, for example snapshotSelectionContext or replaceClipNotes."
686
+ ),
687
+ async execute(args) {
688
+ const result = await sidecarAdapter.executeWorkflow(args.name, args.parameters ?? {});
689
+ return buildInformationalResult(
690
+ `Sidecar workflow ${args.name} completed.`,
691
+ {
692
+ affected_objects: ["sidecar"],
693
+ sidecar_workflow: result
694
+ },
695
+ ["get_selected_context", "refresh_state"]
696
+ );
697
+ }
698
+ },
699
+ {
700
+ name: "list_ui_workflows",
701
+ description: "List optional UI-helper workflows and current availability.",
702
+ inputSchema: EMPTY_OBJECT_SCHEMA,
703
+ async execute() {
704
+ const result = await uiAutomationAdapter.listWorkflows();
705
+ return buildInformationalResult(
706
+ "UI workflow status loaded.",
707
+ {
708
+ affected_objects: ["ui_helper"],
709
+ ui_helper: result
710
+ },
711
+ ["run_ui_workflow", "get_component_status"]
712
+ );
713
+ }
714
+ },
715
+ {
716
+ name: "ui_capture_context",
717
+ description:
718
+ "Capture frontmost-app context through the optional UI helper, or return setup instructions if it is unavailable.",
719
+ inputSchema: EMPTY_OBJECT_SCHEMA,
720
+ async execute() {
721
+ const result = await uiAutomationAdapter.executeWorkflow("captureContext");
722
+ return buildInformationalResult(
723
+ "UI helper context captured.",
724
+ {
725
+ affected_objects: ["ui_helper"],
726
+ ui_workflow: result
727
+ },
728
+ ["get_component_status"]
729
+ );
730
+ }
731
+ },
732
+ {
733
+ name: "ui_focus_section",
734
+ description:
735
+ "Focus a named Live section through the optional UI helper, or return setup instructions if it is unavailable.",
736
+ inputSchema: createObjectSchema({
737
+ properties: {
738
+ sectionName: {
739
+ type: "string",
740
+ description: "Target Live section name."
741
+ }
742
+ },
743
+ required: ["sectionName"]
744
+ }),
745
+ async execute(args) {
746
+ requireString(args.sectionName, "sectionName");
747
+ const result = await uiAutomationAdapter.executeWorkflow("focusSection", {
748
+ sectionName: args.sectionName
749
+ });
750
+ return buildInformationalResult(
751
+ `UI helper focused ${args.sectionName}.`,
752
+ {
753
+ affected_objects: ["ui_helper"],
754
+ ui_workflow: result
755
+ },
756
+ ["get_component_status"]
757
+ );
758
+ }
759
+ },
760
+ {
761
+ name: "ui_browser_search_and_load",
762
+ description:
763
+ "Search Ableton's browser and trigger a load action through the optional UI helper, or return setup instructions if it is unavailable.",
764
+ inputSchema: createObjectSchema({
765
+ properties: {
766
+ query: {
767
+ type: "string",
768
+ description: "Browser search query."
769
+ }
770
+ },
771
+ required: ["query"]
772
+ }),
773
+ async execute(args) {
774
+ requireString(args.query, "query");
775
+ const result = await uiAutomationAdapter.executeWorkflow("browserSearchAndLoad", {
776
+ query: args.query
777
+ });
778
+ return buildInformationalResult(
779
+ `UI helper searched the browser for ${args.query}.`,
780
+ {
781
+ affected_objects: ["ui_helper"],
782
+ ui_workflow: result
783
+ },
784
+ ["get_component_status", "refresh_state"]
785
+ );
786
+ }
787
+ },
788
+ {
789
+ name: "ui_export_audio_video",
790
+ description:
791
+ "Open Ableton's Export Audio/Video dialog through the optional UI helper, or return setup instructions if it is unavailable.",
792
+ inputSchema: EMPTY_OBJECT_SCHEMA,
793
+ async execute() {
794
+ const result = await uiAutomationAdapter.executeWorkflow("exportAudioVideo");
795
+ return buildInformationalResult(
796
+ "UI helper opened the Export Audio/Video flow.",
797
+ {
798
+ affected_objects: ["ui_helper"],
799
+ ui_workflow: result
800
+ },
801
+ ["get_component_status"]
802
+ );
803
+ }
804
+ },
805
+ {
806
+ name: "ui_export_with_preset",
807
+ description:
808
+ "Apply an export preset through the optional UI helper, or return setup instructions if it is unavailable.",
809
+ inputSchema: createObjectSchema({
810
+ properties: {
811
+ presetName: {
812
+ type: "string",
813
+ description: "Preset name to enter in the export dialog."
814
+ },
815
+ outputPath: {
816
+ type: "string",
817
+ description: "Output folder to enter in the export dialog."
818
+ }
819
+ },
820
+ required: ["presetName", "outputPath"]
821
+ }),
822
+ async execute(args) {
823
+ requireString(args.presetName, "presetName");
824
+ requireString(args.outputPath, "outputPath");
825
+ const result = await uiAutomationAdapter.executeWorkflow("exportWithPreset", {
826
+ presetName: args.presetName,
827
+ outputPath: args.outputPath
828
+ });
829
+ return buildInformationalResult(
830
+ `UI helper staged export preset ${args.presetName}.`,
831
+ {
832
+ affected_objects: ["ui_helper"],
833
+ ui_workflow: result
834
+ },
835
+ ["get_component_status"]
836
+ );
837
+ }
838
+ },
839
+ {
840
+ name: "run_ui_workflow",
841
+ description:
842
+ "Execute an optional UI-helper workflow, or return setup instructions if the UI helper is unavailable.",
843
+ inputSchema: buildWorkflowSchema(
844
+ "UI workflow name, for example exportAudioVideo, browserSearchAndLoad, or captureContext."
845
+ ),
846
+ async execute(args) {
847
+ const result = await uiAutomationAdapter.executeWorkflow(args.name, args.parameters ?? {});
848
+ return buildInformationalResult(
849
+ `UI workflow ${args.name} completed.`,
850
+ {
851
+ affected_objects: ["ui_helper"],
852
+ ui_workflow: result
853
+ },
854
+ ["get_component_status", "refresh_state"]
855
+ );
856
+ }
857
+ },
342
858
  {
343
859
  name: "refresh_state",
344
860
  description: "Force a state refresh for a target scope.",
@@ -369,15 +885,25 @@ export function buildDefaultTools({ stateAdapter, bridgeAdapter, policyAdapter }
369
885
  description: "Return bridge and server capabilities.",
370
886
  inputSchema: EMPTY_OBJECT_SCHEMA,
371
887
  async execute() {
372
- const capabilities = await bridgeAdapter.getCapabilities();
888
+ const [capabilities, sidecarStatus, uiHelperStatus] = await Promise.all([
889
+ bridgeAdapter.getCapabilities(),
890
+ sidecarAdapter.getStatus(),
891
+ uiAutomationAdapter.getStatus()
892
+ ]);
373
893
  return {
374
894
  summary: "Capabilities loaded.",
375
895
  affected_objects: ["bridge", "server"],
376
896
  state_version_before: null,
377
897
  state_version_after: null,
378
898
  warnings: [],
379
- next_suggested_actions: ["get_project_summary"],
380
- capabilities
899
+ next_suggested_actions: ["get_project_summary", "get_component_status"],
900
+ capabilities: {
901
+ ...capabilities,
902
+ optional_components: {
903
+ sidecar: sidecarStatus,
904
+ ui_helper: uiHelperStatus
905
+ }
906
+ }
381
907
  };
382
908
  }
383
909
  }
@@ -8,3 +8,7 @@ export {
8
8
  createBridgeAdapter,
9
9
  createStateAdapter
10
10
  } from "./session.js";
11
+ export {
12
+ createSidecarAdapter,
13
+ createUiAutomationAdapter
14
+ } from "./optional-adapters.js";
@@ -0,0 +1,193 @@
1
+ import { existsSync } from "node:fs";
2
+
3
+ import {
4
+ getDefaultSidecarInstallTarget
5
+ } from "../../live-sidecar-m4l/src/project.js";
6
+ import {
7
+ listWorkflows as listSidecarWorkflows
8
+ } from "../../live-sidecar-m4l/src/workflows.js";
9
+ import { executeWorkflow as executeUiWorkflow } from "../../ui-automation/src/executor.js";
10
+ import { getStableUiHelperInstallPaths } from "../../ui-automation/src/helper.js";
11
+ import { workflows as uiWorkflows } from "../../ui-automation/src/workflows.js";
12
+
13
+ import { McpServerError } from "./errors.js";
14
+
15
+ function buildSidecarSetupInstructions(devicePath) {
16
+ return [
17
+ "Run `npx laive-mcp install --apply` if the sidecar device is not installed yet.",
18
+ `Load \`${devicePath}\` onto a MIDI track in the current Ableton Live set.`,
19
+ "Keep the `laive` Control Surface enabled in Live before retrying the sidecar tool."
20
+ ];
21
+ }
22
+
23
+ function buildUiHelperSetupInstructions(appBundleRoot) {
24
+ return [
25
+ "Run `npx laive-mcp install --apply` if the UI helper app is not installed yet.",
26
+ `In macOS System Settings > Privacy & Security > Accessibility, add and enable \`${appBundleRoot}\`.`,
27
+ "Bring Ableton Live to the foreground before retrying the UI automation tool."
28
+ ];
29
+ }
30
+
31
+ function summarizeUiWorkflows() {
32
+ return Object.values(uiWorkflows).map((workflow) => ({
33
+ name: workflow.name,
34
+ description: workflow.description,
35
+ parameters: workflow.parameters ?? []
36
+ }));
37
+ }
38
+
39
+ function createSetupRequiredError(message, data) {
40
+ return new McpServerError("setup_required", message, data);
41
+ }
42
+
43
+ function requireConfigured(status, label) {
44
+ if (!status.configured) {
45
+ throw createSetupRequiredError(`${label} is not configured`, status);
46
+ }
47
+ }
48
+
49
+ function getStatus() {
50
+ const sidecarTarget = getDefaultSidecarInstallTarget();
51
+ const uiHelperTarget = getStableUiHelperInstallPaths();
52
+
53
+ return {
54
+ sidecar: {
55
+ configured: existsSync(sidecarTarget.devicePath),
56
+ devicePath: sidecarTarget.devicePath,
57
+ workflows: listSidecarWorkflows(),
58
+ setup_instructions: buildSidecarSetupInstructions(sidecarTarget.devicePath)
59
+ },
60
+ ui_helper: {
61
+ configured: existsSync(uiHelperTarget.appBundleRoot),
62
+ appBundleRoot: uiHelperTarget.appBundleRoot,
63
+ executablePath: uiHelperTarget.executablePath,
64
+ workflows: summarizeUiWorkflows(),
65
+ setup_instructions: buildUiHelperSetupInstructions(uiHelperTarget.appBundleRoot)
66
+ }
67
+ };
68
+ }
69
+
70
+ export function createSidecarAdapter({ stateAdapter, bridgeAdapter } = {}) {
71
+ return {
72
+ async getStatus() {
73
+ return getStatus().sidecar;
74
+ },
75
+ async listWorkflows() {
76
+ const status = getStatus();
77
+ return {
78
+ ...status.sidecar,
79
+ workflows: listSidecarWorkflows()
80
+ };
81
+ },
82
+ async snapshotSelectionContext() {
83
+ const status = getStatus();
84
+ requireConfigured(status.sidecar, "Max for Live sidecar");
85
+ if (!stateAdapter) {
86
+ throw new McpServerError("adapter_unavailable", "state adapter is not configured");
87
+ }
88
+ const context = await stateAdapter.getSelectedContext();
89
+ return {
90
+ workflow: "snapshotSelectionContext",
91
+ configured: true,
92
+ context
93
+ };
94
+ },
95
+ async replaceClipNotes({ clipId, notes, dryRun = false }) {
96
+ const status = getStatus();
97
+ requireConfigured(status.sidecar, "Max for Live sidecar");
98
+ if (!bridgeAdapter) {
99
+ throw new McpServerError("adapter_unavailable", "bridge adapter is not configured");
100
+ }
101
+ return await bridgeAdapter.insertNotes({
102
+ clipId,
103
+ notes,
104
+ dryRun
105
+ });
106
+ },
107
+ async observeDeviceParameters({ trackId } = {}) {
108
+ const status = getStatus();
109
+ requireConfigured(status.sidecar, "Max for Live sidecar");
110
+ if (!stateAdapter) {
111
+ throw new McpServerError("adapter_unavailable", "state adapter is not configured");
112
+ }
113
+ const context = await stateAdapter.getSelectedContext();
114
+ const resolvedTrackId = trackId ?? context.track?.id;
115
+ if (!resolvedTrackId) {
116
+ throw new McpServerError(
117
+ "invalid_request",
118
+ "trackId is required when no track is selected in Live"
119
+ );
120
+ }
121
+
122
+ return {
123
+ workflow: "observeDeviceParameters",
124
+ configured: true,
125
+ mode: "snapshot",
126
+ warnings: [
127
+ "Continuous sidecar event streaming is not yet emitted over MCP; returning a current parameter snapshot instead."
128
+ ],
129
+ selectedDeviceId: context.device?.id ?? null,
130
+ deviceTree: await stateAdapter.getDeviceTree(resolvedTrackId)
131
+ };
132
+ },
133
+ async executeWorkflow(name, parameters = {}) {
134
+ switch (name) {
135
+ case "snapshotSelectionContext":
136
+ return await this.snapshotSelectionContext();
137
+ case "replaceClipNotes":
138
+ return await this.replaceClipNotes({
139
+ clipId: parameters.clipId,
140
+ notes: parameters.notes,
141
+ dryRun: Boolean(parameters.dryRun)
142
+ });
143
+ case "observeDeviceParameters":
144
+ return await this.observeDeviceParameters({
145
+ trackId: parameters.trackId
146
+ });
147
+ default:
148
+ throw new McpServerError("invalid_request", `Unknown sidecar workflow: ${name}`);
149
+ }
150
+ }
151
+ };
152
+ }
153
+
154
+ export function createUiAutomationAdapter() {
155
+ return {
156
+ async getStatus() {
157
+ return getStatus().ui_helper;
158
+ },
159
+ async listWorkflows() {
160
+ const status = getStatus();
161
+ return {
162
+ ...status.ui_helper,
163
+ workflows: summarizeUiWorkflows()
164
+ };
165
+ },
166
+ async executeWorkflow(name, parameters = {}) {
167
+ const status = getStatus();
168
+ requireConfigured(status.ui_helper, "UI helper");
169
+ return {
170
+ workflow: name,
171
+ configured: true,
172
+ helper: {
173
+ appBundleRoot: status.ui_helper.appBundleRoot,
174
+ executablePath: status.ui_helper.executablePath
175
+ },
176
+ result: await executeUiWorkflow(name, parameters)
177
+ };
178
+ }
179
+ };
180
+ }
181
+
182
+ export function createIntegrationStatusAdapter({ sidecarAdapter, uiAutomationAdapter } = {}) {
183
+ return {
184
+ async getStatus() {
185
+ return {
186
+ sidecar: sidecarAdapter ? await sidecarAdapter.getStatus() : getStatus().sidecar,
187
+ ui_helper: uiAutomationAdapter
188
+ ? await uiAutomationAdapter.getStatus()
189
+ : getStatus().ui_helper
190
+ };
191
+ }
192
+ };
193
+ }
@@ -2,15 +2,41 @@ import rootPackage from "../../../package.json" with { type: "json" };
2
2
  import { ToolRegistry } from "./tool-registry.js";
3
3
  import { buildDefaultTools } from "./default-tools.js";
4
4
  import { McpServerError, toErrorShape } from "./errors.js";
5
+ import {
6
+ createIntegrationStatusAdapter,
7
+ createSidecarAdapter,
8
+ createUiAutomationAdapter
9
+ } from "./optional-adapters.js";
5
10
 
6
11
  export class LaiveMcpServer {
7
- constructor({ stateAdapter, bridgeAdapter, policyAdapter, serverInfo } = {}) {
12
+ constructor({
13
+ stateAdapter,
14
+ bridgeAdapter,
15
+ policyAdapter,
16
+ sidecarAdapter,
17
+ uiAutomationAdapter,
18
+ integrationStatusAdapter,
19
+ serverInfo
20
+ } = {}) {
8
21
  this.serverInfo = serverInfo ?? {
9
22
  name: "laive-mcp",
10
23
  version: rootPackage.version
11
24
  };
12
25
  this.stateAdapter = stateAdapter ?? createUnsupportedAdapter("state");
13
26
  this.bridgeAdapter = bridgeAdapter ?? createUnsupportedAdapter("bridge");
27
+ this.sidecarAdapter =
28
+ sidecarAdapter ??
29
+ createSidecarAdapter({
30
+ stateAdapter: this.stateAdapter,
31
+ bridgeAdapter: this.bridgeAdapter
32
+ });
33
+ this.uiAutomationAdapter = uiAutomationAdapter ?? createUiAutomationAdapter();
34
+ this.integrationStatusAdapter =
35
+ integrationStatusAdapter ??
36
+ createIntegrationStatusAdapter({
37
+ sidecarAdapter: this.sidecarAdapter,
38
+ uiAutomationAdapter: this.uiAutomationAdapter
39
+ });
14
40
  this.policyAdapter = policyAdapter ?? {
15
41
  async assertAllowed() {
16
42
  return true;
@@ -21,7 +47,10 @@ export class LaiveMcpServer {
21
47
  for (const tool of buildDefaultTools({
22
48
  stateAdapter: this.stateAdapter,
23
49
  bridgeAdapter: this.bridgeAdapter,
24
- policyAdapter: this.policyAdapter
50
+ policyAdapter: this.policyAdapter,
51
+ sidecarAdapter: this.sidecarAdapter,
52
+ uiAutomationAdapter: this.uiAutomationAdapter,
53
+ integrationStatusAdapter: this.integrationStatusAdapter
25
54
  })) {
26
55
  this.tools.register(tool);
27
56
  }
@@ -200,6 +200,40 @@ export function createBridgeAdapter(target) {
200
200
  affectedObjects: result.track ? [result.track.id] : []
201
201
  };
202
202
  },
203
+ async playTransport(options = {}) {
204
+ const bridgeClient = await resolveBridgeClient(target);
205
+ return (
206
+ await bridgeClient.request("call", "transport.play", {}, {
207
+ dryRun: Boolean(options.dryRun)
208
+ })
209
+ ).result;
210
+ },
211
+ async stopTransport(options = {}) {
212
+ const bridgeClient = await resolveBridgeClient(target);
213
+ return (
214
+ await bridgeClient.request("call", "transport.stop", {}, {
215
+ dryRun: Boolean(options.dryRun)
216
+ })
217
+ ).result;
218
+ },
219
+ async createScene(name = null, options = {}) {
220
+ const bridgeClient = await resolveBridgeClient(target);
221
+ const result = (
222
+ await bridgeClient.request(
223
+ "call",
224
+ "create_scene",
225
+ {
226
+ name
227
+ },
228
+ { dryRun: Boolean(options.dryRun) }
229
+ )
230
+ ).result;
231
+
232
+ return {
233
+ ...result,
234
+ affectedObjects: result.scene ? [result.scene.id] : ["scenes"]
235
+ };
236
+ },
203
237
  async createClip(payload) {
204
238
  const bridgeClient = await resolveBridgeClient(target);
205
239
  const result = (
@@ -221,6 +255,31 @@ export function createBridgeAdapter(target) {
221
255
  affectedObjects: result.clip ? [payload.trackId, result.clip.id] : [payload.trackId]
222
256
  };
223
257
  },
258
+ async insertNotes(payload, options = {}) {
259
+ const bridgeClient = await resolveBridgeClient(target);
260
+ const result = (
261
+ await bridgeClient.request(
262
+ "call",
263
+ "insert_notes",
264
+ {
265
+ clip_id: payload.clipId,
266
+ notes: (payload.notes ?? []).map((note) => ({
267
+ pitch: note.pitch,
268
+ start_beats: note.startBeats ?? note.start_beats,
269
+ duration_beats: note.durationBeats ?? note.duration_beats,
270
+ velocity: note.velocity,
271
+ mute: note.mute ?? false
272
+ }))
273
+ },
274
+ { dryRun: Boolean(options.dryRun ?? payload.dryRun) }
275
+ )
276
+ ).result;
277
+
278
+ return {
279
+ ...result,
280
+ affectedObjects: [payload.clipId]
281
+ };
282
+ },
224
283
  async setParameter(payload, options = {}) {
225
284
  const bridgeClient = await resolveBridgeClient(target);
226
285
  const result = (
@@ -6,6 +6,7 @@ import argparse
6
6
  import json
7
7
  import shutil
8
8
  import sys
9
+ import time
9
10
  from dataclasses import dataclass
10
11
  from pathlib import Path
11
12
  from typing import Iterable, List
@@ -57,6 +58,21 @@ def ensure_source_exists(source_root: Path = REMOTE_SCRIPT_SOURCE) -> Path:
57
58
  return source_root
58
59
 
59
60
 
61
def remove_tree(path: Path, attempts: int = 3, delay_seconds: float = 0.05) -> None:
    """Recursively delete *path*, retrying transient filesystem errors.

    Ableton's Remote Script staging tree can briefly hold locked
    ``__pycache__`` entries; a short retry loop makes ``laive-mcp package``
    robust against those intermittent ``OSError`` failures.

    Args:
        path: Directory tree to remove.
        attempts: Maximum number of ``shutil.rmtree`` tries (>= 1 to act).
        delay_seconds: Pause between consecutive attempts.

    Raises:
        OSError: The last error seen if every attempt fails.
    """
    last_error = None
    for attempt in range(attempts):
        try:
            shutil.rmtree(path)
            return
        except FileNotFoundError:
            # Already gone — treat as success.
            return
        except OSError as error:
            last_error = error
            # Back off only between attempts; don't sleep after the final
            # failure just to raise immediately afterwards.
            if attempt + 1 < attempts:
                time.sleep(delay_seconds)
    if last_error is not None:
        raise last_error
74
+
75
+
60
76
  def stage_remote_script(
61
77
  source_root: Path = REMOTE_SCRIPT_SOURCE,
62
78
  artifacts_dir: Path = DEFAULT_ARTIFACTS_DIR,
@@ -68,7 +84,7 @@ def stage_remote_script(
68
84
  target_dir = staging_root / source_root.name
69
85
 
70
86
  if target_dir.exists():
71
- shutil.rmtree(target_dir)
87
+ remove_tree(target_dir)
72
88
 
73
89
  shutil.copytree(source_root, target_dir)
74
90
  archive_path = shutil.make_archive(