@vibeframe/mcp-server 0.13.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +90 -0
- package/dist/index.js +1313 -0
- package/package.json +55 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,1313 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// src/index.ts
|
|
4
|
+
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
|
5
|
+
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
6
|
+
import {
|
|
7
|
+
CallToolRequestSchema,
|
|
8
|
+
ListToolsRequestSchema,
|
|
9
|
+
ListResourcesRequestSchema,
|
|
10
|
+
ReadResourceRequestSchema,
|
|
11
|
+
ListPromptsRequestSchema,
|
|
12
|
+
GetPromptRequestSchema
|
|
13
|
+
} from "@modelcontextprotocol/sdk/types.js";
|
|
14
|
+
|
|
15
|
+
// src/tools/index.ts
|
|
16
|
+
import { readFile, writeFile } from "node:fs/promises";
|
|
17
|
+
import { resolve } from "node:path";
|
|
18
|
+
|
|
19
|
+
// ../cli/src/engine/project.ts
|
|
20
|
+
// Produce a unique-ish identifier: epoch milliseconds joined to a short
// base-36 random suffix (collision-resistant enough for editor entities).
var generateId = () => {
  const timePart = Date.now();
  const randomPart = Math.random().toString(36).slice(2, 11);
  return `${timePart}-${randomPart}`;
};
|
|
23
|
+
/**
 * Build the initial editor state for a brand-new project: fresh metadata,
 * one video track, one audio track, and empty clip/source/transition lists.
 *
 * @param {string} [name="Untitled Project"] Display name for the project.
 * @returns {object} A complete, self-contained project state object.
 */
function createDefaultState(name = "Untitled Project") {
  // Shared default flags for the two starter tracks. Spread last so the
  // resulting key order (id, name, type, order, flags) matches the
  // serialized layout produced before this refactor.
  const defaultFlags = { isMuted: false, isLocked: false, isVisible: true };
  return {
    project: {
      id: generateId(),
      name,
      createdAt: new Date(),
      updatedAt: new Date(),
      aspectRatio: "16:9",
      frameRate: 30,
      duration: 0
    },
    tracks: [
      { id: "video-track-1", name: "Video 1", type: "video", order: 1, ...defaultFlags },
      { id: "audio-track-1", name: "Audio 1", type: "audio", order: 0, ...defaultFlags }
    ],
    clips: [],
    sources: [],
    transitions: [],
    currentTime: 0,
    isPlaying: false,
    zoom: 50,
    scrollX: 0,
    selectedClipIds: [],
    selectedTrackId: null
  };
}
|
|
65
|
+
/**
 * In-memory model of a VibeFrame project: metadata, tracks, clips, media
 * sources, transitions, and editor UI state.
 *
 * Every mutation that can change the timeline length calls
 * calculateDuration() to keep project.duration in sync. getState() returns
 * a deep copy so callers cannot mutate internal state by accident; the
 * other getters return shallow copies of arrays/objects.
 */
var Project = class _Project {
  state;
  filePath = null;
  constructor(name) {
    this.state = createDefaultState(name);
  }
  /** Get current state (immutable deep copy). */
  getState() {
    return structuredClone(this.state);
  }
  /** Get project metadata (shallow copy). */
  getMeta() {
    return { ...this.state.project };
  }
  /** Get the file path this project was loaded from, or null if unsaved. */
  getFilePath() {
    return this.filePath;
  }
  // ============ Project Operations ============
  /** Rename the project and bump updatedAt. */
  setName(name) {
    this.state.project.name = name;
    this.state.project.updatedAt = new Date();
  }
  /** Set the aspect ratio string (e.g. "16:9") and bump updatedAt. */
  setAspectRatio(ratio) {
    this.state.project.aspectRatio = ratio;
    this.state.project.updatedAt = new Date();
  }
  /** Set the frame rate in frames per second and bump updatedAt. */
  setFrameRate(fps) {
    this.state.project.frameRate = fps;
    this.state.project.updatedAt = new Date();
  }
  // ============ Media Source Operations ============
  /** Register a media source; a fresh id is assigned. Returns the stored source. */
  addSource(source) {
    const newSource = { ...source, id: generateId() };
    this.state.sources.push(newSource);
    return newSource;
  }
  /**
   * Remove a source and every clip that referenced it.
   * @returns {boolean} false if no source with that id exists.
   */
  removeSource(id) {
    const index = this.state.sources.findIndex((s) => s.id === id);
    if (index === -1) return false;
    this.state.sources.splice(index, 1);
    // Cascade: clips pointing at a deleted source are meaningless.
    this.state.clips = this.state.clips.filter((c) => c.sourceId !== id);
    this.calculateDuration();
    return true;
  }
  /** Look up a source by id (undefined if absent). */
  getSource(id) {
    return this.state.sources.find((s) => s.id === id);
  }
  /** All sources (shallow copy of the array). */
  getSources() {
    return [...this.state.sources];
  }
  // ============ Track Operations ============
  /** Add a track; a fresh id is assigned. Returns the stored track. */
  addTrack(track) {
    const newTrack = { ...track, id: generateId() };
    this.state.tracks.push(newTrack);
    return newTrack;
  }
  /**
   * Remove a track and every clip on it.
   * @returns {boolean} false if no track with that id exists.
   */
  removeTrack(id) {
    const index = this.state.tracks.findIndex((t) => t.id === id);
    if (index === -1) return false;
    this.state.tracks.splice(index, 1);
    this.state.clips = this.state.clips.filter((c) => c.trackId !== id);
    this.calculateDuration();
    return true;
  }
  /** Shallow-merge updates onto a track. Returns false if not found. */
  updateTrack(id, updates) {
    const track = this.state.tracks.find((t) => t.id === id);
    if (!track) return false;
    Object.assign(track, updates);
    return true;
  }
  /** Look up a track by id (undefined if absent). */
  getTrack(id) {
    return this.state.tracks.find((t) => t.id === id);
  }
  /** All tracks (shallow copy of the array). */
  getTracks() {
    return [...this.state.tracks];
  }
  /** All tracks of the given type ("video" or "audio"). */
  getTracksByType(type) {
    return this.state.tracks.filter((t) => t.type === type);
  }
  // ============ Clip Operations ============
  /** Add a clip with a fresh id and an empty effects list. Returns the stored clip. */
  addClip(clip) {
    const newClip = {
      ...clip,
      id: generateId(),
      effects: []
    };
    this.state.clips.push(newClip);
    this.calculateDuration();
    return newClip;
  }
  /**
   * Remove a clip and drop it from the current selection.
   * @returns {boolean} false if no clip with that id exists.
   */
  removeClip(id) {
    const index = this.state.clips.findIndex((c) => c.id === id);
    if (index === -1) return false;
    this.state.clips.splice(index, 1);
    this.state.selectedClipIds = this.state.selectedClipIds.filter((cid) => cid !== id);
    this.calculateDuration();
    return true;
  }
  /** Shallow-merge updates onto a clip and recompute duration. */
  updateClip(id, updates) {
    const clip = this.state.clips.find((c) => c.id === id);
    if (!clip) return false;
    Object.assign(clip, updates);
    this.calculateDuration();
    return true;
  }
  /** Move a clip to a track/time. Start time is clamped to >= 0. */
  moveClip(id, trackId, startTime) {
    const clip = this.state.clips.find((c) => c.id === id);
    if (!clip) return false;
    clip.trackId = trackId;
    clip.startTime = Math.max(0, startTime);
    this.calculateDuration();
    return true;
  }
  /**
   * Trim the clip's in-point: shift startTime and sourceStartOffset by the
   * same delta, shrinking (or growing) duration accordingly.
   *
   * FIX: the delta is now clamped so duration cannot drop below the same
   * 0.1s floor that trimClipEnd enforces; previously a large newStartTime
   * produced a negative duration.
   * NOTE(review): a negative delta can still push sourceStartOffset below 0
   * (extending past the source's start) — verify upstream clamping.
   */
  trimClipStart(id, newStartTime) {
    const clip = this.state.clips.find((c) => c.id === id);
    if (!clip) return false;
    const requestedDelta = newStartTime - clip.startTime;
    const delta = Math.min(requestedDelta, clip.duration - 0.1);
    clip.startTime += delta;
    clip.sourceStartOffset += delta;
    clip.duration -= delta;
    this.calculateDuration();
    return true;
  }
  /** Trim the clip's out-point; duration is floored at 0.1s. */
  trimClipEnd(id, newDuration) {
    const clip = this.state.clips.find((c) => c.id === id);
    if (!clip) return false;
    clip.duration = Math.max(0.1, newDuration);
    clip.sourceEndOffset = clip.sourceStartOffset + clip.duration;
    this.calculateDuration();
    return true;
  }
  /** Look up a clip by id (undefined if absent). */
  getClip(id) {
    return this.state.clips.find((c) => c.id === id);
  }
  /** All clips (shallow copy of the array). */
  getClips() {
    return [...this.state.clips];
  }
  /** All clips on the given track. */
  getClipsByTrack(trackId) {
    return this.state.clips.filter((c) => c.trackId === trackId);
  }
  /**
   * Split a clip at a specific time, creating two clips.
   * @param id Clip ID to split
   * @param splitTime Time relative to clip start (not timeline time)
   * @returns [firstClip, secondClip] or null if failed
   */
  splitClip(id, splitTime) {
    const clip = this.state.clips.find((c) => c.id === id);
    if (!clip) return null;
    // A split must land strictly inside the clip.
    if (splitTime <= 0 || splitTime >= clip.duration) {
      return null;
    }
    const secondClip = {
      id: generateId(),
      sourceId: clip.sourceId,
      trackId: clip.trackId,
      startTime: clip.startTime + splitTime,
      duration: clip.duration - splitTime,
      sourceStartOffset: clip.sourceStartOffset + splitTime,
      sourceEndOffset: clip.sourceEndOffset,
      effects: []
      // Effects don't transfer to split clips
    };
    clip.duration = splitTime;
    clip.sourceEndOffset = clip.sourceStartOffset + splitTime;
    this.state.clips.push(secondClip);
    this.calculateDuration();
    return [clip, secondClip];
  }
  /**
   * Duplicate a clip
   * @param id Clip ID to duplicate
   * @param offsetTime Optional time offset for the duplicate (default: place after original)
   * @returns The duplicated clip or null if failed
   */
  duplicateClip(id, offsetTime) {
    const clip = this.state.clips.find((c) => c.id === id);
    if (!clip) return null;
    const newStartTime = offsetTime ?? clip.startTime + clip.duration;
    const duplicatedClip = {
      id: generateId(),
      sourceId: clip.sourceId,
      trackId: clip.trackId,
      startTime: newStartTime,
      duration: clip.duration,
      sourceStartOffset: clip.sourceStartOffset,
      sourceEndOffset: clip.sourceEndOffset,
      // Unlike splitClip, duplicates keep copies of effects (with new ids).
      effects: clip.effects.map((e) => ({
        ...e,
        id: generateId()
      }))
    };
    this.state.clips.push(duplicatedClip);
    this.calculateDuration();
    return duplicatedClip;
  }
  // ============ Effect Operations ============
  /** Attach an effect to a clip; a fresh id is assigned. Null if clip missing. */
  addEffect(clipId, effect) {
    const clip = this.state.clips.find((c) => c.id === clipId);
    if (!clip) return null;
    const newEffect = { ...effect, id: generateId() };
    clip.effects.push(newEffect);
    return newEffect;
  }
  /** Remove an effect from a clip. False if clip or effect missing. */
  removeEffect(clipId, effectId) {
    const clip = this.state.clips.find((c) => c.id === clipId);
    if (!clip) return false;
    const index = clip.effects.findIndex((e) => e.id === effectId);
    if (index === -1) return false;
    clip.effects.splice(index, 1);
    return true;
  }
  /** Shallow-merge updates onto an effect. False if clip or effect missing. */
  updateEffect(clipId, effectId, updates) {
    const clip = this.state.clips.find((c) => c.id === clipId);
    if (!clip) return false;
    const effect = clip.effects.find((e) => e.id === effectId);
    if (!effect) return false;
    Object.assign(effect, updates);
    return true;
  }
  // ============ Transition Operations ============
  /** Add a transition; a fresh id is assigned. Returns the stored transition. */
  addTransition(transition) {
    const newTransition = { ...transition, id: generateId() };
    this.state.transitions.push(newTransition);
    return newTransition;
  }
  /** Remove a transition by id. False if not found. */
  removeTransition(id) {
    const index = this.state.transitions.findIndex((t) => t.id === id);
    if (index === -1) return false;
    this.state.transitions.splice(index, 1);
    return true;
  }
  /** All transitions (shallow copy of the array). */
  getTransitions() {
    return [...this.state.transitions];
  }
  // ============ Duration Calculation ============
  /** Recompute project.duration as the latest clip end time (0 when empty). */
  calculateDuration() {
    const maxEndTime = this.state.clips.reduce((max, clip) => {
      const endTime = clip.startTime + clip.duration;
      return Math.max(max, endTime);
    }, 0);
    this.state.project.duration = maxEndTime;
  }
  /** Current timeline duration in seconds. */
  getDuration() {
    return this.state.project.duration;
  }
  // ============ Serialization ============
  /** Serializable snapshot: schema version plus a deep copy of the state. */
  toJSON() {
    return {
      version: "1.0.0",
      state: this.getState()
    };
  }
  /**
   * Rehydrate a Project from parsed JSON.
   *
   * FIX: the input is deep-copied first, so the caller's object is no
   * longer mutated; Date fields (stringified by JSON) are revived on the
   * copy.
   */
  static fromJSON(data) {
    const project = new _Project();
    const state = structuredClone(data.state);
    state.project.createdAt = new Date(state.project.createdAt);
    state.project.updatedAt = new Date(state.project.updatedAt);
    project.state = state;
    return project;
  }
  /** Remember where this project lives on disk. */
  setFilePath(path) {
    this.filePath = path;
  }
  // ============ Summary ============
  /** Compact overview of the project for listings and logs. */
  getSummary() {
    return {
      name: this.state.project.name,
      duration: this.state.project.duration,
      aspectRatio: this.state.project.aspectRatio,
      frameRate: this.state.project.frameRate,
      trackCount: this.state.tracks.length,
      clipCount: this.state.clips.length,
      sourceCount: this.state.sources.length
    };
  }
};
|
|
342
|
+
|
|
343
|
+
// src/tools/index.ts
|
|
344
|
+
/**
 * MCP tool definitions exposed by this server. Each entry pairs a tool
 * name with a JSON Schema describing its arguments; the actual behavior
 * lives in handleToolCall.
 *
 * FIX: timeline_trim_clip's `trimStart` description previously claimed it
 * was a "new source start offset", but the handler passes it to
 * Project.trimClipStart, which interprets it as a new *timeline* start
 * time (the source offset shifts by the same delta). The description now
 * matches the implemented behavior.
 */
var tools = [
  // ---- Project management ----
  {
    name: "project_create",
    description: "Create a new VibeFrame project file",
    inputSchema: {
      type: "object",
      properties: {
        name: { type: "string", description: "Project name" },
        outputPath: { type: "string", description: "Output file path (defaults to {name}.vibe.json)" },
        width: { type: "number", description: "Video width in pixels (default: 1920)" },
        height: { type: "number", description: "Video height in pixels (default: 1080)" },
        fps: { type: "number", description: "Frames per second (default: 30)" }
      },
      required: ["name"]
    }
  },
  {
    name: "project_info",
    description: "Get information about a VibeFrame project",
    inputSchema: {
      type: "object",
      properties: {
        projectPath: { type: "string", description: "Path to the .vibe.json project file" }
      },
      required: ["projectPath"]
    }
  },
  // ---- Timeline operations ----
  {
    name: "timeline_add_source",
    description: "Add a media source (video, audio, image) to the project",
    inputSchema: {
      type: "object",
      properties: {
        projectPath: { type: "string", description: "Path to the project file" },
        mediaPath: { type: "string", description: "Path to the media file" },
        name: { type: "string", description: "Optional name for the source" },
        duration: { type: "number", description: "Duration of the media in seconds (default: 10)" }
      },
      required: ["projectPath", "mediaPath"]
    }
  },
  {
    name: "timeline_add_clip",
    description: "Add a clip to the timeline from an existing source",
    inputSchema: {
      type: "object",
      properties: {
        projectPath: { type: "string", description: "Path to the project file" },
        sourceId: { type: "string", description: "ID of the media source" },
        trackId: { type: "string", description: "ID of the track to add clip to (optional, uses first video track)" },
        startTime: { type: "number", description: "Start time on timeline in seconds (default: 0)" },
        duration: { type: "number", description: "Clip duration in seconds (optional, uses source duration)" }
      },
      required: ["projectPath", "sourceId"]
    }
  },
  {
    name: "timeline_split_clip",
    description: "Split a clip at a specific time",
    inputSchema: {
      type: "object",
      properties: {
        projectPath: { type: "string", description: "Path to the project file" },
        clipId: { type: "string", description: "ID of the clip to split" },
        splitTime: { type: "number", description: "Time to split at (relative to clip start) in seconds" }
      },
      required: ["projectPath", "clipId", "splitTime"]
    }
  },
  {
    name: "timeline_trim_clip",
    description: "Trim a clip by adjusting its start or end",
    inputSchema: {
      type: "object",
      properties: {
        projectPath: { type: "string", description: "Path to the project file" },
        clipId: { type: "string", description: "ID of the clip to trim" },
        trimStart: { type: "number", description: "New timeline start time for the clip in seconds (the source offset shifts by the same amount)" },
        trimEnd: { type: "number", description: "New duration in seconds" }
      },
      required: ["projectPath", "clipId"]
    }
  },
  {
    name: "timeline_move_clip",
    description: "Move a clip to a new position or track",
    inputSchema: {
      type: "object",
      properties: {
        projectPath: { type: "string", description: "Path to the project file" },
        clipId: { type: "string", description: "ID of the clip to move" },
        newStartTime: { type: "number", description: "New start time on timeline in seconds" },
        newTrackId: { type: "string", description: "ID of the target track (optional)" }
      },
      required: ["projectPath", "clipId"]
    }
  },
  {
    name: "timeline_delete_clip",
    description: "Delete a clip from the timeline",
    inputSchema: {
      type: "object",
      properties: {
        projectPath: { type: "string", description: "Path to the project file" },
        clipId: { type: "string", description: "ID of the clip to delete" }
      },
      required: ["projectPath", "clipId"]
    }
  },
  {
    name: "timeline_duplicate_clip",
    description: "Duplicate a clip",
    inputSchema: {
      type: "object",
      properties: {
        projectPath: { type: "string", description: "Path to the project file" },
        clipId: { type: "string", description: "ID of the clip to duplicate" },
        newStartTime: { type: "number", description: "Start time for the duplicated clip (optional, places after original)" }
      },
      required: ["projectPath", "clipId"]
    }
  },
  // ---- Effects ----
  {
    name: "timeline_add_effect",
    description: "Add an effect to a clip",
    inputSchema: {
      type: "object",
      properties: {
        projectPath: { type: "string", description: "Path to the project file" },
        clipId: { type: "string", description: "ID of the clip" },
        effectType: { type: "string", description: "Effect type: fadeIn, fadeOut, blur, brightness, contrast, saturation, grayscale, sepia, invert" },
        startTime: { type: "number", description: "Effect start time relative to clip (default: 0)" },
        duration: { type: "number", description: "Effect duration in seconds (default: 1)" },
        intensity: { type: "number", description: "Effect intensity 0-1 (default: 1)" }
      },
      required: ["projectPath", "clipId", "effectType"]
    }
  },
  // ---- Tracks ----
  {
    name: "timeline_add_track",
    description: "Add a new track to the timeline",
    inputSchema: {
      type: "object",
      properties: {
        projectPath: { type: "string", description: "Path to the project file" },
        trackType: { type: "string", description: "Track type: video or audio" },
        name: { type: "string", description: "Track name (optional)" }
      },
      required: ["projectPath", "trackType"]
    }
  },
  // ---- Listing ----
  {
    name: "timeline_list",
    description: "List all sources, tracks, and clips in a project",
    inputSchema: {
      type: "object",
      properties: {
        projectPath: { type: "string", description: "Path to the project file" }
      },
      required: ["projectPath"]
    }
  }
];
|
|
635
|
+
/**
 * Read a .vibe.json project file from disk and hydrate a Project from it.
 * Relative paths are resolved against the current working directory.
 */
async function loadProject(projectPath) {
  const absolutePath = resolve(process.cwd(), projectPath);
  const raw = await readFile(absolutePath, "utf-8");
  return Project.fromJSON(JSON.parse(raw));
}
|
|
641
|
+
/**
 * Serialize a Project and write it to disk as pretty-printed JSON.
 * Relative paths are resolved against the current working directory.
 */
async function saveProject(projectPath, project) {
  const absolutePath = resolve(process.cwd(), projectPath);
  const serialized = JSON.stringify(project.toJSON(), null, 2);
  await writeFile(absolutePath, serialized, "utf-8");
}
|
|
645
|
+
/**
 * Execute one MCP tool call against a project file on disk.
 *
 * Most tools follow a load -> mutate -> save round trip on the file named
 * by args.projectPath. Errors are never thrown to the caller: they are
 * converted into a text content item, per MCP tool-result conventions.
 *
 * FIXES:
 *  - timeline_add_effect: `args.intensity || 1` silently turned the valid
 *    intensity 0 into 1 (the schema documents a 0-1 range); now uses `??`.
 *  - project_create: the schema advertises width/height but they were
 *    ignored; when both are provided the project aspect ratio is now
 *    derived from them (reduced by gcd, e.g. 1920x1080 -> "16:9").
 *
 * @param {string} name Tool name (must match an entry in `tools`).
 * @param {object} args Tool arguments as described by the tool's schema.
 * @returns {Promise<{content: Array<{type: string, text: string}>}>}
 */
async function handleToolCall(name, args) {
  try {
    let result;
    switch (name) {
      case "project_create": {
        const projectName = args.name;
        const outputPath = args.outputPath || `${projectName}.vibe.json`;
        const project = new Project(projectName);
        if (args.fps) {
          project.setFrameRate(args.fps);
        }
        if (args.width && args.height) {
          // Reduce width:height by their gcd to a canonical ratio string.
          const gcd = (a, b) => (b === 0 ? a : gcd(b, a % b));
          const w = Math.round(args.width);
          const h = Math.round(args.height);
          const g = gcd(w, h) || 1;
          project.setAspectRatio(`${w / g}:${h / g}`);
        }
        await saveProject(outputPath, project);
        result = `Created project "${projectName}" at ${outputPath}`;
        break;
      }
      case "project_info": {
        const project = await loadProject(args.projectPath);
        const meta = project.getMeta();
        const info = {
          name: meta.name,
          aspectRatio: meta.aspectRatio,
          frameRate: meta.frameRate,
          duration: meta.duration,
          sources: project.getSources().length,
          tracks: project.getTracks().length,
          clips: project.getClips().length
        };
        result = JSON.stringify(info, null, 2);
        break;
      }
      case "timeline_add_source": {
        const project = await loadProject(args.projectPath);
        const mediaPath = resolve(process.cwd(), args.mediaPath);
        // Guess the media type from the file extension; unknown
        // extensions fall back to "video".
        const ext = mediaPath.split(".").pop()?.toLowerCase() || "";
        const mediaTypes = {
          mp4: "video",
          webm: "video",
          mov: "video",
          avi: "video",
          mp3: "audio",
          wav: "audio",
          aac: "audio",
          ogg: "audio",
          jpg: "image",
          jpeg: "image",
          png: "image",
          gif: "image",
          webp: "image"
        };
        const source = project.addSource({
          // NOTE(review): splitting on "/" assumes POSIX separators; on
          // Windows the full path becomes the name — confirm intended.
          name: args.name || mediaPath.split("/").pop() || "media",
          type: mediaTypes[ext] || "video",
          url: mediaPath,
          duration: args.duration || 10
        });
        await saveProject(args.projectPath, project);
        result = `Added source: ${source.id}`;
        break;
      }
      case "timeline_add_clip": {
        const project = await loadProject(args.projectPath);
        const sourceId = args.sourceId;
        const tracks = project.getTracks();
        // Prefer the explicit track, then the first video track, then any track.
        const trackId = args.trackId || tracks.find((t) => t.type === "video")?.id || tracks[0]?.id;
        if (!trackId) {
          throw new Error("No tracks available. Add a track first.");
        }
        const source = project.getSource(sourceId);
        const duration = args.duration || source?.duration || 10;
        const clip = project.addClip({
          sourceId,
          trackId,
          startTime: args.startTime ?? 0,
          duration,
          sourceStartOffset: 0,
          sourceEndOffset: duration
        });
        await saveProject(args.projectPath, project);
        result = `Added clip: ${clip.id}`;
        break;
      }
      case "timeline_split_clip": {
        const project = await loadProject(args.projectPath);
        const splitResult = project.splitClip(args.clipId, args.splitTime);
        await saveProject(args.projectPath, project);
        result = splitResult ? `Split clip. New clip ID: ${splitResult[1].id}` : "Failed to split clip";
        break;
      }
      case "timeline_trim_clip": {
        const project = await loadProject(args.projectPath);
        if (args.trimStart !== void 0) {
          project.trimClipStart(args.clipId, args.trimStart);
        }
        if (args.trimEnd !== void 0) {
          project.trimClipEnd(args.clipId, args.trimEnd);
        }
        await saveProject(args.projectPath, project);
        result = "Trimmed clip";
        break;
      }
      case "timeline_move_clip": {
        const project = await loadProject(args.projectPath);
        const clip = project.getClips().find((c) => c.id === args.clipId);
        if (!clip) throw new Error("Clip not found");
        // Omitted fields keep the clip's current track/time.
        const newTrackId = args.newTrackId || clip.trackId;
        const newStartTime = args.newStartTime ?? clip.startTime;
        project.moveClip(args.clipId, newTrackId, newStartTime);
        await saveProject(args.projectPath, project);
        result = "Moved clip";
        break;
      }
      case "timeline_delete_clip": {
        const project = await loadProject(args.projectPath);
        const success = project.removeClip(args.clipId);
        await saveProject(args.projectPath, project);
        result = success ? "Deleted clip" : "Clip not found";
        break;
      }
      case "timeline_duplicate_clip": {
        const project = await loadProject(args.projectPath);
        const newClip = project.duplicateClip(args.clipId, args.newStartTime);
        await saveProject(args.projectPath, project);
        result = newClip ? `Duplicated clip. New clip ID: ${newClip.id}` : "Failed to duplicate clip";
        break;
      }
      case "timeline_add_effect": {
        const project = await loadProject(args.projectPath);
        const effect = project.addEffect(args.clipId, {
          type: args.effectType,
          startTime: args.startTime ?? 0,
          duration: args.duration || 1,
          // `??` so an explicit intensity of 0 is honored (schema: 0-1).
          params: { intensity: args.intensity ?? 1 }
        });
        await saveProject(args.projectPath, project);
        result = effect ? `Added effect: ${effect.id}` : "Failed to add effect";
        break;
      }
      case "timeline_add_track": {
        const project = await loadProject(args.projectPath);
        const trackType = args.trackType;
        const tracks = project.getTracks();
        const track = project.addTrack({
          type: trackType,
          name: args.name || `${trackType}-${tracks.length + 1}`,
          order: tracks.length,
          isMuted: false,
          isLocked: false,
          isVisible: true
        });
        await saveProject(args.projectPath, project);
        result = `Added track: ${track.id}`;
        break;
      }
      case "timeline_list": {
        const project = await loadProject(args.projectPath);
        const data = {
          sources: project.getSources().map((s) => ({
            id: s.id,
            name: s.name,
            type: s.type,
            duration: s.duration
          })),
          tracks: project.getTracks().map((t) => ({
            id: t.id,
            name: t.name,
            type: t.type
          })),
          clips: project.getClips().map((c) => ({
            id: c.id,
            sourceId: c.sourceId,
            trackId: c.trackId,
            startTime: c.startTime,
            duration: c.duration
          }))
        };
        result = JSON.stringify(data, null, 2);
        break;
      }
      default:
        throw new Error(`Unknown tool: ${name}`);
    }
    return {
      content: [{ type: "text", text: result }]
    };
  } catch (error) {
    // Tool failures are reported as text, never thrown to the transport.
    return {
      content: [
        {
          type: "text",
          text: `Error: ${error instanceof Error ? error.message : "Unknown error"}`
        }
      ]
    };
  }
}
|
|
840
|
+
|
|
841
|
+
// src/resources/index.ts
|
|
842
|
+
import { readFile as readFile2 } from "node:fs/promises";
|
|
843
|
+
import { resolve as resolve2 } from "node:path";
|
|
844
|
+
// MCP resource descriptors exposed by this server. Each vibe:// URI is a
// read-only JSON view over the project identified by currentProjectPath;
// the URI suffix selects which slice of state is returned.
var resources = [
  {
    uri: "vibe://project/current",
    name: "Current Project State",
    description: "Full state of the currently active VibeFrame project",
    mimeType: "application/json"
  },
  {
    uri: "vibe://project/clips",
    name: "Project Clips",
    description: "List of all clips in the timeline",
    mimeType: "application/json"
  },
  {
    uri: "vibe://project/sources",
    name: "Media Sources",
    description: "List of all media sources in the project",
    mimeType: "application/json"
  },
  {
    uri: "vibe://project/tracks",
    name: "Timeline Tracks",
    description: "List of all tracks in the timeline",
    mimeType: "application/json"
  },
  {
    uri: "vibe://project/settings",
    name: "Project Settings",
    description: "Project configuration (resolution, fps, etc.)",
    mimeType: "application/json"
  }
];
|
|
876
|
+
// Path of the project backing the vibe:// resources, captured from the
// environment at startup; null until VIBE_PROJECT_PATH is set.
var currentProjectPath = process.env.VIBE_PROJECT_PATH || null;
|
|
877
|
+
/**
 * Resource-side copy of loadProject: read a .vibe.json file (resolved
 * against the current working directory) and hydrate a Project.
 */
async function loadProject2(projectPath) {
  const absolutePath = resolve2(process.cwd(), projectPath);
  const raw = await readFile2(absolutePath, "utf-8");
  return Project.fromJSON(JSON.parse(raw));
}
|
|
883
|
+
/**
 * Serve a vibe://project/<type> resource read.
 * Loads the current project from disk on every call and returns the
 * requested slice as pretty-printed JSON. All failure modes (no project
 * configured, load failure, unknown type) are reported as a JSON payload
 * with an "error" key rather than a rejected promise.
 */
async function readResource(uri) {
  const parsed = uri.match(/^vibe:\/\/project\/(.+)$/);
  if (!parsed) {
    throw new Error(`Invalid resource URI: ${uri}`);
  }
  const resourceType = parsed[1];

  // Every response is a single JSON text content entry for this URI.
  const asJsonContent = (text) => ({
    contents: [
      {
        uri,
        mimeType: "application/json",
        text
      }
    ]
  });

  const projectPath = currentProjectPath;
  if (!projectPath) {
    return asJsonContent(
      JSON.stringify({
        error: "No project loaded. Set VIBE_PROJECT_PATH environment variable or use project_create tool."
      })
    );
  }

  try {
    const project = await loadProject2(projectPath);

    // One extractor per resource type; each produces plain JSON-friendly data.
    const extractors = new Map([
      ["current", () => project.toJSON()],
      [
        "clips",
        () =>
          project.getClips().map((clip) => ({
            id: clip.id,
            sourceId: clip.sourceId,
            trackId: clip.trackId,
            startTime: clip.startTime,
            duration: clip.duration,
            sourceStartOffset: clip.sourceStartOffset,
            effects: clip.effects
          }))
      ],
      [
        "sources",
        () =>
          project.getSources().map((source) => ({
            id: source.id,
            name: source.name,
            type: source.type,
            url: source.url,
            duration: source.duration,
            width: source.width,
            height: source.height
          }))
      ],
      [
        "tracks",
        () =>
          project.getTracks().map((track) => ({
            id: track.id,
            name: track.name,
            type: track.type,
            order: track.order,
            isMuted: track.isMuted,
            isLocked: track.isLocked,
            isVisible: track.isVisible
          }))
      ],
      [
        "settings",
        () => {
          const meta = project.getMeta();
          return {
            name: meta.name,
            aspectRatio: meta.aspectRatio,
            frameRate: meta.frameRate,
            duration: meta.duration
          };
        }
      ]
    ]);

    const extract = extractors.get(resourceType);
    if (!extract) {
      // Thrown inside the try so it surfaces as an error payload, matching
      // how load/parse failures are reported.
      throw new Error(`Unknown resource type: ${resourceType}`);
    }
    return asJsonContent(JSON.stringify(extract(), null, 2));
  } catch (error) {
    return asJsonContent(
      JSON.stringify({
        error: error instanceof Error ? error.message : "Unknown error"
      })
    );
  }
}
|
|
979
|
+
|
|
980
|
+
// src/prompts/index.ts
|
|
981
|
+
// Catalogue of guided-editing prompts exposed over MCP. Built inside an
// IIFE so the argument-descriptor helper does not leak into module scope.
var prompts = (() => {
  // Shared shape for a prompt argument descriptor; arguments default to optional.
  const arg = (name, description, required = false) => ({ name, description, required });
  return [
    {
      name: "edit_video",
      description: "Get guidance on editing a video with natural language instructions",
      arguments: [
        arg("instruction", "Natural language description of the edit (e.g., 'trim the first 5 seconds')", true),
        arg("projectPath", "Path to the project file")
      ]
    },
    {
      name: "create_montage",
      description: "Create a montage from multiple clips with automatic pacing",
      arguments: [
        arg("clips", "Comma-separated list of clip IDs or media paths", true),
        arg("duration", "Target total duration in seconds"),
        arg("style", "Montage style: fast, slow, rhythmic, dramatic")
      ]
    },
    {
      name: "add_transitions",
      description: "Add transitions between clips in the timeline",
      arguments: [
        arg("transitionType", "Transition type: fade, dissolve, wipe, cut"),
        arg("duration", "Transition duration in seconds")
      ]
    },
    {
      name: "color_grade",
      description: "Apply color grading to clips",
      arguments: [
        arg("style", "Color grade style: cinematic, warm, cool, vintage, noir", true),
        arg("intensity", "Intensity of the effect (0-1)")
      ]
    },
    {
      name: "generate_subtitles",
      description: "Generate subtitles from audio using AI transcription",
      arguments: [
        arg("language", "Language code (e.g., en, ko, ja)"),
        arg("format", "Output format: srt, vtt, json")
      ]
    },
    {
      name: "create_shorts",
      description: "Create short-form content from a longer video",
      arguments: [
        arg("targetDuration", "Target duration for each short (e.g., 60 for 60 seconds)"),
        arg("aspectRatio", "Aspect ratio: 9:16, 1:1, 4:5")
      ]
    },
    {
      name: "sync_to_music",
      description: "Sync video cuts to music beats",
      arguments: [
        arg("audioPath", "Path to the audio/music file", true),
        arg("cutStyle", "Cut style: on-beat, off-beat, every-other")
      ]
    }
  ];
})();
|
|
1100
|
+
/**
 * Resolve a prompt by name into the MCP GetPrompt response shape.
 * Every prompt — including the unknown-name fallback — is a single user
 * message with interpolated guidance text, so the envelope is built by one
 * shared helper and each case only supplies its template.
 */
function getPrompt(name, args) {
  const asUserPrompt = (text) => ({
    messages: [
      {
        role: "user",
        content: {
          type: "text",
          text
        }
      }
    ]
  });
  switch (name) {
    case "edit_video":
      return asUserPrompt(`Help me edit a video with the following instruction: "${args.instruction}"

${args.projectPath ? `Project file: ${args.projectPath}` : "No project file specified."}

Please analyze the request and suggest the appropriate timeline tools to use. Consider:
1. What clips need to be affected?
2. What operations are needed (trim, split, move, add effects, etc.)?
3. What are the specific parameters?

Provide step-by-step guidance using the available MCP tools.`);
    case "create_montage":
      return asUserPrompt(`Help me create a ${args.style || "dynamic"} montage from these clips: ${args.clips}

${args.duration ? `Target duration: ${args.duration} seconds` : ""}

Please suggest:
1. The order of clips for best flow
2. Duration for each clip based on pacing
3. Transition types between clips
4. Any effects to enhance the montage

Use the available MCP tools to implement this.`);
    case "add_transitions":
      return asUserPrompt(`Add ${args.transitionType || "fade"} transitions between all clips in the timeline.

${args.duration ? `Transition duration: ${args.duration} seconds` : "Default duration: 0.5 seconds"}

Please:
1. First list all clips in the timeline
2. Identify clip boundaries
3. Add appropriate effects (fadeOut to ending clip, fadeIn to starting clip)
4. Ensure smooth visual flow`);
    case "color_grade":
      return asUserPrompt(`Apply "${args.style}" color grading to the video.

Intensity: ${args.intensity || "0.7"}

For this style, suggest and apply:
1. Brightness adjustments
2. Contrast settings
3. Saturation levels
4. Any special effects (grayscale for noir, sepia for vintage, etc.)

Use the timeline_add_effect tool to apply these to all clips.`);
    case "generate_subtitles":
      return asUserPrompt(`Generate subtitles from the video's audio track.

Language: ${args.language || "auto-detect"}
Format: ${args.format || "srt"}

Steps:
1. Extract audio from the video
2. Use Whisper transcription (vibe ai transcribe command)
3. Format output as ${args.format || "SRT"} subtitles
4. Optionally add as text overlay clips

Note: This requires the CLI transcribe command to be run separately.`);
    case "create_shorts":
      return asUserPrompt(`Create short-form content from this video for social media.

Target duration per short: ${args.targetDuration || 60} seconds
Aspect ratio: ${args.aspectRatio || "9:16"}

Please:
1. Analyze the timeline to find engaging segments
2. Identify natural cut points (scene changes, pauses)
3. Split the video into ${args.targetDuration || 60}-second segments
4. Suggest which segments would work best as standalone shorts
5. Note any reframing needed for vertical format`);
    case "sync_to_music":
      return asUserPrompt(`Sync video cuts to music beats.

Audio file: ${args.audioPath}
Cut style: ${args.cutStyle || "on-beat"}

Steps:
1. Analyze the audio for beat detection (use vibe detect beats command)
2. Get beat timestamps
3. Split or trim clips to align with beats
4. Add transitions at beat points for ${args.cutStyle || "on-beat"} style

This creates a music video-style edit where cuts happen in rhythm with the music.`);
    default:
      return asUserPrompt(`Unknown prompt: ${name}. Available prompts: edit_video, create_montage, add_transitions, color_grade, generate_subtitles, create_shorts, sync_to_music`);
  }
}
|
|
1269
|
+
|
|
1270
|
+
// src/index.ts
|
|
1271
|
+
// MCP server instance advertising tools, resources, and prompts capabilities.
var server = new Server(
  { name: "vibeframe", version: "0.1.0" },
  { capabilities: { tools: {}, resources: {}, prompts: {} } }
);

// Wire each MCP request type to its handler. List handlers return the
// static catalogues; call/read/get handlers delegate to the dispatchers above.
server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools }));
server.setRequestHandler(CallToolRequestSchema, async (request) => {
  const { name, arguments: args } = request.params;
  return handleToolCall(name, args || {});
});
server.setRequestHandler(ListResourcesRequestSchema, async () => ({ resources }));
server.setRequestHandler(ReadResourceRequestSchema, async (request) => readResource(request.params.uri));
server.setRequestHandler(ListPromptsRequestSchema, async () => ({ prompts }));
server.setRequestHandler(GetPromptRequestSchema, async (request) => {
  const { name, arguments: args } = request.params;
  return getPrompt(name, args || {});
});
|
|
1305
|
+
/**
 * Connect the server to a stdio transport and start serving requests.
 * Logs go to stderr so stdout stays reserved for the MCP protocol stream.
 */
async function main() {
  await server.connect(new StdioServerTransport());
  console.error("VibeFrame MCP Server started");
}
main().catch((err) => {
  console.error("Server error:", err);
  process.exit(1);
});
|