mulmocast 2.1.37 → 2.1.39
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/actions/bundle.js
CHANGED
|
@@ -71,7 +71,15 @@ export const mulmoViewerBundle = async (context, options = {}) => {
|
|
|
71
71
|
const sudioBeats = context.studio.beats[index];
|
|
72
72
|
const { duration, startAt } = sudioBeats;
|
|
73
73
|
// console.log(context.studio.beats[index]);
|
|
74
|
-
resultJson.push({
|
|
74
|
+
resultJson.push({
|
|
75
|
+
id: beat.id,
|
|
76
|
+
text: beat.text,
|
|
77
|
+
duration,
|
|
78
|
+
startTime: startAt,
|
|
79
|
+
endTime: (startAt ?? 0) + (duration ?? 0),
|
|
80
|
+
audioSources: {},
|
|
81
|
+
multiLinguals: {},
|
|
82
|
+
});
|
|
75
83
|
});
|
|
76
84
|
// audio
|
|
77
85
|
for (const lang of bundleTargetLang) {
|
|
@@ -60,19 +60,17 @@ export const MulmoStudioContextMethods = {
|
|
|
60
60
|
notifyStateChange(context, sessionType, result);
|
|
61
61
|
},
|
|
62
62
|
setBeatSessionState(context, sessionType, index, id, value) {
|
|
63
|
-
if (!sessionType) {
|
|
63
|
+
if (!sessionType || !Object.hasOwn(context.sessionState.inBeatSession, sessionType)) {
|
|
64
64
|
return;
|
|
65
65
|
}
|
|
66
66
|
const key = beatId(id, index);
|
|
67
|
+
const session = context.sessionState.inBeatSession[sessionType];
|
|
67
68
|
if (value) {
|
|
68
|
-
|
|
69
|
-
context.sessionState.inBeatSession[sessionType] = {};
|
|
70
|
-
}
|
|
71
|
-
context.sessionState.inBeatSession[sessionType][key] = true;
|
|
69
|
+
session[key] = true;
|
|
72
70
|
}
|
|
73
71
|
else {
|
|
74
72
|
// NOTE: Setting to false causes the parse error in rebuildStudio in preprocess.ts
|
|
75
|
-
delete context.sessionState.inBeatSession[sessionType][key];
|
|
73
|
+
delete session[key];
|
|
76
74
|
}
|
|
77
75
|
notifyBeatStateChange(context, sessionType, key);
|
|
78
76
|
},
|
|
@@ -27,7 +27,7 @@ export const provider2TTSAgent = {
|
|
|
27
27
|
defaultModel: "eleven_multilingual_v2",
|
|
28
28
|
// Models | ElevenLabs Documentation
|
|
29
29
|
// https://elevenlabs.io/docs/models
|
|
30
|
-
models: ["eleven_multilingual_v2", "eleven_turbo_v2_5", "eleven_turbo_v2", "eleven_flash_v2_5", "eleven_flash_v2"],
|
|
30
|
+
models: ["eleven_v3", "eleven_multilingual_v2", "eleven_turbo_v2_5", "eleven_turbo_v2", "eleven_flash_v2_5", "eleven_flash_v2"],
|
|
31
31
|
keyName: "ELEVENLABS_API_KEY",
|
|
32
32
|
},
|
|
33
33
|
kotodama: {
|
package/lib/types/viewer.d.ts
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import { z } from "zod";
|
|
2
2
|
export declare const mulmoViewerBeatSchema: z.ZodObject<{
|
|
3
|
+
id: z.ZodOptional<z.ZodString>;
|
|
3
4
|
text: z.ZodOptional<z.ZodString>;
|
|
4
5
|
duration: z.ZodOptional<z.ZodNumber>;
|
|
5
6
|
startTime: z.ZodOptional<z.ZodNumber>;
|
|
@@ -16,6 +17,7 @@ export declare const mulmoViewerBeatSchema: z.ZodObject<{
|
|
|
16
17
|
export type MulmoViewerBeat = z.infer<typeof mulmoViewerBeatSchema>;
|
|
17
18
|
export declare const mulmoViewerDataSchema: z.ZodObject<{
|
|
18
19
|
beats: z.ZodArray<z.ZodObject<{
|
|
20
|
+
id: z.ZodOptional<z.ZodString>;
|
|
19
21
|
text: z.ZodOptional<z.ZodString>;
|
|
20
22
|
duration: z.ZodOptional<z.ZodNumber>;
|
|
21
23
|
startTime: z.ZodOptional<z.ZodNumber>;
|
package/lib/types/viewer.js
CHANGED
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "mulmocast",
|
|
3
|
-
"version": "2.1.37",
|
|
3
|
+
"version": "2.1.39",
|
|
4
4
|
"description": "",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "lib/index.node.js",
|
|
@@ -76,9 +76,9 @@
|
|
|
76
76
|
"homepage": "https://github.com/receptron/mulmocast-cli#readme",
|
|
77
77
|
"dependencies": {
|
|
78
78
|
"@google-cloud/text-to-speech": "^6.4.0",
|
|
79
|
-
"@google/genai": "^1.
|
|
79
|
+
"@google/genai": "^1.41.0",
|
|
80
80
|
"@graphai/anthropic_agent": "^2.0.12",
|
|
81
|
-
"@graphai/browserless_agent": "^2.0.
|
|
81
|
+
"@graphai/browserless_agent": "^2.0.2",
|
|
82
82
|
"@graphai/gemini_agent": "^2.0.5",
|
|
83
83
|
"@graphai/groq_agent": "^2.0.2",
|
|
84
84
|
"@graphai/input_agents": "^1.0.2",
|
|
@@ -92,12 +92,12 @@
|
|
|
92
92
|
"@mozilla/readability": "^0.6.0",
|
|
93
93
|
"@tavily/core": "^0.5.11",
|
|
94
94
|
"archiver": "^7.0.1",
|
|
95
|
-
"clipboardy": "^5.
|
|
96
|
-
"dotenv": "^17.
|
|
95
|
+
"clipboardy": "^5.3.0",
|
|
96
|
+
"dotenv": "^17.3.1",
|
|
97
97
|
"fluent-ffmpeg": "^2.1.3",
|
|
98
98
|
"graphai": "^2.0.16",
|
|
99
99
|
"jsdom": "^28.0.0",
|
|
100
|
-
"marked": "^17.0.
|
|
100
|
+
"marked": "^17.0.2",
|
|
101
101
|
"mulmocast-vision": "^1.0.8",
|
|
102
102
|
"ora": "^9.3.0",
|
|
103
103
|
"puppeteer": "^24.37.2",
|
|
@@ -117,12 +117,12 @@
|
|
|
117
117
|
"eslint": "^10.0.0",
|
|
118
118
|
"eslint-config-prettier": "^10.1.8",
|
|
119
119
|
"eslint-plugin-prettier": "^5.5.5",
|
|
120
|
-
"eslint-plugin-sonarjs": "^3.0.
|
|
120
|
+
"eslint-plugin-sonarjs": "^3.0.7",
|
|
121
121
|
"globals": "^17.3.0",
|
|
122
122
|
"prettier": "^3.8.1",
|
|
123
123
|
"tsx": "^4.21.0",
|
|
124
124
|
"typescript": "^5.9.3",
|
|
125
|
-
"typescript-eslint": "^8.
|
|
125
|
+
"typescript-eslint": "^8.55.0"
|
|
126
126
|
},
|
|
127
127
|
"engines": {
|
|
128
128
|
"node": ">=22.0.0"
|
|
@@ -64,6 +64,22 @@
|
|
|
64
64
|
"en": "Flash V2"
|
|
65
65
|
}
|
|
66
66
|
},
|
|
67
|
+
"V3": {
|
|
68
|
+
"provider": "elevenlabs",
|
|
69
|
+
"voiceId": "1SM7GgM6IMuvQlz2BwM3",
|
|
70
|
+
"model": "eleven_v3",
|
|
71
|
+
"displayName": {
|
|
72
|
+
"en": "Eleven V3 (Mark)"
|
|
73
|
+
}
|
|
74
|
+
},
|
|
75
|
+
"V3Japanese": {
|
|
76
|
+
"provider": "elevenlabs",
|
|
77
|
+
"voiceId": "KtSs8OSniRPofXnr5PeA",
|
|
78
|
+
"model": "eleven_v3",
|
|
79
|
+
"displayName": {
|
|
80
|
+
"en": "V3 Japanese (Sumire)"
|
|
81
|
+
}
|
|
82
|
+
},
|
|
67
83
|
"MultilingualJapanese": {
|
|
68
84
|
"provider": "elevenlabs",
|
|
69
85
|
"voiceId": "Mv8AjrYZCBkdsmDHNwcB",
|
|
@@ -157,6 +173,28 @@
|
|
|
157
173
|
}
|
|
158
174
|
}
|
|
159
175
|
},
|
|
176
|
+
{
|
|
177
|
+
"speaker": "V3",
|
|
178
|
+
"text": "Testing Eleven v3, the latest and most advanced model with human-like expressiveness and high emotional range across over 70 languages.",
|
|
179
|
+
"image": {
|
|
180
|
+
"type": "textSlide",
|
|
181
|
+
"slide": {
|
|
182
|
+
"title": "Eleven V3",
|
|
183
|
+
"subtitle": "Latest model, 70+ languages"
|
|
184
|
+
}
|
|
185
|
+
}
|
|
186
|
+
},
|
|
187
|
+
{
|
|
188
|
+
"speaker": "V3Japanese",
|
|
189
|
+
"text": "Eleven V3モデルの日本語テストです。最新の音声合成モデルで、70以上の言語に対応し、豊かな感情表現が可能です。",
|
|
190
|
+
"image": {
|
|
191
|
+
"type": "textSlide",
|
|
192
|
+
"slide": {
|
|
193
|
+
"title": "Eleven V3日本語テスト",
|
|
194
|
+
"subtitle": "最新モデル、70+言語対応"
|
|
195
|
+
}
|
|
196
|
+
}
|
|
197
|
+
},
|
|
160
198
|
{
|
|
161
199
|
"speaker": "MultilingualJapanese",
|
|
162
200
|
"text": "こんにちは。これは多言語モデルV2の日本語テストです。29言語に対応し、豊かな感情表現を持つプロフェッショナル品質のモデルです。",
|