@aituber-onair/core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +723 -0
- package/dist/constants/api.d.ts +4 -0
- package/dist/constants/api.js +13 -0
- package/dist/constants/api.js.map +1 -0
- package/dist/constants/index.d.ts +23 -0
- package/dist/constants/index.js +25 -0
- package/dist/constants/index.js.map +1 -0
- package/dist/constants/openaiApi.d.ts +15 -0
- package/dist/constants/openaiApi.js +15 -0
- package/dist/constants/openaiApi.js.map +1 -0
- package/dist/constants/prompts.d.ts +2 -0
- package/dist/constants/prompts.js +13 -0
- package/dist/constants/prompts.js.map +1 -0
- package/dist/core/AITuberOnAirCore.d.ts +142 -0
- package/dist/core/AITuberOnAirCore.js +316 -0
- package/dist/core/AITuberOnAirCore.js.map +1 -0
- package/dist/core/ChatProcessor.d.ts +86 -0
- package/dist/core/ChatProcessor.js +246 -0
- package/dist/core/ChatProcessor.js.map +1 -0
- package/dist/core/EventEmitter.d.ts +35 -0
- package/dist/core/EventEmitter.js +72 -0
- package/dist/core/EventEmitter.js.map +1 -0
- package/dist/core/MemoryManager.d.ts +98 -0
- package/dist/core/MemoryManager.js +208 -0
- package/dist/core/MemoryManager.js.map +1 -0
- package/dist/index.d.ts +24 -0
- package/dist/index.js +22 -0
- package/dist/index.js.map +1 -0
- package/dist/services/chat/ChatService.d.ts +21 -0
- package/dist/services/chat/ChatService.js +2 -0
- package/dist/services/chat/ChatService.js.map +1 -0
- package/dist/services/chat/ChatServiceFactory.d.ts +38 -0
- package/dist/services/chat/ChatServiceFactory.js +55 -0
- package/dist/services/chat/ChatServiceFactory.js.map +1 -0
- package/dist/services/chat/OpenAIChatService.d.ts +38 -0
- package/dist/services/chat/OpenAIChatService.js +166 -0
- package/dist/services/chat/OpenAIChatService.js.map +1 -0
- package/dist/services/chat/OpenAISummarizer.d.ts +25 -0
- package/dist/services/chat/OpenAISummarizer.js +70 -0
- package/dist/services/chat/OpenAISummarizer.js.map +1 -0
- package/dist/services/chat/providers/ChatServiceProvider.d.ts +44 -0
- package/dist/services/chat/providers/ChatServiceProvider.js +2 -0
- package/dist/services/chat/providers/ChatServiceProvider.js.map +1 -0
- package/dist/services/chat/providers/OpenAIChatServiceProvider.d.ts +33 -0
- package/dist/services/chat/providers/OpenAIChatServiceProvider.js +44 -0
- package/dist/services/chat/providers/OpenAIChatServiceProvider.js.map +1 -0
- package/dist/services/voice/VoiceEngineAdapter.d.ts +46 -0
- package/dist/services/voice/VoiceEngineAdapter.js +173 -0
- package/dist/services/voice/VoiceEngineAdapter.js.map +1 -0
- package/dist/services/voice/VoiceService.d.ts +55 -0
- package/dist/services/voice/VoiceService.js +2 -0
- package/dist/services/voice/VoiceService.js.map +1 -0
- package/dist/services/voice/engines/AivisSpeechEngine.d.ts +10 -0
- package/dist/services/voice/engines/AivisSpeechEngine.js +70 -0
- package/dist/services/voice/engines/AivisSpeechEngine.js.map +1 -0
- package/dist/services/voice/engines/NijiVoiceEngine.d.ts +12 -0
- package/dist/services/voice/engines/NijiVoiceEngine.js +105 -0
- package/dist/services/voice/engines/NijiVoiceEngine.js.map +1 -0
- package/dist/services/voice/engines/OpenAiEngine.d.ts +9 -0
- package/dist/services/voice/engines/OpenAiEngine.js +34 -0
- package/dist/services/voice/engines/OpenAiEngine.js.map +1 -0
- package/dist/services/voice/engines/VoiceEngine.d.ts +21 -0
- package/dist/services/voice/engines/VoiceEngine.js +2 -0
- package/dist/services/voice/engines/VoiceEngine.js.map +1 -0
- package/dist/services/voice/engines/VoiceEngineFactory.d.ts +14 -0
- package/dist/services/voice/engines/VoiceEngineFactory.js +34 -0
- package/dist/services/voice/engines/VoiceEngineFactory.js.map +1 -0
- package/dist/services/voice/engines/VoicePeakEngine.d.ts +13 -0
- package/dist/services/voice/engines/VoicePeakEngine.js +46 -0
- package/dist/services/voice/engines/VoicePeakEngine.js.map +1 -0
- package/dist/services/voice/engines/VoiceVoxEngine.d.ts +13 -0
- package/dist/services/voice/engines/VoiceVoxEngine.js +67 -0
- package/dist/services/voice/engines/VoiceVoxEngine.js.map +1 -0
- package/dist/services/voice/engines/index.d.ts +7 -0
- package/dist/services/voice/engines/index.js +7 -0
- package/dist/services/voice/engines/index.js.map +1 -0
- package/dist/services/voice/messages.d.ts +38 -0
- package/dist/services/voice/messages.js +49 -0
- package/dist/services/voice/messages.js.map +1 -0
- package/dist/services/youtube/YouTubeDataApiService.d.ts +69 -0
- package/dist/services/youtube/YouTubeDataApiService.js +255 -0
- package/dist/services/youtube/YouTubeDataApiService.js.map +1 -0
- package/dist/services/youtube/YouTubeService.d.ts +63 -0
- package/dist/services/youtube/YouTubeService.js +2 -0
- package/dist/services/youtube/YouTubeService.js.map +1 -0
- package/dist/types/index.d.ts +82 -0
- package/dist/types/index.js +5 -0
- package/dist/types/index.js.map +1 -0
- package/dist/types/nijiVoice.d.ts +27 -0
- package/dist/types/nijiVoice.js +2 -0
- package/dist/types/nijiVoice.js.map +1 -0
- package/dist/utils/index.d.ts +5 -0
- package/dist/utils/index.js +6 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/screenplay.d.ts +19 -0
- package/dist/utils/screenplay.js +42 -0
- package/dist/utils/screenplay.js.map +1 -0
- package/dist/utils/screenshot.d.ts +19 -0
- package/dist/utils/screenshot.js +44 -0
- package/dist/utils/screenshot.js.map +1 -0
- package/dist/utils/storage.d.ts +44 -0
- package/dist/utils/storage.js +103 -0
- package/dist/utils/storage.js.map +1 -0
- package/package.json +33 -0
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
import { ENDPOINT_OPENAI_CHAT_COMPLETIONS_API, MODEL_GPT_4O_MINI, } from '../../constants';
|
|
2
|
+
import { DEFAULT_SUMMARY_PROMPT_TEMPLATE } from '../../constants/prompts';
|
|
3
|
+
/**
|
|
4
|
+
* Implementation of summarization functionality using OpenAI
|
|
5
|
+
*/
|
|
6
|
+
export class OpenAISummarizer {
    /**
     * Constructor
     * @param apiKey OpenAI API key
     * @param model Name of the model to use
     * @param defaultPromptTemplate Default prompt template for summarization
     */
    constructor(apiKey, model = MODEL_GPT_4O_MINI, defaultPromptTemplate = DEFAULT_SUMMARY_PROMPT_TEMPLATE) {
        this.apiKey = apiKey;
        this.model = model;
        this.defaultPromptTemplate = defaultPromptTemplate;
    }
    /**
     * Summarize chat messages
     * @param messages Array of messages to summarize
     * @param maxLength Maximum number of characters (default 256)
     * @param customPrompt Custom prompt template for summarization (optional)
     * @returns Summarized text (falls back to a trivial local summary on any error)
     */
    async summarize(messages, maxLength = 256, customPrompt) {
        try {
            // Create system prompt. Use split/join so that EVERY occurrence of
            // the {maxLength} placeholder is substituted — String.prototype.replace
            // with a string pattern only replaces the first match, which silently
            // breaks templates that mention {maxLength} more than once.
            const promptTemplate = customPrompt || this.defaultPromptTemplate;
            const systemPrompt = promptTemplate
                .split('{maxLength}')
                .join(maxLength.toString());
            // Join message content into a single "role: content" transcript
            const conversationText = messages
                .map((msg) => `${msg.role}: ${msg.content}`)
                .join('\n');
            // API request
            const response = await fetch(ENDPOINT_OPENAI_CHAT_COMPLETIONS_API, {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                    Authorization: `Bearer ${this.apiKey}`,
                },
                body: JSON.stringify({
                    model: this.model,
                    messages: [
                        {
                            role: 'system',
                            content: systemPrompt,
                        },
                        {
                            role: 'user',
                            content: conversationText,
                        },
                    ],
                    max_tokens: maxLength,
                }),
            });
            if (!response.ok) {
                // The error body may not be valid JSON (e.g. an HTML gateway error
                // page); fall back to an empty object so the thrown message reports
                // the HTTP status text instead of a JSON parse error.
                const errorData = await response.json().catch(() => ({}));
                throw new Error(`OpenAI API error: ${errorData.error?.message || response.statusText}`);
            }
            const data = await response.json();
            return data.choices[0]?.message?.content || '';
        }
        catch (error) {
            console.error('Error in summarize:', error);
            // Error fallback - simple summary. Guard the whole content access with
            // optional chaining so a message whose content is missing cannot make
            // the fallback itself throw.
            const lastContent = messages[messages.length - 1]?.content;
            return `${messages.length} messages. Latest topic: ${lastContent?.substring(0, 50) || 'none'}...`;
        }
    }
}
|
|
70
|
+
//# sourceMappingURL=OpenAISummarizer.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"OpenAISummarizer.js","sourceRoot":"","sources":["../../../src/services/chat/OpenAISummarizer.ts"],"names":[],"mappings":"AAEA,OAAO,EACL,oCAAoC,EACpC,iBAAiB,GAClB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,+BAA+B,EAAE,MAAM,yBAAyB,CAAC;AAE1E;;GAEG;AACH,MAAM,OAAO,gBAAgB;IAK3B;;;;;OAKG;IACH,YACE,MAAc,EACd,QAAgB,iBAAiB,EACjC,wBAAgC,+BAA+B;QAE/D,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,qBAAqB,GAAG,qBAAqB,CAAC;IACrD,CAAC;IAED;;;;;;OAMG;IACH,KAAK,CAAC,SAAS,CACb,QAAmB,EACnB,YAAoB,GAAG,EACvB,YAAqB;QAErB,IAAI,CAAC;YACH,uBAAuB;YACvB,MAAM,cAAc,GAAG,YAAY,IAAI,IAAI,CAAC,qBAAqB,CAAC;YAClE,MAAM,YAAY,GAAG,cAAc,CAAC,OAAO,CACzC,aAAa,EACb,SAAS,CAAC,QAAQ,EAAE,CACrB,CAAC;YAEF,uBAAuB;YACvB,MAAM,gBAAgB,GAAG,QAAQ;iBAC9B,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,GAAG,CAAC,IAAI,KAAK,GAAG,CAAC,OAAO,EAAE,CAAC;iBAC3C,IAAI,CAAC,IAAI,CAAC,CAAC;YAEd,cAAc;YACd,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,oCAAoC,EAAE;gBACjE,MAAM,EAAE,MAAM;gBACd,OAAO,EAAE;oBACP,cAAc,EAAE,kBAAkB;oBAClC,aAAa,EAAE,UAAU,IAAI,CAAC,MAAM,EAAE;iBACvC;gBACD,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC;oBACnB,KAAK,EAAE,IAAI,CAAC,KAAK;oBACjB,QAAQ,EAAE;wBACR;4BACE,IAAI,EAAE,QAAQ;4BACd,OAAO,EAAE,YAAY;yBACtB;wBACD;4BACE,IAAI,EAAE,MAAM;4BACZ,OAAO,EAAE,gBAAgB;yBAC1B;qBACF;oBACD,UAAU,EAAE,SAAS;iBACtB,CAAC;aACH,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;gBACjB,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;gBACxC,MAAM,IAAI,KAAK,CACb,qBAAqB,SAAS,CAAC,KAAK,EAAE,OAAO,IAAI,QAAQ,CAAC,UAAU,EAAE,CACvE,CAAC;YACJ,CAAC;YAED,MAAM,IAAI,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;YACnC,OAAO,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,IAAI,EAAE,CAAC;QACjD,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,OAAO,CAAC,KAAK,CAAC,qBAAqB,EAAE,KAAK,CAAC,CAAC;YAC5C,kCAAkC;YAClC,OAAO,GAAG,QAAQ,CAAC,MAAM,4BACvB,QAAQ,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,MAC7D,KAAK,CAAC;QACR,CAAC;IACH,CAAC;CACF"}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import { ChatService } from '../ChatService';
|
|
2
|
+
/**
 * Options for chat service providers
 */
export interface ChatServiceOptions {
    /** API Key for the underlying provider */
    apiKey: string;
    /** Model name (provider default is used when omitted) */
    model?: string;
    /** Additional provider-specific options */
    [key: string]: any;
}
|
|
13
|
+
/**
 * Chat service provider interface
 * Abstraction for various AI API providers (OpenAI, Gemini, Claude, etc.)
 */
export interface ChatServiceProvider {
    /**
     * Create a chat service instance
     * @param options Service options
     * @returns ChatService implementation
     */
    createChatService(options: ChatServiceOptions): ChatService;
    /**
     * Get the provider name
     * @returns Provider name
     */
    getProviderName(): string;
    /**
     * Get the list of supported models
     * @returns Array of supported models
     */
    getSupportedModels(): string[];
    /**
     * Get the default model
     * @returns Default model name
     */
    getDefaultModel(): string;
    /**
     * Check if the provider supports vision (image processing)
     * @returns Support status
     */
    supportsVision(): boolean;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"ChatServiceProvider.js","sourceRoot":"","sources":["../../../../src/services/chat/providers/ChatServiceProvider.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import { ChatService } from '../ChatService';
|
|
2
|
+
import { ChatServiceOptions, ChatServiceProvider } from './ChatServiceProvider';
|
|
3
|
+
/**
 * OpenAI API provider implementation
 */
export declare class OpenAIChatServiceProvider implements ChatServiceProvider {
    /**
     * Create a chat service instance
     * @param options Service options
     * @returns OpenAIChatService instance
     */
    createChatService(options: ChatServiceOptions): ChatService;
    /**
     * Get the provider name
     * @returns Provider name ('openai')
     */
    getProviderName(): string;
    /**
     * Get the list of supported models
     * @returns Array of supported model names
     */
    getSupportedModels(): string[];
    /**
     * Get the default model
     * @returns Default model name
     */
    getDefaultModel(): string;
    /**
     * Check if this provider supports vision (image processing)
     * @returns Vision support status (true)
     */
    supportsVision(): boolean;
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import { MODEL_GPT_4O_MINI } from '../../../constants';
|
|
2
|
+
import { OpenAIChatService } from '../OpenAIChatService';
|
|
3
|
+
/**
 * OpenAI API provider implementation
 */
export class OpenAIChatServiceProvider {
    /**
     * Create a chat service instance backed by the OpenAI API.
     * @param options Service options
     * @returns OpenAIChatService instance
     */
    createChatService(options) {
        const { apiKey, model } = options;
        return new OpenAIChatService(apiKey, model);
    }
    /**
     * Get the provider name
     * @returns Provider name ('openai')
     */
    getProviderName() {
        return 'openai';
    }
    /**
     * Get the list of supported models
     * @returns Array of supported model names
     */
    getSupportedModels() {
        const supportedModels = [MODEL_GPT_4O_MINI];
        return supportedModels;
    }
    /**
     * Get the default model
     * @returns Default model name
     */
    getDefaultModel() {
        return MODEL_GPT_4O_MINI;
    }
    /**
     * Check if this provider supports vision (image processing)
     * @returns Vision support status (true)
     */
    supportsVision() {
        return true;
    }
}
|
|
44
|
+
//# sourceMappingURL=OpenAIChatServiceProvider.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"OpenAIChatServiceProvider.js","sourceRoot":"","sources":["../../../../src/services/chat/providers/OpenAIChatServiceProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AAEvD,OAAO,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAC;AAGzD;;GAEG;AACH,MAAM,OAAO,yBAAyB;IACpC;;;;OAIG;IACH,iBAAiB,CAAC,OAA2B;QAC3C,OAAO,IAAI,iBAAiB,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC;IAC9D,CAAC;IAED;;;OAGG;IACH,eAAe;QACb,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED;;;OAGG;IACH,kBAAkB;QAChB,OAAO,CAAC,iBAAiB,CAAC,CAAC;IAC7B,CAAC;IAED;;;OAGG;IACH,eAAe;QACb,OAAO,iBAAiB,CAAC;IAC3B,CAAC;IAED;;;OAGG;IACH,cAAc;QACZ,OAAO,IAAI,CAAC;IACd,CAAC;CACF"}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import { Screenplay } from '../../types';
|
|
2
|
+
import { VoiceService, VoiceServiceOptions, AudioPlayOptions } from './VoiceService';
|
|
3
|
+
/**
 * Adapter implementation for using existing voice engines
 */
export declare class VoiceEngineAdapter implements VoiceService {
    private options;
    private isPlayingAudio;
    private audioElement;
    /**
     * Constructor
     * @param options Voice service options
     */
    constructor(options: VoiceServiceOptions);
    /**
     * Speak the screenplay as audio
     * @param screenplay Screenplay (text and emotion)
     * @param options Audio playback options
     */
    speak(screenplay: Screenplay, options?: AudioPlayOptions): Promise<void>;
    /**
     * Speak text as audio
     * @param text Text to speak
     * @param options Audio playback options
     */
    speakText(text: string, options?: AudioPlayOptions): Promise<void>;
    /**
     * Get whether currently playing
     */
    isPlaying(): boolean;
    /**
     * Stop playback
     */
    stop(): void;
    /**
     * Update service settings
     * @param options New settings options
     */
    updateOptions(options: Partial<VoiceServiceOptions>): void;
    /**
     * Play audio buffer
     * @param audioBuffer Audio data ArrayBuffer
     * @param audioElementId ID of HTML element to play audio (use internal element if omitted)
     */
    private playAudioBuffer;
}
|
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
import { textToScreenplay } from '../../utils/screenplay';
|
|
2
|
+
/**
 * Adapter implementation for using existing voice engines
 */
export class VoiceEngineAdapter {
    /**
     * Constructor
     * @param options Voice service options
     */
    constructor(options) {
        this.isPlayingAudio = false;
        this.audioElement = null;
        this.options = options;
        // Create reusable audio element (browser only; in non-DOM
        // environments playAudioBuffer is a no-op).
        if (typeof window !== 'undefined') {
            this.audioElement = document.createElement('audio');
            this.audioElement.addEventListener('ended', () => {
                this.isPlayingAudio = false;
                if (this.options.onComplete) {
                    this.options.onComplete();
                }
            });
        }
    }
    /**
     * Speak the screenplay as audio
     * @param screenplay Screenplay (text and emotion)
     * @param options Audio playback options
     */
    async speak(screenplay, options) {
        try {
            // Only one utterance at a time: cancel any in-flight playback.
            if (this.isPlayingAudio) {
                this.stop();
            }
            this.isPlayingAudio = true;
            // Import existing VoiceEngineFactory dynamically
            const { VoiceEngineFactory } = await import('./engines/VoiceEngineFactory');
            // Map emotion to style used by existing engine; unknown emotions
            // (including 'neutral') fall back to the generic 'talk' style.
            const getStyleFromEmotion = (emotion) => {
                switch (emotion) {
                    case 'angry':
                        return 'angry';
                    case 'happy':
                        return 'happy';
                    case 'sad':
                        return 'sad';
                    case 'fear':
                        return 'fear';
                    case 'surprised':
                        return 'surprised';
                    default:
                        return 'talk';
                }
            };
            // Convert to Talk type for VoiceEngine
            const talk = {
                style: getStyleFromEmotion(screenplay.emotion || 'neutral'),
                message: screenplay.text,
            };
            const engine = VoiceEngineFactory.getEngine(this.options.engineType);
            // Get audio data
            const audioBuffer = await engine.fetchAudio(talk, // Use any for type compatibility
            this.options.speaker, this.options.apiKey);
            // If there is a custom playback process, use it.
            // NOTE(review): on this path isPlayingAudio stays true until the
            // internal element fires 'ended' — confirm whether onPlay callers
            // are expected to manage playback state themselves.
            if (this.options.onPlay) {
                await this.options.onPlay(audioBuffer, options);
                return;
            }
            // Default playback process
            await this.playAudioBuffer(audioBuffer, options?.audioElementId);
        }
        catch (error) {
            console.error('Error in speak:', error);
            this.isPlayingAudio = false;
            throw error;
        }
    }
    /**
     * Speak text as audio
     * @param text Text to speak
     * @param options Audio playback options
     */
    async speakText(text, options) {
        // Convert text to screenplay and play
        const screenplay = textToScreenplay(text);
        return this.speak(screenplay, options);
    }
    /**
     * Get whether currently playing
     */
    isPlaying() {
        return this.isPlayingAudio;
    }
    /**
     * Stop playback
     */
    stop() {
        if (this.audioElement) {
            this.audioElement.pause();
            this.audioElement.currentTime = 0;
        }
        this.isPlayingAudio = false;
    }
    /**
     * Update service settings
     * @param options New settings options
     */
    updateOptions(options) {
        this.options = { ...this.options, ...options };
    }
    /**
     * Play audio buffer
     * @param audioBuffer Audio data ArrayBuffer
     * @param audioElementId ID of HTML element to play audio (use internal element if omitted)
     */
    async playAudioBuffer(audioBuffer, audioElementId) {
        return new Promise((resolve, reject) => {
            try {
                // If not in browser environment, do nothing
                if (typeof window === 'undefined') {
                    this.isPlayingAudio = false;
                    resolve();
                    return;
                }
                // Create Blob from audio data
                const blob = new Blob([audioBuffer], { type: 'audio/wav' });
                const url = URL.createObjectURL(blob);
                // Get/create audio element for playback
                let audioEl = this.audioElement;
                if (audioElementId) {
                    const customAudioEl = document.getElementById(audioElementId);
                    if (customAudioEl) {
                        audioEl = customAudioEl;
                    }
                }
                if (!audioEl) {
                    reject(new Error('Audio element not available'));
                    return;
                }
                // Shared cleanup. The previous code removed only the listener
                // that fired, leaving its sibling attached to the (reused)
                // element across playbacks — a listener leak and a source of
                // stale rejections. Always detach BOTH listeners and release
                // the object URL, on every settle path including play() failure.
                const cleanup = () => {
                    this.isPlayingAudio = false;
                    URL.revokeObjectURL(url);
                    audioEl.removeEventListener('ended', onEnded);
                    audioEl.removeEventListener('error', onError);
                };
                const onEnded = () => {
                    cleanup();
                    if (this.options.onComplete) {
                        this.options.onComplete();
                    }
                    resolve();
                };
                const onError = (e) => {
                    cleanup();
                    reject(new Error(`Audio playback error: ${e.message}`));
                };
                audioEl.addEventListener('ended', onEnded);
                audioEl.addEventListener('error', onError);
                // Play audio
                audioEl.src = url;
                audioEl.play().catch((error) => {
                    cleanup();
                    reject(error);
                });
            }
            catch (error) {
                this.isPlayingAudio = false;
                reject(error);
            }
        });
    }
}
|
|
173
|
+
//# sourceMappingURL=VoiceEngineAdapter.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"VoiceEngineAdapter.js","sourceRoot":"","sources":["../../../src/services/voice/VoiceEngineAdapter.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,gBAAgB,EAAE,MAAM,wBAAwB,CAAC;AAE1D;;GAEG;AACH,MAAM,OAAO,kBAAkB;IAK7B;;;OAGG;IACH,YAAY,OAA4B;QAPhC,mBAAc,GAAY,KAAK,CAAC;QAChC,iBAAY,GAA4B,IAAI,CAAC;QAOnD,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QAEvB,gCAAgC;QAChC,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE,CAAC;YAClC,IAAI,CAAC,YAAY,GAAG,QAAQ,CAAC,aAAa,CAAC,OAAO,CAAC,CAAC;YACpD,IAAI,CAAC,YAAY,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE;gBAC/C,IAAI,CAAC,cAAc,GAAG,KAAK,CAAC;gBAC5B,IAAI,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC;oBAC5B,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC;gBAC5B,CAAC;YACH,CAAC,CAAC,CAAC;QACL,CAAC;IACH,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,KAAK,CACT,UAAsB,EACtB,OAA0B;QAE1B,IAAI,CAAC;YACH,IAAI,IAAI,CAAC,cAAc,EAAE,CAAC;gBACxB,IAAI,CAAC,IAAI,EAAE,CAAC;YACd,CAAC;YAED,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC;YAE3B,iDAAiD;YACjD,MAAM,EAAE,kBAAkB,EAAE,GAAG,MAAM,MAAM,CACzC,8BAA8B,CAC/B,CAAC;YAEF,+CAA+C;YAC/C,MAAM,mBAAmB,GAAG,CAAC,OAAe,EAAU,EAAE;gBACtD,QAAQ,OAAO,EAAE,CAAC;oBAChB,KAAK,OAAO;wBACV,OAAO,OAAO,CAAC;oBACjB,KAAK,OAAO;wBACV,OAAO,OAAO,CAAC;oBACjB,KAAK,KAAK;wBACR,OAAO,KAAK,CAAC;oBACf,KAAK,MAAM;wBACT,OAAO,MAAM,CAAC;oBAChB,KAAK,WAAW;wBACd,OAAO,WAAW,CAAC;oBACrB;wBACE,OAAO,MAAM,CAAC;gBAClB,CAAC;YACH,CAAC,CAAC;YAEF,uCAAuC;YACvC,MAAM,IAAI,GAAG;gBACX,KAAK,EAAE,mBAAmB,CAAC,UAAU,CAAC,OAAO,IAAI,SAAS,CAAC;gBAC3D,OAAO,EAAE,UAAU,CAAC,IAAI;aACzB,CAAC;YAEF,MAAM,MAAM,GAAG,kBAAkB,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;YAErE,iBAAiB;YACjB,MAAM,WAAW,GAAG,MAAM,MAAM,CAAC,UAAU,CACzC,IAAW,EAAE,iCAAiC;YAC9C,IAAI,CAAC,OAAO,CAAC,OAAO,EACpB,IAAI,CAAC,OAAO,CAAC,MAAM,CACpB,CAAC;YAEF,gDAAgD;YAChD,IAAI,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC;gBACxB,MAAM,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,OAAO,CAAC,CAAC;gBAChD,OAAO;YACT,CAAC;YAED,2BAA2B;YAC3B,MAAM,IAAI,CAAC,eAAe,CAAC,WAAW,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;QACnE,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,OAAO,CAAC,KAAK,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;YACxC,IAAI,CAAC,cAAc,GAAG,KAAK,CAAC;YAC5
B,MAAM,KAAK,CAAC;QACd,CAAC;IACH,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,SAAS,CAAC,IAAY,EAAE,OAA0B;QACtD,sCAAsC;QACtC,MAAM,UAAU,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC;QAC1C,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IACzC,CAAC;IAED;;OAEG;IACH,SAAS;QACP,OAAO,IAAI,CAAC,cAAc,CAAC;IAC7B,CAAC;IAED;;OAEG;IACH,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,EAAE,CAAC;YACtB,IAAI,CAAC,YAAY,CAAC,KAAK,EAAE,CAAC;YAC1B,IAAI,CAAC,YAAY,CAAC,WAAW,GAAG,CAAC,CAAC;QACpC,CAAC;QACD,IAAI,CAAC,cAAc,GAAG,KAAK,CAAC;IAC9B,CAAC;IAED;;;OAGG;IACH,aAAa,CAAC,OAAqC;QACjD,IAAI,CAAC,OAAO,GAAG,EAAE,GAAG,IAAI,CAAC,OAAO,EAAE,GAAG,OAAO,EAAE,CAAC;IACjD,CAAC;IAED;;;;OAIG;IACK,KAAK,CAAC,eAAe,CAC3B,WAAwB,EACxB,cAAuB;QAEvB,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,IAAI,CAAC;gBACH,4CAA4C;gBAC5C,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE,CAAC;oBAClC,IAAI,CAAC,cAAc,GAAG,KAAK,CAAC;oBAC5B,OAAO,EAAE,CAAC;oBACV,OAAO;gBACT,CAAC;gBAED,8BAA8B;gBAC9B,MAAM,IAAI,GAAG,IAAI,IAAI,CAAC,CAAC,WAAW,CAAC,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,CAAC,CAAC;gBAC5D,MAAM,GAAG,GAAG,GAAG,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC;gBAEtC,wCAAwC;gBACxC,IAAI,OAAO,GAAG,IAAI,CAAC,YAAY,CAAC;gBAChC,IAAI,cAAc,EAAE,CAAC;oBACnB,MAAM,aAAa,GAAG,QAAQ,CAAC,cAAc,CAC3C,cAAc,CACK,CAAC;oBACtB,IAAI,aAAa,EAAE,CAAC;wBAClB,OAAO,GAAG,aAAa,CAAC;oBAC1B,CAAC;gBACH,CAAC;gBAED,IAAI,CAAC,OAAO,EAAE,CAAC;oBACb,MAAM,CAAC,IAAI,KAAK,CAAC,6BAA6B,CAAC,CAAC,CAAC;oBACjD,OAAO;gBACT,CAAC;gBAED,sBAAsB;gBACtB,MAAM,OAAO,GAAG,GAAG,EAAE;oBACnB,IAAI,CAAC,cAAc,GAAG,KAAK,CAAC;oBAC5B,GAAG,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;oBACzB,OAAO,EAAE,mBAAmB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;oBAC/C,IAAI,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC;wBAC5B,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC;oBAC5B,CAAC;oBACD,OAAO,EAAE,CAAC;gBACZ,CAAC,CAAC;gBAEF,MAAM,OAAO,GAAG,CAAC,CAAQ,EAAE,EAAE;oBAC3B,IAAI,CAAC,cAAc,GAAG,KAAK,CAAC;oBAC5B,GAAG,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;oBACzB,OAAO,EAAE,mBAAmB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;oBAC/C,MAAM,CACJ,IAAI,KAAK,CAAC,yBAA0B,CAAgB,CAAC,OAAO,EAAE,CAAC,CAChE,CAAC;gBACJ,CAAC,CAAC;gBAEF,OAAO,CAAC,gBAAgB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;gBAC3C,OAAO,
CAAC,gBAAgB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;gBAE3C,aAAa;gBACb,OAAO,CAAC,GAAG,GAAG,GAAG,CAAC;gBAClB,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;oBAC7B,IAAI,CAAC,cAAc,GAAG,KAAK,CAAC;oBAC5B,GAAG,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;oBACzB,MAAM,CAAC,KAAK,CAAC,CAAC;gBAChB,CAAC,CAAC,CAAC;YACL,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBACf,IAAI,CAAC,cAAc,GAAG,KAAK,CAAC;gBAC5B,MAAM,CAAC,KAAK,CAAC,CAAC;YAChB,CAAC;QACH,CAAC,CAAC,CAAC;IACL,CAAC;CACF"}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import { Screenplay } from '../../types';
|
|
2
|
+
/**
 * Voice service settings options
 */
export interface VoiceServiceOptions {
    /** Speaker ID */
    speaker: string;
    /** Engine type (voicevox, voicepeak, openai, nijivoice, aivisSpeech) */
    engineType: string;
    /** API key (if needed by the selected engine) */
    apiKey?: string;
    /** Audio playback callback; when set it replaces the default playback */
    onPlay?: (audioBuffer: ArrayBuffer, options?: AudioPlayOptions) => Promise<void>;
    /** Audio playback complete callback */
    onComplete?: () => void;
}
|
|
17
|
+
/**
 * Audio playback options
 */
export interface AudioPlayOptions {
    /** ID of HTML element to play audio */
    audioElementId?: string;
    /** Enable animation processing */
    enableAnimation?: boolean;
}
|
|
26
|
+
/**
 * Voice service interface
 */
export interface VoiceService {
    /**
     * Speak screenplay as audio
     * @param screenplay Screenplay (text and emotion)
     * @param options Audio playback options (default settings if omitted)
     */
    speak(screenplay: Screenplay, options?: AudioPlayOptions): Promise<void>;
    /**
     * Speak text as audio
     * @param text Text to speak
     * @param options Audio playback options (default settings if omitted)
     */
    speakText(text: string, options?: AudioPlayOptions): Promise<void>;
    /**
     * Get whether currently playing
     */
    isPlaying(): boolean;
    /**
     * Stop playback
     */
    stop(): void;
    /**
     * Update service settings
     * @param options New settings options
     */
    updateOptions(options: Partial<VoiceServiceOptions>): void;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"VoiceService.js","sourceRoot":"","sources":["../../../src/services/voice/VoiceService.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { Talk } from '../messages';
|
|
2
|
+
import { VoiceEngine } from './VoiceEngine';
|
|
3
|
+
/**
 * AivisSpeech voice synthesis engine
 */
export declare class AivisSpeechEngine implements VoiceEngine {
    /**
     * Synthesize audio for the given talk via the AivisSpeech HTTP API.
     * @param input Talk (style and message)
     * @param speaker Speaker ID
     * @returns Synthesized audio as an ArrayBuffer
     */
    fetchAudio(input: Talk, speaker: string): Promise<ArrayBuffer>;
    /** Tune the TTS query's prosody parameters for the given emotion. */
    private adjustEmotionParameters;
    /**
     * Test message for this engine.
     * @param textVoiceText Optional override text
     */
    getTestMessage(textVoiceText?: string): string;
}
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
import { AIVIS_SPEECH_API_URL } from '../../../constants';
|
|
2
|
+
/**
 * AivisSpeech voice synthesis engine
 *
 * Converts a Talk message into audio by calling an AivisSpeech Engine HTTP
 * API using the VOICEVOX-compatible two-step flow:
 *   1. POST /audio_query  — build a synthesis query from the text
 *   2. POST /synthesis    — render the (emotion-adjusted) query to audio
 */
export class AivisSpeechEngine {
    /**
     * Synthesize speech audio for the given talk.
     *
     * @param input Talk object; `message` is the text to speak and the
     *   optional `style` selects the emotion preset.
     * @param speaker AivisSpeech speaker id, forwarded as-is to both endpoints.
     * @returns Raw audio bytes as an ArrayBuffer.
     * @throws Error when either API request responds with a non-2xx status.
     */
    async fetchAudio(input, speaker) {
        const talk = input;
        // Derive the emotion from talk.style, defaulting to neutral.
        const emotion = talk.style || 'neutral';
        const text = talk.message.trim();
        const ttsQueryResponse = await fetch(`${AIVIS_SPEECH_API_URL}/audio_query?speaker=${speaker}&text=${encodeURIComponent(text)}`, { method: 'POST' });
        if (!ttsQueryResponse.ok) {
            throw new Error('Failed to fetch TTS query from AivisSpeech Engine.');
        }
        const ttsQueryJson = await ttsQueryResponse.json();
        // adjust parameters according to emotion
        this.adjustEmotionParameters(ttsQueryJson, emotion);
        // NOTE: the previous code also set 'Transfer-Encoding: chunked' here.
        // That is a forbidden request header per the Fetch Standard: browsers
        // silently strip it and strict clients reject it, and the transport
        // manages framing itself — so it has been removed.
        const synthesisResponse = await fetch(`${AIVIS_SPEECH_API_URL}/synthesis?speaker=${speaker}`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
            },
            body: JSON.stringify(ttsQueryJson),
        });
        if (!synthesisResponse.ok) {
            throw new Error('Failed to fetch TTS synthesis result from AivisSpeech Engine.');
        }
        // Read the payload directly; no need for a Blob round trip.
        return await synthesisResponse.arrayBuffer();
    }
    /**
     * Mutate the audio_query result in place, scaling prosody parameters
     * according to the emotion. Unknown emotions (e.g. 'neutral') keep the
     * defaults set at the top of this method.
     *
     * @param ttsQueryJson JSON object returned by /audio_query (mutated).
     * @param emotion Emotion label; matched case-insensitively.
     */
    adjustEmotionParameters(ttsQueryJson, emotion) {
        // default values
        ttsQueryJson.speedScale = 1.0;
        ttsQueryJson.pitchScale = 0.0;
        ttsQueryJson.intonationScale = 1.0;
        ttsQueryJson.tempoDynamicsScale = 1.0;
        switch (emotion.toLowerCase()) {
            case 'happy':
                ttsQueryJson.speedScale = 1.1;
                ttsQueryJson.pitchScale = 0.05;
                ttsQueryJson.intonationScale = 1.2;
                ttsQueryJson.tempoDynamicsScale = 1.1;
                break;
            case 'sad':
                ttsQueryJson.speedScale = 0.9;
                ttsQueryJson.pitchScale = -0.03;
                ttsQueryJson.intonationScale = 0.8;
                ttsQueryJson.tempoDynamicsScale = 0.9;
                break;
            case 'angry':
                // speed/pitch stay at their defaults; only emphasis changes.
                ttsQueryJson.speedScale = 1.0;
                ttsQueryJson.pitchScale = 0.0;
                ttsQueryJson.intonationScale = 1.4;
                ttsQueryJson.tempoDynamicsScale = 1.2;
                break;
            case 'surprised':
                ttsQueryJson.speedScale = 1.2;
                ttsQueryJson.pitchScale = 0.07;
                ttsQueryJson.intonationScale = 1.3;
                ttsQueryJson.tempoDynamicsScale = 1.0;
                break;
            // default: "neutral" etc. other than default values
        }
    }
    /**
     * Message spoken when testing this engine.
     *
     * @param textVoiceText Optional override; when omitted, returns the
     *   default Japanese announcement.
     */
    getTestMessage(textVoiceText) {
        return textVoiceText || 'アイビススピーチを使用します';
    }
}
|
|
70
|
+
//# sourceMappingURL=AivisSpeechEngine.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"AivisSpeechEngine.js","sourceRoot":"","sources":["../../../../src/services/voice/engines/AivisSpeechEngine.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,oBAAoB,EAAE,MAAM,oBAAoB,CAAC;AAI1D;;GAEG;AACH,MAAM,OAAO,iBAAiB;IAC5B,KAAK,CAAC,UAAU,CAAC,KAAW,EAAE,OAAe;QAC3C,MAAM,IAAI,GAAG,KAAa,CAAC;QAC3B,oBAAoB;QACpB,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,IAAI,SAAS,CAAC;QACxC,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC;QAEjC,MAAM,gBAAgB,GAAG,MAAM,KAAK,CAClC,GAAG,oBAAoB,wBAAwB,OAAO,SAAS,kBAAkB,CAAC,IAAI,CAAC,EAAE,EACzF,EAAE,MAAM,EAAE,MAAM,EAAE,CACnB,CAAC;QAEF,IAAI,CAAC,gBAAgB,CAAC,EAAE,EAAE,CAAC;YACzB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;QACxE,CAAC;QAED,MAAM,YAAY,GAAG,MAAM,gBAAgB,CAAC,IAAI,EAAE,CAAC;QAEnD,yCAAyC;QACzC,IAAI,CAAC,uBAAuB,CAAC,YAAY,EAAE,OAAO,CAAC,CAAC;QAEpD,MAAM,iBAAiB,GAAG,MAAM,KAAK,CACnC,GAAG,oBAAoB,sBAAsB,OAAO,EAAE,EACtD;YACE,MAAM,EAAE,MAAM;YACd,OAAO,EAAE;gBACP,cAAc,EAAE,kBAAkB;gBAClC,mBAAmB,EAAE,SAAS;aAC/B;YACD,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC;SACnC,CACF,CAAC;QAEF,IAAI,CAAC,iBAAiB,CAAC,EAAE,EAAE,CAAC;YAC1B,MAAM,IAAI,KAAK,CACb,+DAA+D,CAChE,CAAC;QACJ,CAAC;QAED,MAAM,IAAI,GAAG,MAAM,iBAAiB,CAAC,IAAI,EAAE,CAAC;QAC5C,OAAO,MAAM,IAAI,CAAC,WAAW,EAAE,CAAC;IAClC,CAAC;IAEO,uBAAuB,CAAC,YAAiB,EAAE,OAAe;QAChE,iBAAiB;QACjB,YAAY,CAAC,UAAU,GAAG,GAAG,CAAC;QAC9B,YAAY,CAAC,UAAU,GAAG,GAAG,CAAC;QAC9B,YAAY,CAAC,eAAe,GAAG,GAAG,CAAC;QACnC,YAAY,CAAC,kBAAkB,GAAG,GAAG,CAAC;QAEtC,QAAQ,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;YAC9B,KAAK,OAAO;gBACV,YAAY,CAAC,UAAU,GAAG,GAAG,CAAC;gBAC9B,YAAY,CAAC,UAAU,GAAG,IAAI,CAAC;gBAC/B,YAAY,CAAC,eAAe,GAAG,GAAG,CAAC;gBACnC,YAAY,CAAC,kBAAkB,GAAG,GAAG,CAAC;gBACtC,MAAM;YACR,KAAK,KAAK;gBACR,YAAY,CAAC,UAAU,GAAG,GAAG,CAAC;gBAC9B,YAAY,CAAC,UAAU,GAAG,CAAC,IAAI,CAAC;gBAChC,YAAY,CAAC,eAAe,GAAG,GAAG,CAAC;gBACnC,YAAY,CAAC,kBAAkB,GAAG,GAAG,CAAC;gBACtC,MAAM;YACR,KAAK,OAAO;gBACV,YAAY,CAAC,UAAU,GAAG,GAAG,CAAC;gBAC9B,YAAY,CAAC,UAAU,GAAG,GAAG,CAAC;gBAC9B,YAAY,CAAC,eAAe,GAAG,GAAG,CAAC;gBACnC,YAAY,CAAC,kBAAkB,GAAG,GAAG,CAAC;gBACtC,MAAM;YACR,KAAK,WAAW;gBACd,YAAY,CAAC,UA
AU,GAAG,GAAG,CAAC;gBAC9B,YAAY,CAAC,UAAU,GAAG,IAAI,CAAC;gBAC/B,YAAY,CAAC,eAAe,GAAG,GAAG,CAAC;gBACnC,YAAY,CAAC,kBAAkB,GAAG,GAAG,CAAC;gBACtC,MAAM;YACR,oDAAoD;QACtD,CAAC;IACH,CAAC;IAED,cAAc,CAAC,aAAsB;QACnC,OAAO,aAAa,IAAI,gBAAgB,CAAC;IAC3C,CAAC;CACF"}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import { VoiceActor } from '../../../types/nijiVoice';
|
|
2
|
+
import { Talk } from '../messages';
|
|
3
|
+
import { VoiceEngine } from './VoiceEngine';
|
|
4
|
+
/**
 * NijiVoice voice synthesis engine
 *
 * Type declaration only — the implementation is not visible in this chunk,
 * so member behavior below is described from signatures and hedged where
 * it cannot be confirmed.
 */
export declare class NijiVoiceEngine implements VoiceEngine {
    /**
     * Synthesize speech audio for the given talk.
     *
     * @param input Talk to voice.
     * @param speaker Speaker/voice id forwarded to the NijiVoice API.
     * @param apiKey Optional API key — presumably required to authenticate
     *   against the NijiVoice service; confirm in the implementation.
     * @param voiceActor Optional voice-actor metadata (see types/nijiVoice).
     * @returns Raw audio bytes.
     */
    fetchAudio(input: Talk, speaker: string, apiKey?: string, voiceActor?: VoiceActor): Promise<ArrayBuffer>;
    // NOTE(review): inferred from name — likely adjusts request parameters
    // per emotion; verify against the implementation.
    private processEmotionParams;
    // NOTE(review): inferred from name — likely decodes a base64 audio
    // payload into an ArrayBuffer; verify against the implementation.
    private base64ToArrayBuffer;
    /**
     * Message spoken when testing this engine; `textVoiceText` overrides
     * the default when provided.
     */
    getTestMessage(textVoiceText?: string): string;
}
|